summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
Diffstat (limited to 'spec')
-rw-r--r--spec/config/grape_entity_patch_spec.rb21
-rw-r--r--spec/controllers/admin/instance_review_controller_spec.rb6
-rw-r--r--spec/controllers/admin/serverless/domains_controller_spec.rb370
-rw-r--r--spec/controllers/admin/topics/avatars_controller_spec.rb20
-rw-r--r--spec/controllers/admin/topics_controller_spec.rb131
-rw-r--r--spec/controllers/application_controller_spec.rb12
-rw-r--r--spec/controllers/boards/issues_controller_spec.rb2
-rw-r--r--spec/controllers/concerns/group_tree_spec.rb112
-rw-r--r--spec/controllers/dashboard/milestones_controller_spec.rb7
-rw-r--r--spec/controllers/every_controller_spec.rb19
-rw-r--r--spec/controllers/graphql_controller_spec.rb8
-rw-r--r--spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb176
-rw-r--r--spec/controllers/groups/registry/repositories_controller_spec.rb25
-rw-r--r--spec/controllers/help_controller_spec.rb8
-rw-r--r--spec/controllers/import/bulk_imports_controller_spec.rb102
-rw-r--r--spec/controllers/jira_connect/app_descriptor_controller_spec.rb15
-rw-r--r--spec/controllers/jira_connect/events_controller_spec.rb165
-rw-r--r--spec/controllers/metrics_controller_spec.rb6
-rw-r--r--spec/controllers/profiles/two_factor_auths_controller_spec.rb6
-rw-r--r--spec/controllers/profiles_controller_spec.rb5
-rw-r--r--spec/controllers/projects/alerting/notifications_controller_spec.rb112
-rw-r--r--spec/controllers/projects/branches_controller_spec.rb22
-rw-r--r--spec/controllers/projects/compare_controller_spec.rb2
-rw-r--r--spec/controllers/projects/design_management/designs/resized_image_controller_spec.rb2
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb61
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb13
-rw-r--r--spec/controllers/projects/pipeline_schedules_controller_spec.rb2
-rw-r--r--spec/controllers/projects/raw_controller_spec.rb2
-rw-r--r--spec/controllers/projects/registry/repositories_controller_spec.rb13
-rw-r--r--spec/controllers/projects/tags_controller_spec.rb19
-rw-r--r--spec/controllers/projects/usage_quotas_controller_spec.rb20
-rw-r--r--spec/controllers/projects_controller_spec.rb85
-rw-r--r--spec/controllers/registrations_controller_spec.rb16
-rw-r--r--spec/controllers/repositories/git_http_controller_spec.rb25
-rw-r--r--spec/controllers/search_controller_spec.rb12
-rw-r--r--spec/controllers/uploads_controller_spec.rb40
-rw-r--r--spec/db/schema_spec.rb6
-rw-r--r--spec/experiments/new_project_sast_enabled_experiment_spec.rb15
-rw-r--r--spec/factories/bulk_import.rb1
-rw-r--r--spec/factories/ci/pending_builds.rb1
-rw-r--r--spec/factories/ci/pipelines.rb4
-rw-r--r--spec/factories/ci/runner_projects.rb9
-rw-r--r--spec/factories/ci/runners.rb14
-rw-r--r--spec/factories/dependency_proxy.rb12
-rw-r--r--spec/factories/dependency_proxy/group_settings.rb9
-rw-r--r--spec/factories/dependency_proxy/image_ttl_group_policies.rb4
-rw-r--r--spec/factories/design_management/versions.rb2
-rw-r--r--spec/factories/groups.rb16
-rw-r--r--spec/factories/integration_data.rb8
-rw-r--r--spec/factories/integrations.rb20
-rw-r--r--spec/factories/namespaces.rb2
-rw-r--r--spec/factories/namespaces/project_namespaces.rb3
-rw-r--r--spec/factories/namespaces/user_namespaces.rb8
-rw-r--r--spec/factories/packages/build_infos.rb (renamed from spec/factories/packages/build_info.rb)0
-rw-r--r--spec/factories/packages/composer/cache_files.rb20
-rw-r--r--spec/factories/packages/composer/metadata.rb10
-rw-r--r--spec/factories/packages/conan/file_metadata.rb20
-rw-r--r--spec/factories/packages/conan/metadata.rb9
-rw-r--r--spec/factories/packages/dependencies.rb12
-rw-r--r--spec/factories/packages/dependency_links.rb20
-rw-r--r--spec/factories/packages/maven/maven_metadata.rb11
-rw-r--r--spec/factories/packages/nuget/dependency_link_metadata.rb8
-rw-r--r--spec/factories/packages/nuget/metadata.rb11
-rw-r--r--spec/factories/packages/package_files.rb (renamed from spec/factories/packages/package_file.rb)0
-rw-r--r--spec/factories/packages/package_tags.rb8
-rw-r--r--spec/factories/packages/packages.rb (renamed from spec/factories/packages.rb)113
-rw-r--r--spec/factories/packages/pypi/metadata.rb8
-rw-r--r--spec/factories/pages_domains.rb12
-rw-r--r--spec/factories/project_error_tracking_settings.rb1
-rw-r--r--spec/factories/usage_data.rb14
-rw-r--r--spec/factories/users.rb2
-rw-r--r--spec/factories_spec.rb1
-rw-r--r--spec/features/admin/admin_appearance_spec.rb4
-rw-r--r--spec/features/admin/admin_hook_logs_spec.rb4
-rw-r--r--spec/features/admin/admin_jobs_spec.rb (renamed from spec/features/admin/admin_builds_spec.rb)20
-rw-r--r--spec/features/admin/admin_mode/login_spec.rb2
-rw-r--r--spec/features/admin/admin_projects_spec.rb2
-rw-r--r--spec/features/admin/admin_serverless_domains_spec.rb89
-rw-r--r--spec/features/admin/admin_settings_spec.rb66
-rw-r--r--spec/features/boards/new_issue_spec.rb22
-rw-r--r--spec/features/boards/reload_boards_on_browser_back_spec.rb4
-rw-r--r--spec/features/boards/sidebar_labels_spec.rb50
-rw-r--r--spec/features/clusters/cluster_detail_page_spec.rb2
-rw-r--r--spec/features/cycle_analytics_spec.rb59
-rw-r--r--spec/features/dashboard/activity_spec.rb6
-rw-r--r--spec/features/dashboard/issuables_counter_spec.rb2
-rw-r--r--spec/features/groups/board_spec.rb2
-rw-r--r--spec/features/groups/container_registry_spec.rb12
-rw-r--r--spec/features/groups/dependency_proxy_for_containers_spec.rb108
-rw-r--r--spec/features/groups/dependency_proxy_spec.rb50
-rw-r--r--spec/features/groups/import_export/connect_instance_spec.rb34
-rw-r--r--spec/features/groups/import_export/migration_history_spec.rb30
-rw-r--r--spec/features/groups/members/manage_groups_spec.rb11
-rw-r--r--spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb15
-rw-r--r--spec/features/groups/milestone_spec.rb8
-rw-r--r--spec/features/groups/packages_spec.rb4
-rw-r--r--spec/features/issues/related_issues_spec.rb98
-rw-r--r--spec/features/markdown/copy_as_gfm_spec.rb9
-rw-r--r--spec/features/markdown/markdown_spec.rb8
-rw-r--r--spec/features/merge_request/user_merges_immediately_spec.rb2
-rw-r--r--spec/features/merge_request/user_resolves_conflicts_spec.rb19
-rw-r--r--spec/features/merge_request/user_sees_deployment_widget_spec.rb32
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb12
-rw-r--r--spec/features/merge_request/user_sees_suggest_pipeline_spec.rb11
-rw-r--r--spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb2
-rw-r--r--spec/features/merge_request/user_suggests_changes_on_diff_spec.rb10
-rw-r--r--spec/features/profiles/password_spec.rb10
-rw-r--r--spec/features/profiles/two_factor_auths_spec.rb4
-rw-r--r--spec/features/profiles/user_edit_profile_spec.rb52
-rw-r--r--spec/features/projects/badges/coverage_spec.rb129
-rw-r--r--spec/features/projects/badges/pipeline_badge_spec.rb2
-rw-r--r--spec/features/projects/ci/lint_spec.rb2
-rw-r--r--spec/features/projects/container_registry_spec.rb12
-rw-r--r--spec/features/projects/environments/environments_spec.rb5
-rw-r--r--spec/features/projects/files/user_creates_directory_spec.rb2
-rw-r--r--spec/features/projects/files/user_uploads_files_spec.rb14
-rw-r--r--spec/features/projects/infrastructure_registry_spec.rb6
-rw-r--r--spec/features/projects/jobs/user_browses_jobs_spec.rb282
-rw-r--r--spec/features/projects/jobs_spec.rb8
-rw-r--r--spec/features/projects/members/groups_with_access_list_spec.rb11
-rw-r--r--spec/features/projects/members/invite_group_spec.rb7
-rw-r--r--spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb18
-rw-r--r--spec/features/projects/navbar_spec.rb1
-rw-r--r--spec/features/projects/new_project_spec.rb8
-rw-r--r--spec/features/projects/packages_spec.rb4
-rw-r--r--spec/features/projects/settings/monitor_settings_spec.rb35
-rw-r--r--spec/features/projects/settings/webhooks_settings_spec.rb4
-rw-r--r--spec/features/projects/show/user_uploads_files_spec.rb22
-rw-r--r--spec/features/projects/user_creates_project_spec.rb23
-rw-r--r--spec/features/security/project/internal_access_spec.rb1
-rw-r--r--spec/features/security/project/private_access_spec.rb1
-rw-r--r--spec/features/security/project/public_access_spec.rb1
-rw-r--r--spec/features/snippets/notes_on_personal_snippets_spec.rb4
-rw-r--r--spec/features/users/login_spec.rb18
-rw-r--r--spec/features/users/show_spec.rb19
-rw-r--r--spec/finders/ci/commit_statuses_finder_spec.rb6
-rw-r--r--spec/finders/ci/pipelines_for_merge_request_finder_spec.rb176
-rw-r--r--spec/finders/clusters/agents_finder_spec.rb41
-rw-r--r--spec/finders/error_tracking/errors_finder_spec.rb15
-rw-r--r--spec/finders/issues_finder_spec.rb26
-rw-r--r--spec/finders/members_finder_spec.rb62
-rw-r--r--spec/finders/packages/group_packages_finder_spec.rb13
-rw-r--r--spec/finders/projects/members/effective_access_level_finder_spec.rb20
-rw-r--r--spec/finders/projects/topics_finder_spec.rb45
-rw-r--r--spec/finders/tags_finder_spec.rb79
-rw-r--r--spec/fixtures/api/schemas/entities/member.json6
-rw-r--r--spec/fixtures/api/schemas/external_validation.json3
-rw-r--r--spec/fixtures/api/schemas/graphql/packages/package_details.json10
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/environment.json7
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/integration.json (renamed from spec/fixtures/api/schemas/public_api/v4/service.json)0
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/integrations.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/services.json4
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/project.json96
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/tree/project.json11
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/tree/project/boards.ndjson2
-rw-r--r--spec/fixtures/lib/gitlab/performance_bar/peek_data.json87
-rw-r--r--spec/fixtures/markdown.md.erb9
-rw-r--r--spec/fixtures/ssl/letsencrypt_expired_x3.pem98
-rw-r--r--spec/frontend/.eslintrc.yml8
-rw-r--r--spec/frontend/__helpers__/fixtures.js5
-rw-r--r--spec/frontend/__helpers__/flush_promises.js3
-rw-r--r--spec/frontend/access_tokens/components/projects_token_selector_spec.js5
-rw-r--r--spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js4
-rw-r--r--spec/frontend/add_context_commits_modal/components/review_tab_container_spec.js4
-rw-r--r--spec/frontend/add_context_commits_modal/store/mutations_spec.js4
-rw-r--r--spec/frontend/admin/signup_restrictions/components/signup_form_spec.js122
-rw-r--r--spec/frontend/admin/users/components/actions/actions_spec.js12
-rw-r--r--spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap4
-rw-r--r--spec/frontend/admin/users/components/modals/delete_user_modal_spec.js24
-rw-r--r--spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap2
-rw-r--r--spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js139
-rw-r--r--spec/frontend/analytics/shared/utils_spec.js156
-rw-r--r--spec/frontend/authentication/two_factor_auth/components/manage_two_factor_form_spec.js2
-rw-r--r--spec/frontend/blob/file_template_mediator_spec.js53
-rw-r--r--spec/frontend/boards/components/board_add_new_column_trigger_spec.js59
-rw-r--r--spec/frontend/boards/stores/actions_spec.js69
-rw-r--r--spec/frontend/clusters/agents/components/show_spec.js195
-rw-r--r--spec/frontend/clusters/agents/components/token_table_spec.js135
-rw-r--r--spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap18
-rw-r--r--spec/frontend/clusters_list/components/agent_empty_state_spec.js77
-rw-r--r--spec/frontend/clusters_list/components/agent_table_spec.js117
-rw-r--r--spec/frontend/clusters_list/components/agents_spec.js246
-rw-r--r--spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js129
-rw-r--r--spec/frontend/clusters_list/components/install_agent_modal_spec.js190
-rw-r--r--spec/frontend/clusters_list/components/mock_data.js12
-rw-r--r--spec/frontend/clusters_list/mocks/apollo.js45
-rw-r--r--spec/frontend/clusters_list/stubs.js14
-rw-r--r--spec/frontend/comment_type_toggle_spec.js169
-rw-r--r--spec/frontend/commit/commit_pipeline_status_component_spec.js4
-rw-r--r--spec/frontend/commit/pipelines/pipelines_table_spec.js4
-rw-r--r--spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap9
-rw-r--r--spec/frontend/content_editor/components/top_toolbar_spec.js1
-rw-r--r--spec/frontend/content_editor/components/wrappers/details_spec.js40
-rw-r--r--spec/frontend/content_editor/components/wrappers/frontmatter_spec.js43
-rw-r--r--spec/frontend/content_editor/extensions/color_chip_spec.js33
-rw-r--r--spec/frontend/content_editor/extensions/details_content_spec.js76
-rw-r--r--spec/frontend/content_editor/extensions/details_spec.js92
-rw-r--r--spec/frontend/content_editor/extensions/math_inline_spec.js42
-rw-r--r--spec/frontend/content_editor/extensions/table_of_contents_spec.js35
-rw-r--r--spec/frontend/content_editor/markdown_processing_examples.js2
-rw-r--r--spec/frontend/content_editor/services/markdown_serializer_spec.js107
-rw-r--r--spec/frontend/cycle_analytics/base_spec.js8
-rw-r--r--spec/frontend/cycle_analytics/mock_data.js36
-rw-r--r--spec/frontend/cycle_analytics/store/actions_spec.js37
-rw-r--r--spec/frontend/cycle_analytics/store/getters_spec.js27
-rw-r--r--spec/frontend/cycle_analytics/store/mutations_spec.js29
-rw-r--r--spec/frontend/cycle_analytics/utils_spec.js16
-rw-r--r--spec/frontend/deploy_freeze/helpers.js5
-rw-r--r--spec/frontend/deploy_keys/components/action_btn_spec.js2
-rw-r--r--spec/frontend/deploy_keys/components/app_spec.js2
-rw-r--r--spec/frontend/deploy_keys/components/key_spec.js3
-rw-r--r--spec/frontend/deploy_keys/components/keys_panel_spec.js2
-rw-r--r--spec/frontend/deprecated_jquery_dropdown_spec.js3
-rw-r--r--spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap4
-rw-r--r--spec/frontend/design_management/utils/cache_update_spec.js10
-rw-r--r--spec/frontend/design_management/utils/error_messages_spec.js31
-rw-r--r--spec/frontend/diffs/components/app_spec.js22
-rw-r--r--spec/frontend/diffs/components/commit_item_spec.js4
-rw-r--r--spec/frontend/diffs/components/compare_versions_spec.js6
-rw-r--r--spec/frontend/diffs/mock_data/diff_with_commit.js5
-rw-r--r--spec/frontend/diffs/store/actions_spec.js4
-rw-r--r--spec/frontend/diffs/utils/tree_worker_utils_spec.js (renamed from spec/frontend/diffs/utils/workers_spec.js)8
-rw-r--r--spec/frontend/editor/source_editor_ci_schema_ext_spec.js6
-rw-r--r--spec/frontend/environments/environment_delete_spec.js13
-rw-r--r--spec/frontend/environments/environment_monitoring_spec.js22
-rw-r--r--spec/frontend/environments/environment_pin_spec.js10
-rw-r--r--spec/frontend/environments/environment_rollback_spec.js14
-rw-r--r--spec/frontend/environments/environment_terminal_button_spec.js19
-rw-r--r--spec/frontend/error_tracking/components/error_details_spec.js47
-rw-r--r--spec/frontend/error_tracking_settings/components/app_spec.js43
-rw-r--r--spec/frontend/experimentation/utils_spec.js14
-rw-r--r--spec/frontend/feature_flags/components/edit_feature_flag_spec.js16
-rw-r--r--spec/frontend/filterable_list_spec.js2
-rw-r--r--spec/frontend/filtered_search/dropdown_user_spec.js5
-rw-r--r--spec/frontend/filtered_search/droplab/constants_spec.js (renamed from spec/frontend/droplab/constants_spec.js)2
-rw-r--r--spec/frontend/filtered_search/droplab/drop_down_spec.js (renamed from spec/frontend/droplab/drop_down_spec.js)6
-rw-r--r--spec/frontend/filtered_search/droplab/hook_spec.js (renamed from spec/frontend/droplab/hook_spec.js)6
-rw-r--r--spec/frontend/filtered_search/droplab/plugins/ajax_filter_spec.js (renamed from spec/frontend/droplab/plugins/ajax_filter_spec.js)2
-rw-r--r--spec/frontend/filtered_search/droplab/plugins/ajax_spec.js (renamed from spec/frontend/droplab/plugins/ajax_spec.js)2
-rw-r--r--spec/frontend/filtered_search/droplab/plugins/input_setter_spec.js (renamed from spec/frontend/droplab/plugins/input_setter_spec.js)2
-rw-r--r--spec/frontend/filtered_search/visual_token_value_spec.js8
-rw-r--r--spec/frontend/fixtures/abuse_reports.rb4
-rw-r--r--spec/frontend/fixtures/admin_users.rb4
-rw-r--r--spec/frontend/fixtures/analytics.rb10
-rw-r--r--spec/frontend/fixtures/api_markdown.rb6
-rw-r--r--spec/frontend/fixtures/api_markdown.yml83
-rw-r--r--spec/frontend/fixtures/api_merge_requests.rb4
-rw-r--r--spec/frontend/fixtures/api_projects.rb4
-rw-r--r--spec/frontend/fixtures/application_settings.rb4
-rw-r--r--spec/frontend/fixtures/autocomplete.rb4
-rw-r--r--spec/frontend/fixtures/autocomplete_sources.rb4
-rw-r--r--spec/frontend/fixtures/blob.rb4
-rw-r--r--spec/frontend/fixtures/branches.rb5
-rw-r--r--spec/frontend/fixtures/clusters.rb4
-rw-r--r--spec/frontend/fixtures/commit.rb5
-rw-r--r--spec/frontend/fixtures/deploy_keys.rb4
-rw-r--r--spec/frontend/fixtures/freeze_period.rb4
-rw-r--r--spec/frontend/fixtures/groups.rb4
-rw-r--r--spec/frontend/fixtures/issues.rb4
-rw-r--r--spec/frontend/fixtures/jobs.rb4
-rw-r--r--spec/frontend/fixtures/labels.rb4
-rw-r--r--spec/frontend/fixtures/merge_requests.rb4
-rw-r--r--spec/frontend/fixtures/merge_requests_diffs.rb4
-rw-r--r--spec/frontend/fixtures/metrics_dashboard.rb4
-rw-r--r--spec/frontend/fixtures/pipeline_schedules.rb4
-rw-r--r--spec/frontend/fixtures/pipelines.rb4
-rw-r--r--spec/frontend/fixtures/projects.rb8
-rw-r--r--spec/frontend/fixtures/projects_json.rb4
-rw-r--r--spec/frontend/fixtures/prometheus_service.rb4
-rw-r--r--spec/frontend/fixtures/raw.rb8
-rw-r--r--spec/frontend/fixtures/releases.rb8
-rw-r--r--spec/frontend/fixtures/runner.rb4
-rw-r--r--spec/frontend/fixtures/search.rb4
-rw-r--r--spec/frontend/fixtures/services.rb4
-rw-r--r--spec/frontend/fixtures/sessions.rb4
-rw-r--r--spec/frontend/fixtures/snippet.rb4
-rw-r--r--spec/frontend/fixtures/startup_css.rb8
-rw-r--r--spec/frontend/fixtures/static/oauth_remember_me.html33
-rw-r--r--spec/frontend/fixtures/tags.rb4
-rw-r--r--spec/frontend/fixtures/timezones.rb4
-rw-r--r--spec/frontend/fixtures/todos.rb4
-rw-r--r--spec/frontend/fixtures/u2f.rb4
-rw-r--r--spec/frontend/fixtures/webauthn.rb4
-rw-r--r--spec/frontend/gfm_auto_complete_spec.js4
-rw-r--r--spec/frontend/header_search/components/app_spec.js32
-rw-r--r--spec/frontend/header_search/components/header_search_autocomplete_items_spec.js108
-rw-r--r--spec/frontend/header_search/mock_data.js69
-rw-r--r--spec/frontend/header_search/store/actions_spec.js34
-rw-r--r--spec/frontend/header_search/store/getters_spec.js40
-rw-r--r--spec/frontend/header_search/store/mutations_spec.js29
-rw-r--r--spec/frontend/ide/components/jobs/detail_spec.js24
-rw-r--r--spec/frontend/ide/stores/modules/commit/getters_spec.js2
-rw-r--r--spec/frontend/ide/stores/utils_spec.js2
-rw-r--r--spec/frontend/import_entities/components/pagination_bar_spec.js92
-rw-r--r--spec/frontend/integrations/edit/components/integration_form_spec.js2
-rw-r--r--spec/frontend/integrations/edit/components/jira_issues_fields_spec.js3
-rw-r--r--spec/frontend/integrations/edit/components/override_dropdown_spec.js2
-rw-r--r--spec/frontend/integrations/integration_settings_form_spec.js12
-rw-r--r--spec/frontend/integrations/overrides/components/integration_overrides_spec.js29
-rw-r--r--spec/frontend/invite_members/components/invite_members_modal_spec.js4
-rw-r--r--spec/frontend/invite_members/mock_data/api_responses.js14
-rw-r--r--spec/frontend/invite_members/utils/response_message_parser_spec.js28
-rw-r--r--spec/frontend/issuable/components/csv_export_modal_spec.js5
-rw-r--r--spec/frontend/issuable/components/csv_import_modal_spec.js7
-rw-r--r--spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js7
-rw-r--r--spec/frontend/issuable_form_spec.js19
-rw-r--r--spec/frontend/issuable_list/components/issuable_item_spec.js15
-rw-r--r--spec/frontend/issuable_suggestions/components/item_spec.js63
-rw-r--r--spec/frontend/issues_list/components/issues_list_app_spec.js36
-rw-r--r--spec/frontend/issues_list/components/new_issue_dropdown_spec.js131
-rw-r--r--spec/frontend/issues_list/mock_data.js34
-rw-r--r--spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap32
-rw-r--r--spec/frontend/jobs/components/job_app_spec.js35
-rw-r--r--spec/frontend/jobs/components/job_container_item_spec.js2
-rw-r--r--spec/frontend/jobs/components/job_log_controllers_spec.js14
-rw-r--r--spec/frontend/jobs/components/log/collapsible_section_spec.js8
-rw-r--r--spec/frontend/jobs/components/log/log_spec.js12
-rw-r--r--spec/frontend/jobs/mixins/delayed_job_mixin_spec.js2
-rw-r--r--spec/frontend/jobs/store/actions_spec.js112
-rw-r--r--spec/frontend/jobs/store/getters_spec.js8
-rw-r--r--spec/frontend/jobs/store/mutations_spec.js80
-rw-r--r--spec/frontend/jobs/store/utils_spec.js12
-rw-r--r--spec/frontend/lib/apollo/suppress_network_errors_during_navigation_link_spec.js155
-rw-r--r--spec/frontend/lib/logger/__snapshots__/hello_spec.js.snap21
-rw-r--r--spec/frontend/lib/logger/hello_spec.js28
-rw-r--r--spec/frontend/lib/utils/color_utils_spec.js18
-rw-r--r--spec/frontend/lib/utils/datetime/date_format_utility_spec.js15
-rw-r--r--spec/frontend/lib/utils/is_navigating_away_spec.js23
-rw-r--r--spec/frontend/lib/utils/text_utility_spec.js21
-rw-r--r--spec/frontend/lib/utils/url_utility_spec.js35
-rw-r--r--spec/frontend/members/components/action_buttons/remove_member_button_spec.js2
-rw-r--r--spec/frontend/members/components/action_buttons/user_action_buttons_spec.js5
-rw-r--r--spec/frontend/members/components/modals/leave_modal_spec.js29
-rw-r--r--spec/frontend/members/components/modals/remove_member_modal_spec.js33
-rw-r--r--spec/frontend/members/components/table/expires_at_spec.js86
-rw-r--r--spec/frontend/members/components/table/members_table_spec.js88
-rw-r--r--spec/frontend/members/mock_data.js7
-rw-r--r--spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap2
-rw-r--r--spec/frontend/monitoring/fixture_data.js6
-rw-r--r--spec/frontend/namespace_select_spec.js65
-rw-r--r--spec/frontend/notebook/cells/code_spec.js5
-rw-r--r--spec/frontend/notebook/cells/markdown_spec.js9
-rw-r--r--spec/frontend/notebook/cells/output/index_spec.js7
-rw-r--r--spec/frontend/notebook/index_spec.js9
-rw-r--r--spec/frontend/notes/components/comment_type_dropdown_spec.js14
-rw-r--r--spec/frontend/notes/components/diff_with_note_spec.js9
-rw-r--r--spec/frontend/notes/components/note_form_spec.js10
-rw-r--r--spec/frontend/notes/components/noteable_discussion_spec.js7
-rw-r--r--spec/frontend/notes/components/noteable_note_spec.js21
-rw-r--r--spec/frontend/notes/stores/getters_spec.js5
-rw-r--r--spec/frontend/oauth_remember_me_spec.js2
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/conan_installation_spec.js.snap36
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/dependency_row_spec.js.snap34
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/maven_installation_spec.js.snap112
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/npm_installation_spec.js.snap36
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/nuget_installation_spec.js.snap36
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap168
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/pypi_installation_spec.js.snap45
-rw-r--r--spec/frontend/packages/details/components/additional_metadata_spec.js119
-rw-r--r--spec/frontend/packages/details/components/composer_installation_spec.js133
-rw-r--r--spec/frontend/packages/details/components/conan_installation_spec.js72
-rw-r--r--spec/frontend/packages/details/components/dependency_row_spec.js62
-rw-r--r--spec/frontend/packages/details/components/installation_title_spec.js58
-rw-r--r--spec/frontend/packages/details/components/installations_commands_spec.js61
-rw-r--r--spec/frontend/packages/details/components/maven_installation_spec.js184
-rw-r--r--spec/frontend/packages/details/components/npm_installation_spec.js123
-rw-r--r--spec/frontend/packages/details/components/nuget_installation_spec.js79
-rw-r--r--spec/frontend/packages/details/components/package_title_spec.js189
-rw-r--r--spec/frontend/packages/details/components/pypi_installation_spec.js72
-rw-r--r--spec/frontend/packages/details/store/getters_spec.js295
-rw-r--r--spec/frontend/packages/shared/components/package_list_row_spec.js14
-rw-r--r--spec/frontend/packages_and_registries/dependency_proxy/app_spec.js173
-rw-r--r--spec/frontend/packages_and_registries/dependency_proxy/mock_data.js21
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/file_sha_spec.js.snap (renamed from spec/frontend/packages/details/components/__snapshots__/file_sha_spec.js.snap)0
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/terraform_installation_spec.js.snap (renamed from spec/frontend/packages_and_registries/infrastructure_registry/components/__snapshots__/terraform_installation_spec.js.snap)0
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/app_spec.js (renamed from spec/frontend/packages/details/components/app_spec.js)104
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js (renamed from spec/frontend/packages_and_registries/infrastructure_registry/components/details_title_spec.js)2
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/file_sha_spec.js (renamed from spec/frontend/packages/details/components/file_sha_spec.js)2
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js (renamed from spec/frontend/packages/details/components/package_files_spec.js)4
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_history_spec.js (renamed from spec/frontend/packages/details/components/package_history_spec.js)6
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/terraform_installation_spec.js (renamed from spec/frontend/packages_and_registries/infrastructure_registry/components/terraform_installation_spec.js)2
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/mock_data.js (renamed from spec/frontend/packages/details/mock_data.js)0
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/actions_spec.js (renamed from spec/frontend/packages/details/store/actions_spec.js)8
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/getters_spec.js40
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/mutations_spec.js (renamed from spec/frontend/packages/details/store/mutations_spec.js)6
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/npm_installation_spec.js.snap9
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/metadata/nuget_spec.js29
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/npm_installation_spec.js62
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js2
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/app_spec.js.snap12
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap122
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/packages_list_app_spec.js.snap68
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/publish_method_spec.js.snap42
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/app_spec.js154
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js156
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/packages_list_app_spec.js273
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js145
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/publish_method_spec.js47
-rw-r--r--spec/frontend/packages_and_registries/package_registry/mock_data.js34
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js189
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/duplicates_settings_spec.js14
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js322
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/package_settings_spec.js277
-rw-r--r--spec/frontend/packages_and_registries/settings/group/graphl/utils/cache_update_spec.js66
-rw-r--r--spec/frontend/packages_and_registries/settings/group/graphl/utils/optimistic_responses_spec.js23
-rw-r--r--spec/frontend/packages_and_registries/settings/group/mock_data.js50
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/__snapshots__/utils_spec.js.snap6
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap2
-rw-r--r--spec/frontend/pages/admin/projects/components/namespace_select_spec.js93
-rw-r--r--spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js175
-rw-r--r--spec/frontend/pages/profiles/password_prompt/password_prompt_modal_spec.js92
-rw-r--r--spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap2
-rw-r--r--spec/frontend/pages/projects/new/components/new_project_url_select_spec.js122
-rw-r--r--spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js78
-rw-r--r--spec/frontend/pages/sessions/new/preserve_url_fragment_spec.js2
-rw-r--r--spec/frontend/pipeline_editor/components/editor/text_editor_spec.js5
-rw-r--r--spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js51
-rw-r--r--spec/frontend/pipeline_editor/components/header/pipline_editor_mini_graph_spec.js42
-rw-r--r--spec/frontend/pipeline_editor/components/ui/pipeline_editor_empty_state_spec.js21
-rw-r--r--spec/frontend/pipeline_editor/mock_data.js53
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_app_spec.js35
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_home_spec.js9
-rw-r--r--spec/frontend/pipelines/components/pipelines_list/pipeline_mini_graph_spec.js2
-rw-r--r--spec/frontend/pipelines/components/pipelines_list/pipieline_stop_modal_spec.js27
-rw-r--r--spec/frontend/pipelines/graph/job_item_spec.js60
-rw-r--r--spec/frontend/pipelines/pipeline_multi_actions_spec.js2
-rw-r--r--spec/frontend/pipelines/pipelines_artifacts_spec.js3
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js2
-rw-r--r--spec/frontend/pipelines/pipelines_table_spec.js6
-rw-r--r--spec/frontend/pipelines/test_reports/stores/actions_spec.js3
-rw-r--r--spec/frontend/pipelines/test_reports/stores/getters_spec.js4
-rw-r--r--spec/frontend/pipelines/test_reports/stores/mutations_spec.js4
-rw-r--r--spec/frontend/pipelines/test_reports/test_reports_spec.js4
-rw-r--r--spec/frontend/pipelines/test_reports/test_suite_table_spec.js4
-rw-r--r--spec/frontend/pipelines/test_reports/test_summary_spec.js4
-rw-r--r--spec/frontend/pipelines/test_reports/test_summary_table_spec.js4
-rw-r--r--spec/frontend/projects/new/components/app_spec.js (renamed from spec/frontend/pages/projects/new/components/app_spec.js)2
-rw-r--r--spec/frontend/projects/new/components/new_project_push_tip_popover_spec.js (renamed from spec/frontend/pages/projects/new/components/new_project_push_tip_popover_spec.js)2
-rw-r--r--spec/frontend/projects/new/components/new_project_url_select_spec.js235
-rw-r--r--spec/frontend/projects/projects_filterable_list_spec.js2
-rw-r--r--spec/frontend/projects/settings/components/new_access_dropdown_spec.js345
-rw-r--r--spec/frontend/ref/components/ref_selector_spec.js11
-rw-r--r--spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js72
-rw-r--r--spec/frontend/registry/explorer/pages/list_spec.js7
-rw-r--r--spec/frontend/related_merge_requests/components/related_merge_requests_spec.js6
-rw-r--r--spec/frontend/releases/components/app_edit_new_spec.js3
-rw-r--r--spec/frontend/releases/components/app_index_apollo_client_spec.js4
-rw-r--r--spec/frontend/releases/components/app_show_spec.js6
-rw-r--r--spec/frontend/releases/components/asset_links_form_spec.js4
-rw-r--r--spec/frontend/releases/components/evidence_block_spec.js4
-rw-r--r--spec/frontend/releases/components/release_block_assets_spec.js4
-rw-r--r--spec/frontend/releases/components/release_block_footer_spec.js4
-rw-r--r--spec/frontend/releases/components/release_block_header_spec.js4
-rw-r--r--spec/frontend/releases/components/release_block_milestone_info_spec.js4
-rw-r--r--spec/frontend/releases/components/release_block_spec.js4
-rw-r--r--spec/frontend/releases/stores/modules/detail/actions_spec.js6
-rw-r--r--spec/frontend/releases/stores/modules/detail/mutations_spec.js4
-rw-r--r--spec/frontend/releases/stores/modules/list/actions_spec.js6
-rw-r--r--spec/frontend/releases/stores/modules/list/mutations_spec.js8
-rw-r--r--spec/frontend/releases/util_spec.js14
-rw-r--r--spec/frontend/reports/codequality_report/components/codequality_issue_body_spec.js6
-rw-r--r--spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js10
-rw-r--r--spec/frontend/reports/codequality_report/store/getters_spec.js6
-rw-r--r--spec/frontend/reports/codequality_report/store/utils/codequality_parser_spec.js12
-rw-r--r--spec/frontend/reports/components/report_section_spec.js12
-rw-r--r--spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js18
-rw-r--r--spec/frontend/repository/commits_service_spec.js84
-rw-r--r--spec/frontend/repository/components/blob_content_viewer_spec.js109
-rw-r--r--spec/frontend/repository/components/blob_edit_spec.js6
-rw-r--r--spec/frontend/repository/components/blob_viewers/video_viewer_spec.js22
-rw-r--r--spec/frontend/repository/components/breadcrumbs_spec.js38
-rw-r--r--spec/frontend/repository/components/fork_suggestion_spec.js44
-rw-r--r--spec/frontend/repository/components/new_directory_modal_spec.js203
-rw-r--r--spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap75
-rw-r--r--spec/frontend/repository/components/table/index_spec.js33
-rw-r--r--spec/frontend/repository/components/table/row_spec.js34
-rw-r--r--spec/frontend/repository/components/tree_content_spec.js22
-rw-r--r--spec/frontend/repository/router_spec.js28
-rw-r--r--spec/frontend/runner/admin_runners/admin_runners_app_spec.js22
-rw-r--r--spec/frontend/runner/components/cells/runner_actions_cell_spec.js14
-rw-r--r--spec/frontend/runner/components/cells/runner_summary_cell_spec.js (renamed from spec/frontend/runner/components/cells/runner_name_cell_spec.js)27
-rw-r--r--spec/frontend/runner/components/runner_list_spec.js17
-rw-r--r--spec/frontend/runner/components/runner_state_locked_badge_spec.js45
-rw-r--r--spec/frontend/runner/components/runner_state_paused_badge_spec.js45
-rw-r--r--spec/frontend/runner/components/runner_type_badge_spec.js23
-rw-r--r--spec/frontend/runner/components/runner_type_help_spec.js32
-rw-r--r--spec/frontend/runner/group_runners/group_runners_app_spec.js28
-rw-r--r--spec/frontend/runner/mock_data.js22
-rw-r--r--spec/frontend/search_settings/components/search_settings_spec.js35
-rw-r--r--spec/frontend/sidebar/assignees_spec.js8
-rw-r--r--spec/frontend/sidebar/components/reviewers/uncollapsed_reviewer_list_spec.js20
-rw-r--r--spec/frontend/sidebar/sidebar_labels_spec.js4
-rw-r--r--spec/frontend/sidebar/todo_spec.js2
-rw-r--r--spec/frontend/snippets/components/show_spec.js18
-rw-r--r--spec/frontend/snippets/components/snippet_header_spec.js41
-rw-r--r--spec/frontend/test_setup.js3
-rw-r--r--spec/frontend/tracking/get_standard_context_spec.js29
-rw-r--r--spec/frontend/tracking/tracking_initialization_spec.js140
-rw-r--r--spec/frontend/tracking/tracking_spec.js (renamed from spec/frontend/tracking_spec.js)258
-rw-r--r--spec/frontend/tracking/utils_spec.js99
-rw-r--r--spec/frontend/users_select/test_helper.js5
-rw-r--r--spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js8
-rw-r--r--spec/frontend/vue_mr_widget/components/extensions/actions_spec.js35
-rw-r--r--spec/frontend/vue_mr_widget/components/extensions/index_spec.js7
-rw-r--r--spec/frontend/vue_mr_widget/components/extensions/status_icon_spec.js36
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js25
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js4
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js10
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js9
-rw-r--r--spec/frontend/vue_mr_widget/mock_data.js2
-rw-r--r--spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js18
-rw-r--r--spec/frontend/vue_mr_widget/mr_widget_options_spec.js51
-rw-r--r--spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/test_extension.js37
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap4
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap2
-rw-r--r--spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js39
-rw-r--r--spec/frontend/vue_shared/components/color_picker/color_picker_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/dismissible_feedback_alert_spec.js34
-rw-r--r--spec/frontend/vue_shared/components/dropdown_keyboard_navigation_spec.js141
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js57
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js52
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js97
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/issue/issue_assignees_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js27
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/project_selector/project_selector_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js51
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js58
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_spec.js144
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js109
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/mock_data.js24
-rw-r--r--spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap11
-rw-r--r--spec/frontend/vue_shared/components/user_deletion_obstacles/user_deletion_obstacles_list_spec.js116
-rw-r--r--spec/frontend/vue_shared/components/user_deletion_obstacles/utils_spec.js43
-rw-r--r--spec/frontend/vue_shared/components/user_popover/user_popover_spec.js30
-rw-r--r--spec/frontend/vue_shared/components/web_ide_link_spec.js22
-rw-r--r--spec/frontend/vue_shared/directives/validation_spec.js137
-rw-r--r--spec/frontend/vue_shared/oncall_schedules_list_spec.js87
-rw-r--r--spec/frontend/vue_shared/security_reports/mock_data.js2
-rw-r--r--spec/frontend/vue_shared/security_reports/security_reports_app_spec.js12
-rw-r--r--spec/frontend/vue_shared/security_reports/store/getters_spec.js4
-rw-r--r--spec/frontend_integration/fixture_generators.yml5
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/graphql.js4
-rw-r--r--spec/graphql/mutations/clusters/agent_tokens/create_spec.rb61
-rw-r--r--spec/graphql/mutations/clusters/agent_tokens/delete_spec.rb52
-rw-r--r--spec/graphql/mutations/clusters/agents/create_spec.rb50
-rw-r--r--spec/graphql/mutations/clusters/agents/delete_spec.rb51
-rw-r--r--spec/graphql/mutations/customer_relations/contacts/create_spec.rb101
-rw-r--r--spec/graphql/mutations/customer_relations/contacts/update_spec.rb75
-rw-r--r--spec/graphql/mutations/customer_relations/organizations/create_spec.rb11
-rw-r--r--spec/graphql/mutations/customer_relations/organizations/update_spec.rb23
-rw-r--r--spec/graphql/mutations/dependency_proxy/group_settings/update_spec.rb55
-rw-r--r--spec/graphql/mutations/groups/update_spec.rb4
-rw-r--r--spec/graphql/mutations/issues/create_spec.rb15
-rw-r--r--spec/graphql/resolvers/board_list_issues_resolver_spec.rb14
-rw-r--r--spec/graphql/resolvers/board_list_resolver_spec.rb39
-rw-r--r--spec/graphql/resolvers/clusters/agent_tokens_resolver_spec.rb32
-rw-r--r--spec/graphql/resolvers/clusters/agents_resolver_spec.rb77
-rw-r--r--spec/graphql/resolvers/issues_resolver_spec.rb133
-rw-r--r--spec/graphql/resolvers/kas/agent_configurations_resolver_spec.rb48
-rw-r--r--spec/graphql/resolvers/kas/agent_connections_resolver_spec.rb66
-rw-r--r--spec/graphql/resolvers/project_pipelines_resolver_spec.rb20
-rw-r--r--spec/graphql/types/base_field_spec.rb11
-rw-r--r--spec/graphql/types/board_list_type_spec.rb27
-rw-r--r--spec/graphql/types/ci/pipeline_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/runner_type_spec.rb2
-rw-r--r--spec/graphql/types/clusters/agent_token_type_spec.rb13
-rw-r--r--spec/graphql/types/clusters/agent_type_spec.rb13
-rw-r--r--spec/graphql/types/container_expiration_policy_older_than_enum_spec.rb2
-rw-r--r--spec/graphql/types/error_tracking/sentry_detailed_error_type_spec.rb1
-rw-r--r--spec/graphql/types/issue_type_spec.rb2
-rw-r--r--spec/graphql/types/kas/agent_configuration_type_spec.rb11
-rw-r--r--spec/graphql/types/kas/agent_connection_type_spec.rb22
-rw-r--r--spec/graphql/types/kas/agent_metadata_type_spec.rb13
-rw-r--r--spec/graphql/types/packages/nuget/metadatum_type_spec.rb6
-rw-r--r--spec/graphql/types/packages/package_type_spec.rb2
-rw-r--r--spec/graphql/types/permission_types/ci/runner_spec.rb15
-rw-r--r--spec/graphql/types/project_type_spec.rb144
-rw-r--r--spec/graphql/types/query_type_spec.rb11
-rw-r--r--spec/helpers/application_settings_helper_spec.rb32
-rw-r--r--spec/helpers/avatars_helper_spec.rb14
-rw-r--r--spec/helpers/ci/runners_helper_spec.rb13
-rw-r--r--spec/helpers/container_expiration_policies_helper_spec.rb1
-rw-r--r--spec/helpers/feature_flags_helper_spec.rb23
-rw-r--r--spec/helpers/groups_helper_spec.rb2
-rw-r--r--spec/helpers/hooks_helper_spec.rb23
-rw-r--r--spec/helpers/issuables_helper_spec.rb12
-rw-r--r--spec/helpers/issues_helper_spec.rb4
-rw-r--r--spec/helpers/one_trust_helper_spec.rb48
-rw-r--r--spec/helpers/packages_helper_spec.rb30
-rw-r--r--spec/helpers/projects/cluster_agents_helper_spec.rb21
-rw-r--r--spec/helpers/projects_helper_spec.rb12
-rw-r--r--spec/helpers/routing/pseudonymization_helper_spec.rb30
-rw-r--r--spec/helpers/search_helper_spec.rb4
-rw-r--r--spec/helpers/startupjs_helper_spec.rb22
-rw-r--r--spec/helpers/tab_helper_spec.rb54
-rw-r--r--spec/helpers/time_zone_helper_spec.rb12
-rw-r--r--spec/helpers/user_callouts_helper_spec.rb33
-rw-r--r--spec/initializers/carrierwave_patch_spec.rb21
-rw-r--r--spec/initializers/database_config_spec.rb10
-rw-r--r--spec/initializers/lograge_spec.rb38
-rw-r--r--spec/initializers/zz_metrics_spec.rb20
-rw-r--r--spec/lib/api/base_spec.rb92
-rw-r--r--spec/lib/api/ci/helpers/runner_spec.rb16
-rw-r--r--spec/lib/api/entities/clusters/agent_authorization_spec.rb35
-rw-r--r--spec/lib/api/entities/user_spec.rb13
-rw-r--r--spec/lib/api/every_api_endpoint_spec.rb4
-rw-r--r--spec/lib/api/helpers_spec.rb12
-rw-r--r--spec/lib/api/validations/validators/project_portable_spec.rb33
-rw-r--r--spec/lib/atlassian/jira_connect/asymmetric_jwt_spec.rb99
-rw-r--r--spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb10
-rw-r--r--spec/lib/backup/gitaly_backup_spec.rb40
-rw-r--r--spec/lib/banzai/cross_project_reference_spec.rb2
-rw-r--r--spec/lib/banzai/filter/front_matter_filter_spec.rb10
-rw-r--r--spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb13
-rw-r--r--spec/lib/banzai/filter/references/reference_cache_spec.rb39
-rw-r--r--spec/lib/banzai/filter/syntax_highlight_filter_spec.rb8
-rw-r--r--spec/lib/banzai/pipeline/full_pipeline_spec.rb8
-rw-r--r--spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/clients/graphql_spec.rb2
-rw-r--r--spec/lib/bulk_imports/clients/http_spec.rb34
-rw-r--r--spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb98
-rw-r--r--spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb (renamed from spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb)2
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/boards_pipeline_spec.rb49
-rw-r--r--spec/lib/bulk_imports/groups/stage_spec.rb18
-rw-r--r--spec/lib/bulk_imports/ndjson_pipeline_spec.rb16
-rw-r--r--spec/lib/bulk_imports/network_error_spec.rb72
-rw-r--r--spec/lib/bulk_imports/projects/graphql/get_repository_query_spec.rb21
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb168
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb73
-rw-r--r--spec/lib/bulk_imports/projects/stage_spec.rb16
-rw-r--r--spec/lib/container_registry/client_spec.rb94
-rw-r--r--spec/lib/container_registry/tag_spec.rb20
-rw-r--r--spec/lib/error_tracking/sentry_client/issue_spec.rb4
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start_spec.rb12
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_created_spec.rb4
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_deployed_to_production_spec.rb12
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_first_mentioned_in_commit_spec.rb5
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end_spec.rb5
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_created_spec.rb4
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_first_deployed_to_production_spec.rb5
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_finished_spec.rb5
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_started_spec.rb5
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_merged_spec.rb5
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start_spec.rb5
-rw-r--r--spec/lib/gitlab/application_rate_limiter_spec.rb10
-rw-r--r--spec/lib/gitlab/auth/request_authenticator_spec.rb109
-rw-r--r--spec/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at_spec.rb140
-rw-r--r--spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/migrate_null_private_profile_to_false_spec.rb23
-rw-r--r--spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/populate_topics_total_projects_count_cache_spec.rb35
-rw-r--r--spec/lib/gitlab/backtrace_cleaner_spec.rb3
-rw-r--r--spec/lib/gitlab/cache/import/caching_spec.rb10
-rw-r--r--spec/lib/gitlab/chat/command_spec.rb1
-rw-r--r--spec/lib/gitlab/checks/matching_merge_request_spec.rb41
-rw-r--r--spec/lib/gitlab/ci/build/auto_retry_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/entry/retry_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper_spec.rb22
-rw-r--r--spec/lib/gitlab/ci/config/external/processor_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/config/external/rules_spec.rb20
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb31
-rw-r--r--spec/lib/gitlab/ci/reports/security/flag_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/templates/templates_spec.rb7
-rw-r--r--spec/lib/gitlab/ci/trace/archive_spec.rb101
-rw-r--r--spec/lib/gitlab/ci/trace/metrics_spec.rb23
-rw-r--r--spec/lib/gitlab/ci/trace/remote_checksum_spec.rb85
-rw-r--r--spec/lib/gitlab/content_security_policy/config_loader_spec.rb27
-rw-r--r--spec/lib/gitlab/database/bulk_update_spec.rb53
-rw-r--r--spec/lib/gitlab/database/connection_spec.rb10
-rw-r--r--spec/lib/gitlab/database/consistency_spec.rb8
-rw-r--r--spec/lib/gitlab/database/count_spec.rb44
-rw-r--r--spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb4
-rw-r--r--spec/lib/gitlab/database/load_balancing/active_record_proxy_spec.rb20
-rw-r--r--spec/lib/gitlab/database/load_balancing/configuration_spec.rb8
-rw-r--r--spec/lib/gitlab/database/load_balancing/host_spec.rb8
-rw-r--r--spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb49
-rw-r--r--spec/lib/gitlab/database/load_balancing/primary_host_spec.rb52
-rw-r--r--spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb124
-rw-r--r--spec/lib/gitlab/database/load_balancing/setup_spec.rb119
-rw-r--r--spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb30
-rw-r--r--spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb87
-rw-r--r--spec/lib/gitlab/database/load_balancing/sticking_spec.rb321
-rw-r--r--spec/lib/gitlab/database/load_balancing_spec.rb205
-rw-r--r--spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb9
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb9
-rw-r--r--spec/lib/gitlab/database/migrations/instrumentation_spec.rb19
-rw-r--r--spec/lib/gitlab/database/migrations/observers/query_details_spec.rb6
-rw-r--r--spec/lib/gitlab/database/migrations/observers/query_log_spec.rb6
-rw-r--r--spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migrations/runner_spec.rb109
-rw-r--r--spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb5
-rw-r--r--spec/lib/gitlab/database/partitioning/multi_database_partition_dropper_spec.rb38
-rw-r--r--spec/lib/gitlab/database/partitioning/partition_manager_spec.rb2
-rw-r--r--spec/lib/gitlab/database/partitioning_spec.rb18
-rw-r--r--spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb2
-rw-r--r--spec/lib/gitlab/database/schema_migrations/context_spec.rb18
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_spec.rb2
-rw-r--r--spec/lib/gitlab/database_spec.rb34
-rw-r--r--spec/lib/gitlab/doctor/secrets_spec.rb10
-rw-r--r--spec/lib/gitlab/email/handler/create_issue_handler_spec.rb30
-rw-r--r--spec/lib/gitlab/email/handler/service_desk_handler_spec.rb9
-rw-r--r--spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb8
-rw-r--r--spec/lib/gitlab/endpoint_attributes_spec.rb133
-rw-r--r--spec/lib/gitlab/etag_caching/router/graphql_spec.rb2
-rw-r--r--spec/lib/gitlab/etag_caching/router/restful_spec.rb2
-rw-r--r--spec/lib/gitlab/feature_categories_spec.rb74
-rw-r--r--spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb63
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb3
-rw-r--r--spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb1
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb54
-rw-r--r--spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb12
-rw-r--r--spec/lib/gitlab/github_import/parallel_importer_spec.rb12
-rw-r--r--spec/lib/gitlab/github_import/parallel_scheduling_spec.rb6
-rw-r--r--spec/lib/gitlab/github_import/representation/diff_note_spec.rb137
-rw-r--r--spec/lib/gitlab/github_import/representation/diff_notes/suggestion_formatter_spec.rb164
-rw-r--r--spec/lib/gitlab/github_import/representation/issue_spec.rb13
-rw-r--r--spec/lib/gitlab/github_import/representation/lfs_object_spec.rb17
-rw-r--r--spec/lib/gitlab/github_import/representation/note_spec.rb22
-rw-r--r--spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb17
-rw-r--r--spec/lib/gitlab/github_import/representation/pull_request_spec.rb12
-rw-r--r--spec/lib/gitlab/github_import/sequential_importer_spec.rb31
-rw-r--r--spec/lib/gitlab/health_checks/probes/collection_spec.rb3
-rw-r--r--spec/lib/gitlab/health_checks/redis/rate_limiting_check_spec.rb8
-rw-r--r--spec/lib/gitlab/health_checks/redis/sessions_check_spec.rb8
-rw-r--r--spec/lib/gitlab/import/import_failure_service_spec.rb244
-rw-r--r--spec/lib/gitlab/import/metrics_spec.rb108
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml8
-rw-r--r--spec/lib/gitlab/import_export/attributes_permitter_spec.rb52
-rw-r--r--spec/lib/gitlab/import_export/command_line_util_spec.rb34
-rw-r--r--spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb19
-rw-r--r--spec/lib/gitlab/import_export/merge_request_parser_spec.rb36
-rw-r--r--spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb40
-rw-r--r--spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb3
-rw-r--r--spec/lib/gitlab/instrumentation/redis_spec.rb4
-rw-r--r--spec/lib/gitlab/instrumentation_helper_spec.rb105
-rw-r--r--spec/lib/gitlab/kas_spec.rb42
-rw-r--r--spec/lib/gitlab/mail_room/mail_room_spec.rb3
-rw-r--r--spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb140
-rw-r--r--spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb29
-rw-r--r--spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb29
-rw-r--r--spec/lib/gitlab/metrics/exporter/web_exporter_spec.rb9
-rw-r--r--spec/lib/gitlab/metrics/instrumentation_spec.rb342
-rw-r--r--spec/lib/gitlab/metrics/rails_slis_spec.rb58
-rw-r--r--spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb187
-rw-r--r--spec/lib/gitlab/metrics/sli_spec.rb99
-rw-r--r--spec/lib/gitlab/metrics/subscribers/active_record_spec.rb8
-rw-r--r--spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb4
-rw-r--r--spec/lib/gitlab/metrics/web_transaction_spec.rb15
-rw-r--r--spec/lib/gitlab/middleware/go_spec.rb2
-rw-r--r--spec/lib/gitlab/middleware/multipart/handler_spec.rb1
-rw-r--r--spec/lib/gitlab/middleware/speedscope_spec.rb52
-rw-r--r--spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb28
-rw-r--r--spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/order_values_loader_strategy_spec.rb34
-rw-r--r--spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/record_loader_strategy_spec.rb60
-rw-r--r--spec/lib/gitlab/pagination/keyset/iterator_spec.rb34
-rw-r--r--spec/lib/gitlab/path_regex_spec.rb21
-rw-r--r--spec/lib/gitlab/performance_bar/stats_spec.rb16
-rw-r--r--spec/lib/gitlab/project_authorizations_spec.rb37
-rw-r--r--spec/lib/gitlab/rack_attack/request_spec.rb31
-rw-r--r--spec/lib/gitlab/rate_limit_helpers_spec.rb2
-rw-r--r--spec/lib/gitlab/redis/queues_spec.rb20
-rw-r--r--spec/lib/gitlab/redis/rate_limiting_spec.rb7
-rw-r--r--spec/lib/gitlab/redis/sessions_spec.rb7
-rw-r--r--spec/lib/gitlab/redis/trace_chunks_spec.rb50
-rw-r--r--spec/lib/gitlab/regex_spec.rb28
-rw-r--r--spec/lib/gitlab/request_endpoints_spec.rb24
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb135
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb21
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb20
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb114
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb61
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb35
-rw-r--r--spec/lib/gitlab/sidekiq_middleware_spec.rb93
-rw-r--r--spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb13
-rw-r--r--spec/lib/gitlab/sidekiq_versioning/manager_spec.rb25
-rw-r--r--spec/lib/gitlab/sidekiq_versioning_spec.rb6
-rw-r--r--spec/lib/gitlab/slash_commands/issue_move_spec.rb2
-rw-r--r--spec/lib/gitlab/subscription_portal_spec.rb93
-rw-r--r--spec/lib/gitlab/tracking/docs/helper_spec.rb91
-rw-r--r--spec/lib/gitlab/tracking/docs/renderer_spec.rb23
-rw-r--r--spec/lib/gitlab/tracking/standard_context_spec.rb19
-rw-r--r--spec/lib/gitlab/tracking_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb31
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/active_user_count_metric_spec.rb13
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_users_associating_milestones_to_releases_metric_spec.rb12
-rw-r--r--spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb115
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb1
-rw-r--r--spec/lib/gitlab/usage_data_metrics_spec.rb2
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb105
-rw-r--r--spec/lib/gitlab/utils/delegator_override/error_spec.rb13
-rw-r--r--spec/lib/gitlab/utils/delegator_override/validator_spec.rb81
-rw-r--r--spec/lib/gitlab/utils/delegator_override_spec.rb97
-rw-r--r--spec/lib/gitlab/view/presenter/base_spec.rb34
-rw-r--r--spec/lib/gitlab/with_feature_category_spec.rb69
-rw-r--r--spec/lib/gitlab/workhorse_spec.rb44
-rw-r--r--spec/lib/gitlab/x509/certificate_spec.rb (renamed from spec/lib/gitlab/email/smime/certificate_spec.rb)2
-rw-r--r--spec/lib/peek/views/active_record_spec.rb202
-rw-r--r--spec/lib/rouge/formatters/html_gitlab_spec.rb2
-rw-r--r--spec/lib/sidebars/groups/menus/scope_menu_spec.rb15
-rw-r--r--spec/lib/sidebars/projects/menus/scope_menu_spec.rb6
-rw-r--r--spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb64
-rw-r--r--spec/migrations/20191015154408_drop_merge_requests_require_code_owner_approval_from_projects_spec.rb56
-rw-r--r--spec/migrations/20191125114345_add_admin_mode_protected_path_spec.rb49
-rw-r--r--spec/migrations/20191204114127_delete_legacy_triggers_spec.rb23
-rw-r--r--spec/migrations/20210906130643_drop_temporary_columns_and_triggers_for_taggings_spec.rb23
-rw-r--r--spec/migrations/20210907013944_cleanup_bigint_conversion_for_ci_builds_metadata_spec.rb23
-rw-r--r--spec/migrations/20210910194952_update_report_type_for_existing_approval_project_rules_spec.rb48
-rw-r--r--spec/migrations/20210915022415_cleanup_bigint_conversion_for_ci_builds_spec.rb23
-rw-r--r--spec/migrations/20210918201050_remove_old_pending_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb45
-rw-r--r--spec/migrations/20210918202855_reschedule_pending_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb30
-rw-r--r--spec/migrations/20210922021816_drop_int4_columns_for_ci_job_artifacts_spec.rb23
-rw-r--r--spec/migrations/20210922025631_drop_int4_column_for_ci_sources_pipelines_spec.rb21
-rw-r--r--spec/migrations/20210922082019_drop_int4_column_for_events_spec.rb21
-rw-r--r--spec/migrations/20210922091402_drop_int4_column_for_push_event_payloads_spec.rb21
-rw-r--r--spec/migrations/20211006060436_schedule_populate_topics_total_projects_count_cache_spec.rb29
-rw-r--r--spec/migrations/add_default_and_free_plans_spec.rb34
-rw-r--r--spec/migrations/add_unique_constraint_to_approvals_user_id_and_merge_request_id_spec.rb57
-rw-r--r--spec/migrations/backfill_and_add_not_null_constraint_to_released_at_column_on_releases_table_spec.rb28
-rw-r--r--spec/migrations/backfill_operations_feature_flags_active_spec.rb52
-rw-r--r--spec/migrations/backfill_releases_table_updated_at_and_add_not_null_constraints_to_timestamps_spec.rb57
-rw-r--r--spec/migrations/backport_enterprise_schema_spec.rb41
-rw-r--r--spec/migrations/change_outbound_local_requests_whitelist_default_spec.rb21
-rw-r--r--spec/migrations/change_packages_size_defaults_in_project_statistics_spec.rb35
-rw-r--r--spec/migrations/clean_up_noteable_id_for_notes_on_commits_spec.rb34
-rw-r--r--spec/migrations/cleanup_legacy_artifact_migration_spec.rb52
-rw-r--r--spec/migrations/drop_project_ci_cd_settings_merge_trains_enabled_spec.rb21
-rw-r--r--spec/migrations/encrypt_feature_flags_clients_tokens_spec.rb36
-rw-r--r--spec/migrations/encrypt_plaintext_attributes_on_application_settings_spec.rb58
-rw-r--r--spec/migrations/enqueue_reset_merge_status_second_run_spec.rb52
-rw-r--r--spec/migrations/enqueue_reset_merge_status_spec.rb52
-rw-r--r--spec/migrations/fill_productivity_analytics_start_date_spec.rb39
-rw-r--r--spec/migrations/fix_max_pages_size_spec.rb19
-rw-r--r--spec/migrations/fix_null_type_labels_spec.rb36
-rw-r--r--spec/migrations/fix_pool_repository_source_project_id_spec.rb29
-rw-r--r--spec/migrations/fix_wrong_pages_access_level_spec.rb99
-rw-r--r--spec/migrations/generate_lets_encrypt_private_key_spec.rb14
-rw-r--r--spec/migrations/insert_project_hooks_plan_limits_spec.rb67
-rw-r--r--spec/migrations/migrate_auto_dev_ops_domain_to_cluster_domain_spec.rb114
-rw-r--r--spec/migrations/migrate_code_owner_approval_status_to_protected_branches_in_batches_spec.rb63
-rw-r--r--spec/migrations/migrate_discussion_id_on_promoted_epics_spec.rb81
-rw-r--r--spec/migrations/migrate_k8s_service_integration_spec.rb162
-rw-r--r--spec/migrations/migrate_legacy_managed_clusters_to_unmanaged_spec.rb55
-rw-r--r--spec/migrations/migrate_managed_clusters_with_no_token_to_unmanaged_spec.rb59
-rw-r--r--spec/migrations/migrate_ops_feature_flags_scopes_target_user_ids_spec.rb135
-rw-r--r--spec/migrations/migrate_storage_migrator_sidekiq_queue_spec.rb43
-rw-r--r--spec/migrations/move_limits_from_plans_spec.rb35
-rw-r--r--spec/migrations/nullify_users_role_spec.rb33
-rw-r--r--spec/migrations/populate_project_statistics_packages_size_spec.rb37
-rw-r--r--spec/migrations/populate_rule_type_on_approval_merge_request_rules_spec.rb39
-rw-r--r--spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_features_spec.rb28
-rw-r--r--spec/migrations/remove_empty_github_service_templates_spec.rb55
-rw-r--r--spec/migrations/remove_schedule_and_status_from_pending_alert_escalations_spec.rb37
-rw-r--r--spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb48
-rw-r--r--spec/migrations/schedule_pages_metadata_migration_spec.rb29
-rw-r--r--spec/migrations/schedule_populate_merge_request_assignees_table_spec.rb47
-rw-r--r--spec/migrations/schedule_populate_status_column_of_security_scans_spec.rb48
-rw-r--r--spec/migrations/schedule_sync_issuables_state_id_spec.rb81
-rw-r--r--spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb57
-rw-r--r--spec/migrations/set_issue_id_for_all_versions_spec.rb38
-rw-r--r--spec/migrations/sync_issuables_state_id_spec.rb41
-rw-r--r--spec/migrations/truncate_user_fullname_spec.rb23
-rw-r--r--spec/migrations/update_minimum_password_length_spec.rb30
-rw-r--r--spec/models/analytics/cycle_analytics/issue_stage_event_spec.rb2
-rw-r--r--spec/models/analytics/cycle_analytics/merge_request_stage_event_spec.rb2
-rw-r--r--spec/models/application_record_spec.rb2
-rw-r--r--spec/models/application_setting_spec.rb7
-rw-r--r--spec/models/bulk_import_spec.rb14
-rw-r--r--spec/models/bulk_imports/entity_spec.rb40
-rw-r--r--spec/models/bulk_imports/file_transfer/group_config_spec.rb6
-rw-r--r--spec/models/bulk_imports/file_transfer/project_config_spec.rb48
-rw-r--r--spec/models/bulk_imports/tracker_spec.rb3
-rw-r--r--spec/models/ci/bridge_spec.rb6
-rw-r--r--spec/models/ci/build_spec.rb46
-rw-r--r--spec/models/ci/build_trace_metadata_spec.rb29
-rw-r--r--spec/models/ci/pipeline_spec.rb155
-rw-r--r--spec/models/ci/processable_spec.rb9
-rw-r--r--spec/models/ci/resource_group_spec.rb57
-rw-r--r--spec/models/ci/runner_spec.rb23
-rw-r--r--spec/models/clusters/agents/group_authorization_spec.rb6
-rw-r--r--spec/models/clusters/agents/implicit_authorization_spec.rb2
-rw-r--r--spec/models/clusters/agents/project_authorization_spec.rb6
-rw-r--r--spec/models/clusters/applications/runner_spec.rb3
-rw-r--r--spec/models/commit_spec.rb18
-rw-r--r--spec/models/commit_status_spec.rb10
-rw-r--r--spec/models/concerns/bulk_insert_safe_spec.rb42
-rw-r--r--spec/models/concerns/checksummable_spec.rb12
-rw-r--r--spec/models/concerns/ci/has_status_spec.rb12
-rw-r--r--spec/models/concerns/vulnerability_finding_helpers_spec.rb27
-rw-r--r--spec/models/customer_relations/contact_spec.rb1
-rw-r--r--spec/models/dependency_proxy/blob_spec.rb5
-rw-r--r--spec/models/dependency_proxy/image_ttl_group_policy_spec.rb9
-rw-r--r--spec/models/dependency_proxy/manifest_spec.rb5
-rw-r--r--spec/models/deployment_spec.rb167
-rw-r--r--spec/models/environment_spec.rb66
-rw-r--r--spec/models/error_tracking/error_spec.rb9
-rw-r--r--spec/models/error_tracking/project_error_tracking_setting_spec.rb46
-rw-r--r--spec/models/group_spec.rb31
-rw-r--r--spec/models/instance_configuration_spec.rb21
-rw-r--r--spec/models/integration_spec.rb1
-rw-r--r--spec/models/integrations/open_project_spec.rb30
-rw-r--r--spec/models/integrations/open_project_tracker_data_spec.rb19
-rw-r--r--spec/models/issue_spec.rb31
-rw-r--r--spec/models/loose_foreign_keys/deleted_record_spec.rb56
-rw-r--r--spec/models/member_spec.rb83
-rw-r--r--spec/models/members/project_member_spec.rb11
-rw-r--r--spec/models/merge_request_spec.rb81
-rw-r--r--spec/models/namespace/traversal_hierarchy_spec.rb9
-rw-r--r--spec/models/namespace_setting_spec.rb2
-rw-r--r--spec/models/namespace_spec.rb183
-rw-r--r--spec/models/namespaces/user_namespace_spec.rb12
-rw-r--r--spec/models/note_spec.rb28
-rw-r--r--spec/models/operations/feature_flag_spec.rb47
-rw-r--r--spec/models/packages/helm/file_metadatum_spec.rb4
-rw-r--r--spec/models/packages/package_spec.rb2
-rw-r--r--spec/models/pages_domain_spec.rb19
-rw-r--r--spec/models/preloaders/merge_requests_preloader_spec.rb42
-rw-r--r--spec/models/product_analytics_event_spec.rb11
-rw-r--r--spec/models/project_feature_usage_spec.rb4
-rw-r--r--spec/models/project_spec.rb69
-rw-r--r--spec/models/project_statistics_spec.rb4
-rw-r--r--spec/models/projects/topic_spec.rb79
-rw-r--r--spec/models/protected_branch_spec.rb11
-rw-r--r--spec/models/repository_spec.rb2
-rw-r--r--spec/models/snippet_repository_spec.rb1
-rw-r--r--spec/models/upload_spec.rb24
-rw-r--r--spec/models/user_detail_spec.rb1
-rw-r--r--spec/models/user_preference_spec.rb6
-rw-r--r--spec/models/user_spec.rb257
-rw-r--r--spec/models/users/credit_card_validation_spec.rb18
-rw-r--r--spec/policies/clusters/agent_policy_spec.rb28
-rw-r--r--spec/policies/clusters/agent_token_policy_spec.rb31
-rw-r--r--spec/policies/group_policy_spec.rb2
-rw-r--r--spec/policies/namespaces/project_namespace_policy_spec.rb46
-rw-r--r--spec/policies/namespaces/user_namespace_policy_spec.rb (renamed from spec/policies/namespace_policy_spec.rb)2
-rw-r--r--spec/presenters/clusters/cluster_presenter_spec.rb143
-rw-r--r--spec/presenters/commit_status_presenter_spec.rb19
-rw-r--r--spec/presenters/group_clusterable_presenter_spec.rb6
-rw-r--r--spec/presenters/instance_clusterable_presenter_spec.rb6
-rw-r--r--spec/presenters/project_clusterable_presenter_spec.rb6
-rw-r--r--spec/requests/api/api_spec.rb112
-rw-r--r--spec/requests/api/bulk_imports_spec.rb9
-rw-r--r--spec/requests/api/ci/resource_groups_spec.rb95
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb6
-rw-r--r--spec/requests/api/ci/runner/runners_delete_spec.rb2
-rw-r--r--spec/requests/api/ci/runner/runners_post_spec.rb6
-rw-r--r--spec/requests/api/ci/runner/runners_verify_post_spec.rb2
-rw-r--r--spec/requests/api/ci/runners_reset_registration_token_spec.rb2
-rw-r--r--spec/requests/api/ci/runners_spec.rb98
-rw-r--r--spec/requests/api/ci/triggers_spec.rb6
-rw-r--r--spec/requests/api/container_repositories_spec.rb26
-rw-r--r--spec/requests/api/deployments_spec.rb10
-rw-r--r--spec/requests/api/environments_spec.rb7
-rw-r--r--spec/requests/api/error_tracking/client_keys_spec.rb (renamed from spec/requests/api/error_tracking_client_keys_spec.rb)2
-rw-r--r--spec/requests/api/error_tracking/collector_spec.rb (renamed from spec/requests/api/error_tracking_collector_spec.rb)2
-rw-r--r--spec/requests/api/error_tracking/project_settings_spec.rb (renamed from spec/requests/api/error_tracking_spec.rb)2
-rw-r--r--spec/requests/api/graphql/boards/board_list_issues_query_spec.rb16
-rw-r--r--spec/requests/api/graphql/boards/board_list_query_spec.rb98
-rw-r--r--spec/requests/api/graphql/boards/board_lists_query_spec.rb6
-rw-r--r--spec/requests/api/graphql/ci/runner_spec.rb88
-rw-r--r--spec/requests/api/graphql/ci/runners_spec.rb12
-rw-r--r--spec/requests/api/graphql/container_repository/container_repository_details_spec.rb2
-rw-r--r--spec/requests/api/graphql/group/container_repositories_spec.rb9
-rw-r--r--spec/requests/api/graphql/group/dependency_proxy_group_setting_spec.rb71
-rw-r--r--spec/requests/api/graphql/group/issues_spec.rb123
-rw-r--r--spec/requests/api/graphql/mutations/ci/runners_registration_token/reset_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/clusters/agent_tokens/agent_tokens/create_spec.rb52
-rw-r--r--spec/requests/api/graphql/mutations/clusters/agents/create_spec.rb42
-rw-r--r--spec/requests/api/graphql/mutations/clusters/agents/delete_spec.rb43
-rw-r--r--spec/requests/api/graphql/mutations/dependency_proxy/group_settings/update_spec.rb65
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb2
-rw-r--r--spec/requests/api/graphql/namespace/projects_spec.rb8
-rw-r--r--spec/requests/api/graphql/project/cluster_agents_spec.rb108
-rw-r--r--spec/requests/api/graphql/project/container_repositories_spec.rb13
-rw-r--r--spec/requests/api/graphql/project/issues_spec.rb125
-rw-r--r--spec/requests/api/graphql/project/merge_request/pipelines_spec.rb4
-rw-r--r--spec/requests/api/graphql/project/merge_requests_spec.rb8
-rw-r--r--spec/requests/api/graphql/project/releases_spec.rb24
-rw-r--r--spec/requests/api/graphql/users_spec.rb12
-rw-r--r--spec/requests/api/group_container_repositories_spec.rb7
-rw-r--r--spec/requests/api/groups_spec.rb38
-rw-r--r--spec/requests/api/helm_packages_spec.rb18
-rw-r--r--spec/requests/api/integrations_spec.rb363
-rw-r--r--spec/requests/api/internal/base_spec.rb8
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb90
-rw-r--r--spec/requests/api/issues/issues_spec.rb38
-rw-r--r--spec/requests/api/issues/post_projects_issues_spec.rb157
-rw-r--r--spec/requests/api/maven_packages_spec.rb24
-rw-r--r--spec/requests/api/merge_requests_spec.rb12
-rw-r--r--spec/requests/api/package_files_spec.rb2
-rw-r--r--spec/requests/api/project_container_repositories_spec.rb5
-rw-r--r--spec/requests/api/project_export_spec.rb139
-rw-r--r--spec/requests/api/projects_spec.rb25
-rw-r--r--spec/requests/api/repositories_spec.rb12
-rw-r--r--spec/requests/api/services_spec.rb361
-rw-r--r--spec/requests/api/settings_spec.rb5
-rw-r--r--spec/requests/api/users_spec.rb27
-rw-r--r--spec/requests/groups/registry/repositories_controller_spec.rb1
-rw-r--r--spec/requests/import/url_controller_spec.rb45
-rw-r--r--spec/requests/projects/cluster_agents_controller_spec.rb40
-rw-r--r--spec/requests/projects/google_cloud_controller_spec.rb50
-rw-r--r--spec/requests/projects/merge_requests_discussions_spec.rb12
-rw-r--r--spec/requests/rack_attack_global_spec.rb274
-rw-r--r--spec/routing/admin/serverless/domains_controller_routing_spec.rb22
-rw-r--r--spec/serializers/member_entity_spec.rb26
-rw-r--r--spec/serializers/merge_request_metrics_helper_spec.rb69
-rw-r--r--spec/serializers/merge_request_poll_cached_widget_entity_spec.rb2
-rw-r--r--spec/serializers/merge_request_widget_entity_spec.rb64
-rw-r--r--spec/services/application_settings/update_service_spec.rb26
-rw-r--r--spec/services/boards/issues/list_service_spec.rb21
-rw-r--r--spec/services/bulk_imports/create_service_spec.rb (renamed from spec/services/bulk_import_service_spec.rb)19
-rw-r--r--spec/services/bulk_imports/file_export_service_spec.rb37
-rw-r--r--spec/services/bulk_imports/get_importable_data_service_spec.rb46
-rw-r--r--spec/services/bulk_imports/relation_export_service_spec.rb18
-rw-r--r--spec/services/bulk_imports/tree_export_service_spec.rb35
-rw-r--r--spec/services/ci/archive_trace_service_spec.rb26
-rw-r--r--spec/services/ci/create_pipeline_service/include_spec.rb22
-rw-r--r--spec/services/ci/drop_pipeline_service_spec.rb3
-rw-r--r--spec/services/ci/pipelines/add_job_service_spec.rb13
-rw-r--r--spec/services/ci/pipelines/hook_service_spec.rb47
-rw-r--r--spec/services/ci/play_bridge_service_spec.rb12
-rw-r--r--spec/services/ci/process_pipeline_service_spec.rb68
-rw-r--r--spec/services/ci/register_job_service_spec.rb50
-rw-r--r--spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb86
-rw-r--r--spec/services/ci/retry_build_service_spec.rb13
-rw-r--r--spec/services/ci/retry_pipeline_service_spec.rb39
-rw-r--r--spec/services/ci/stuck_builds/drop_pending_service_spec.rb (renamed from spec/services/ci/stuck_builds/drop_service_spec.rb)97
-rw-r--r--spec/services/ci/stuck_builds/drop_running_service_spec.rb72
-rw-r--r--spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb53
-rw-r--r--spec/services/ci/update_build_state_service_spec.rb52
-rw-r--r--spec/services/ci/update_pending_build_service_spec.rb44
-rw-r--r--spec/services/clusters/agent_tokens/create_service_spec.rb64
-rw-r--r--spec/services/clusters/agents/create_service_spec.rb52
-rw-r--r--spec/services/clusters/agents/delete_service_spec.rb35
-rw-r--r--spec/services/concerns/rate_limited_service_spec.rb196
-rw-r--r--spec/services/container_expiration_policies/cleanup_service_spec.rb6
-rw-r--r--spec/services/customer_relations/contacts/create_service_spec.rb61
-rw-r--r--spec/services/customer_relations/contacts/update_service_spec.rb56
-rw-r--r--spec/services/customer_relations/organizations/create_service_spec.rb10
-rw-r--r--spec/services/customer_relations/organizations/update_service_spec.rb4
-rw-r--r--spec/services/dependency_proxy/auth_token_service_spec.rb75
-rw-r--r--spec/services/dependency_proxy/find_or_create_blob_service_spec.rb28
-rw-r--r--spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb32
-rw-r--r--spec/services/dependency_proxy/group_settings/update_service_spec.rb60
-rw-r--r--spec/services/deployments/older_deployments_drop_service_spec.rb2
-rw-r--r--spec/services/deployments/update_service_spec.rb8
-rw-r--r--spec/services/error_tracking/list_issues_service_spec.rb90
-rw-r--r--spec/services/feature_flags/hook_service_spec.rb31
-rw-r--r--spec/services/groups/transfer_service_spec.rb93
-rw-r--r--spec/services/groups/update_service_spec.rb2
-rw-r--r--spec/services/groups/update_shared_runners_service_spec.rb8
-rw-r--r--spec/services/import/validate_remote_git_endpoint_service_spec.rb96
-rw-r--r--spec/services/issues/close_service_spec.rb14
-rw-r--r--spec/services/issues/create_service_spec.rb19
-rw-r--r--spec/services/issues/relative_position_rebalancing_service_spec.rb15
-rw-r--r--spec/services/issues/reopen_service_spec.rb20
-rw-r--r--spec/services/members/create_service_spec.rb2
-rw-r--r--spec/services/members/invite_service_spec.rb2
-rw-r--r--spec/services/merge_requests/assign_issues_service_spec.rb2
-rw-r--r--spec/services/merge_requests/build_service_spec.rb2
-rw-r--r--spec/services/merge_requests/mergeability/check_base_service_spec.rb40
-rw-r--r--spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb57
-rw-r--r--spec/services/merge_requests/mergeability/run_checks_service_spec.rb104
-rw-r--r--spec/services/merge_requests/push_options_handler_service_spec.rb2
-rw-r--r--spec/services/notes/quick_actions_service_spec.rb2
-rw-r--r--spec/services/notification_service_spec.rb2
-rw-r--r--spec/services/packages/composer/create_package_service_spec.rb25
-rw-r--r--spec/services/packages/debian/process_changes_service_spec.rb2
-rw-r--r--spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb133
-rw-r--r--spec/services/projects/container_repository/cleanup_tags_service_spec.rb541
-rw-r--r--spec/services/projects/create_service_spec.rb40
-rw-r--r--spec/services/projects/destroy_service_spec.rb109
-rw-r--r--spec/services/projects/group_links/update_service_spec.rb90
-rw-r--r--spec/services/projects/import_service_spec.rb6
-rw-r--r--spec/services/projects/move_access_service_spec.rb2
-rw-r--r--spec/services/projects/operations/update_service_spec.rb8
-rw-r--r--spec/services/projects/participants_service_spec.rb146
-rw-r--r--spec/services/projects/transfer_service_spec.rb185
-rw-r--r--spec/services/projects/update_pages_service_spec.rb16
-rw-r--r--spec/services/projects/update_service_spec.rb62
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb15
-rw-r--r--spec/services/security/ci_configuration/sast_create_service_spec.rb23
-rw-r--r--spec/services/service_ping/submit_service_ping_service_spec.rb24
-rw-r--r--spec/services/user_project_access_changed_service_spec.rb4
-rw-r--r--spec/services/users/activity_service_spec.rb4
-rw-r--r--spec/services/users/update_service_spec.rb70
-rw-r--r--spec/services/users/upsert_credit_card_validation_service_spec.rb36
-rw-r--r--spec/services/web_hook_service_spec.rb2
-rw-r--r--spec/spec_helper.rb4
-rw-r--r--spec/support/before_all_adapter.rb14
-rw-r--r--spec/support/capybara.rb3
-rw-r--r--spec/support/database/cross-database-modification-allowlist.yml1343
-rw-r--r--spec/support/database/cross-join-allowlist.yml151
-rw-r--r--spec/support/database/multiple_databases.rb52
-rw-r--r--spec/support/database/prevent_cross_database_modification.rb24
-rw-r--r--spec/support/database/prevent_cross_joins.rb27
-rw-r--r--spec/support/database_cleaner.rb27
-rw-r--r--spec/support/database_load_balancing.rb30
-rw-r--r--spec/support/db_cleaner.rb75
-rw-r--r--spec/support/helpers/dependency_proxy_helpers.rb14
-rw-r--r--spec/support/helpers/feature_flag_helpers.rb2
-rw-r--r--spec/support/helpers/javascript_fixtures_helpers.rb13
-rw-r--r--spec/support/helpers/navbar_structure_helper.rb8
-rw-r--r--spec/support/helpers/stub_gitlab_calls.rb12
-rw-r--r--spec/support/helpers/usage_data_helpers.rb12
-rw-r--r--spec/support/matchers/be_request_urgency.rb8
-rw-r--r--spec/support/matchers/graphql_matchers.rb28
-rw-r--r--spec/support/matchers/markdown_matchers.rb2
-rw-r--r--spec/support/redis.rb16
-rw-r--r--spec/support/redis/redis_helpers.rb10
-rw-r--r--spec/support/redis/redis_new_instance_shared_examples.rb55
-rw-r--r--spec/support/redis/redis_shared_examples.rb28
-rw-r--r--spec/support/shared_contexts/bulk_imports_requests_shared_context.rb52
-rw-r--r--spec/support/shared_contexts/lib/gitlab/import_export/relation_tree_restorer_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb20
-rw-r--r--spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb5
-rw-r--r--spec/support/shared_contexts/policies/project_policy_shared_context.rb1
-rw-r--r--spec/support/shared_examples/ci/stuck_builds_shared_examples.rb35
-rw-r--r--spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/container_registry_shared_examples.rb9
-rw-r--r--spec/support/shared_examples/features/discussion_comments_shared_example.rb30
-rw-r--r--spec/support/shared_examples/features/project_upload_files_shared_examples.rb50
-rw-r--r--spec/support/shared_examples/graphql/connection_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/graphql/sorted_paginated_query_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb22
-rw-r--r--spec/support/shared_examples/lib/gitlab/cycle_analytics/event_shared_examples.rb35
-rw-r--r--spec/support/shared_examples/lib/gitlab/import_export/attributes_permitter_shared_examples.rb40
-rw-r--r--spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb36
-rw-r--r--spec/support/shared_examples/models/concerns/analytics/cycle_analytics/stage_event_model_examples.rb68
-rw-r--r--spec/support/shared_examples/models/concerns/ttl_expirable_shared_examples.rb51
-rw-r--r--spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb36
-rw-r--r--spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb37
-rw-r--r--spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb7
-rw-r--r--spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/requests/api/issuable_search_shared_examples.rb36
-rw-r--r--spec/support/shared_examples/requests/api/logging_application_context_shared_examples.rb16
-rw-r--r--spec/support/shared_examples/requests/rack_attack_shared_examples.rb5
-rw-r--r--spec/support/shared_examples/services/dependency_proxy_settings_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/workers/concerns/dependency_proxy/cleanup_worker_shared_examples.rb53
-rw-r--r--spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb24
-rw-r--r--spec/support_specs/database/multiple_databases_spec.rb59
-rw-r--r--spec/support_specs/database/prevent_cross_joins_spec.rb16
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb53
-rw-r--r--spec/tasks/gitlab/packages/composer_rake_spec.rb29
-rw-r--r--spec/tooling/danger/product_intelligence_spec.rb10
-rw-r--r--spec/tooling/danger/project_helper_spec.rb16
-rw-r--r--spec/tooling/danger/specs_spec.rb133
-rw-r--r--spec/tooling/quality/test_level_spec.rb18
-rw-r--r--spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb9
-rw-r--r--spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb17
-rw-r--r--spec/views/projects/branches/index.html.haml_spec.rb43
-rw-r--r--spec/views/projects/commits/_commit.html.haml_spec.rb18
-rw-r--r--spec/views/projects/services/edit.html.haml_spec.rb4
-rw-r--r--spec/views/projects/tags/index.html.haml_spec.rb22
-rw-r--r--spec/workers/authorized_project_update/project_recalculate_per_user_worker_spec.rb70
-rw-r--r--spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb2
-rw-r--r--spec/workers/build_finished_worker_spec.rb28
-rw-r--r--spec/workers/bulk_import_worker_spec.rb6
-rw-r--r--spec/workers/bulk_imports/export_request_worker_spec.rb29
-rw-r--r--spec/workers/bulk_imports/pipeline_worker_spec.rb112
-rw-r--r--spec/workers/ci/create_downstream_pipeline_worker_spec.rb37
-rw-r--r--spec/workers/ci/stuck_builds/drop_running_worker_spec.rb28
-rw-r--r--spec/workers/ci/stuck_builds/drop_scheduled_worker_spec.rb28
-rw-r--r--spec/workers/cleanup_container_repository_worker_spec.rb6
-rw-r--r--spec/workers/concerns/application_worker_spec.rb12
-rw-r--r--spec/workers/concerns/gitlab/github_import/object_importer_spec.rb63
-rw-r--r--spec/workers/concerns/worker_context_spec.rb4
-rw-r--r--spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb55
-rw-r--r--spec/workers/container_expiration_policy_worker_spec.rb2
-rw-r--r--spec/workers/create_note_diff_file_worker_spec.rb18
-rw-r--r--spec/workers/database/drop_detached_partitions_worker_spec.rb7
-rw-r--r--spec/workers/dependency_proxy/cleanup_blob_worker_spec.rb9
-rw-r--r--spec/workers/dependency_proxy/cleanup_manifest_worker_spec.rb9
-rw-r--r--spec/workers/dependency_proxy/image_ttl_group_policy_worker_spec.rb74
-rw-r--r--spec/workers/email_receiver_worker_spec.rb15
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb25
-rw-r--r--spec/workers/expire_job_cache_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb44
-rw-r--r--spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb28
-rw-r--r--spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb27
-rw-r--r--spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb11
-rw-r--r--spec/workers/issue_placement_worker_spec.rb2
-rw-r--r--spec/workers/packages/composer/cache_cleanup_worker_spec.rb8
-rw-r--r--spec/workers/packages/composer/cache_update_worker_spec.rb8
-rw-r--r--spec/workers/pages_remove_worker_spec.rb11
-rw-r--r--spec/workers/pipeline_hooks_worker_spec.rb8
-rw-r--r--spec/workers/pipeline_process_worker_spec.rb10
-rw-r--r--spec/workers/post_receive_spec.rb2
-rw-r--r--spec/workers/run_pipeline_schedule_worker_spec.rb15
-rw-r--r--spec/workers/stuck_ci_jobs_worker_spec.rb50
1206 files changed, 29993 insertions, 14770 deletions
diff --git a/spec/config/grape_entity_patch_spec.rb b/spec/config/grape_entity_patch_spec.rb
deleted file mode 100644
index 7334f270ca1..00000000000
--- a/spec/config/grape_entity_patch_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Grape::Entity patch' do
- let(:entity_class) { Class.new(Grape::Entity) }
-
- describe 'NameError in block exposure with argument' do
- subject(:represent) { entity_class.represent({}, serializable: true) }
-
- before do
- entity_class.expose :raise_no_method_error do |_|
- foo
- end
- end
-
- it 'propagates the error to the caller' do
- expect { represent }.to raise_error(NameError)
- end
- end
-end
diff --git a/spec/controllers/admin/instance_review_controller_spec.rb b/spec/controllers/admin/instance_review_controller_spec.rb
index d15894eeb5d..898cd30cdca 100644
--- a/spec/controllers/admin/instance_review_controller_spec.rb
+++ b/spec/controllers/admin/instance_review_controller_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Admin::InstanceReviewController do
include UsageDataHelpers
let(:admin) { create(:admin) }
- let(:subscriptions_url) { ::Gitlab::SubscriptionPortal::SUBSCRIPTIONS_URL }
+ let(:subscriptions_instance_review_url) { Gitlab::SubscriptionPortal.subscriptions_instance_review_url }
before do
sign_in(admin)
@@ -44,7 +44,7 @@ RSpec.describe Admin::InstanceReviewController do
notes_count: 0
} }.to_query
- expect(response).to redirect_to("#{subscriptions_url}/instance_review?#{params}")
+ expect(response).to redirect_to("#{subscriptions_instance_review_url}?#{params}")
end
end
@@ -61,7 +61,7 @@ RSpec.describe Admin::InstanceReviewController do
version: ::Gitlab::VERSION
} }.to_query
- expect(response).to redirect_to("#{subscriptions_url}/instance_review?#{params}")
+ expect(response).to redirect_to("#{subscriptions_instance_review_url}?#{params}")
end
end
end
diff --git a/spec/controllers/admin/serverless/domains_controller_spec.rb b/spec/controllers/admin/serverless/domains_controller_spec.rb
deleted file mode 100644
index e7503fb37fa..00000000000
--- a/spec/controllers/admin/serverless/domains_controller_spec.rb
+++ /dev/null
@@ -1,370 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Admin::Serverless::DomainsController do
- let(:admin) { create(:admin) }
- let(:user) { create(:user) }
-
- describe '#index' do
- context 'non-admin user' do
- before do
- sign_in(user)
- end
-
- it 'responds with 404' do
- get :index
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'admin user' do
- before do
- create(:pages_domain)
- sign_in(admin)
- end
-
- context 'with serverless_domain feature disabled' do
- before do
- stub_feature_flags(serverless_domain: false)
- end
-
- it 'responds with 404' do
- get :index
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when instance-level serverless domain exists' do
- let!(:serverless_domain) { create(:pages_domain, :instance_serverless) }
-
- it 'loads the instance serverless domain' do
- get :index
-
- expect(assigns(:domain).id).to eq(serverless_domain.id)
- end
- end
-
- context 'when domain does not exist' do
- it 'initializes an instance serverless domain' do
- get :index
-
- domain = assigns(:domain)
-
- expect(domain.persisted?).to eq(false)
- expect(domain.wildcard).to eq(true)
- expect(domain.scope).to eq('instance')
- expect(domain.usage).to eq('serverless')
- end
- end
- end
- end
-
- describe '#create' do
- let(:create_params) do
- sample_domain = build(:pages_domain)
-
- {
- domain: 'serverless.gitlab.io',
- user_provided_certificate: sample_domain.certificate,
- user_provided_key: sample_domain.key
- }
- end
-
- context 'non-admin user' do
- before do
- sign_in(user)
- end
-
- it 'responds with 404' do
- post :create, params: { pages_domain: create_params }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'admin user' do
- before do
- sign_in(admin)
- end
-
- context 'with serverless_domain feature disabled' do
- before do
- stub_feature_flags(serverless_domain: false)
- end
-
- it 'responds with 404' do
- post :create, params: { pages_domain: create_params }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when an instance-level serverless domain exists' do
- let!(:serverless_domain) { create(:pages_domain, :instance_serverless) }
-
- it 'does not create a new domain' do
- expect { post :create, params: { pages_domain: create_params } }.not_to change { PagesDomain.instance_serverless.count }
- end
-
- it 'redirects to index' do
- post :create, params: { pages_domain: create_params }
-
- expect(response).to redirect_to admin_serverless_domains_path
- expect(flash[:notice]).to include('An instance-level serverless domain already exists.')
- end
- end
-
- context 'when an instance-level serverless domain does not exist' do
- it 'creates an instance serverless domain with the provided attributes' do
- expect { post :create, params: { pages_domain: create_params } }.to change { PagesDomain.instance_serverless.count }.by(1)
-
- domain = PagesDomain.instance_serverless.first
- expect(domain.domain).to eq(create_params[:domain])
- expect(domain.certificate).to eq(create_params[:user_provided_certificate])
- expect(domain.key).to eq(create_params[:user_provided_key])
- expect(domain.wildcard).to eq(true)
- expect(domain.scope).to eq('instance')
- expect(domain.usage).to eq('serverless')
- end
-
- it 'redirects to index' do
- post :create, params: { pages_domain: create_params }
-
- expect(response).to redirect_to admin_serverless_domains_path
- expect(flash[:notice]).to include('Domain was successfully created.')
- end
- end
-
- context 'when there are errors' do
- it 'renders index view' do
- post :create, params: { pages_domain: { foo: 'bar' } }
-
- expect(assigns(:domain).errors.size).to be > 0
- expect(response).to render_template('index')
- end
- end
- end
- end
-
- describe '#update' do
- let(:domain) { create(:pages_domain, :instance_serverless) }
-
- let(:update_params) do
- sample_domain = build(:pages_domain)
-
- {
- user_provided_certificate: sample_domain.certificate,
- user_provided_key: sample_domain.key
- }
- end
-
- context 'non-admin user' do
- before do
- sign_in(user)
- end
-
- it 'responds with 404' do
- put :update, params: { id: domain.id, pages_domain: update_params }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'admin user' do
- before do
- sign_in(admin)
- end
-
- context 'with serverless_domain feature disabled' do
- before do
- stub_feature_flags(serverless_domain: false)
- end
-
- it 'responds with 404' do
- put :update, params: { id: domain.id, pages_domain: update_params }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when domain exists' do
- it 'updates the domain with the provided attributes' do
- new_certificate = build(:pages_domain, :ecdsa).certificate
- new_key = build(:pages_domain, :ecdsa).key
-
- put :update, params: { id: domain.id, pages_domain: { user_provided_certificate: new_certificate, user_provided_key: new_key } }
-
- domain.reload
-
- expect(domain.certificate).to eq(new_certificate)
- expect(domain.key).to eq(new_key)
- end
-
- it 'does not update the domain name' do
- put :update, params: { id: domain.id, pages_domain: { domain: 'new.com' } }
-
- expect(domain.reload.domain).not_to eq('new.com')
- end
-
- it 'redirects to index' do
- put :update, params: { id: domain.id, pages_domain: update_params }
-
- expect(response).to redirect_to admin_serverless_domains_path
- expect(flash[:notice]).to include('Domain was successfully updated.')
- end
- end
-
- context 'when domain does not exist' do
- it 'returns 404' do
- put :update, params: { id: 0, pages_domain: update_params }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when there are errors' do
- it 'renders index view' do
- put :update, params: { id: domain.id, pages_domain: { user_provided_certificate: 'bad certificate' } }
-
- expect(assigns(:domain).errors.size).to be > 0
- expect(response).to render_template('index')
- end
- end
- end
- end
-
- describe '#verify' do
- let(:domain) { create(:pages_domain, :instance_serverless) }
-
- context 'non-admin user' do
- before do
- sign_in(user)
- end
-
- it 'responds with 404' do
- post :verify, params: { id: domain.id }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'admin user' do
- before do
- sign_in(admin)
- end
-
- def stub_service
- service = double(:service)
-
- expect(VerifyPagesDomainService).to receive(:new).with(domain).and_return(service)
-
- service
- end
-
- context 'with serverless_domain feature disabled' do
- before do
- stub_feature_flags(serverless_domain: false)
- end
-
- it 'responds with 404' do
- post :verify, params: { id: domain.id }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- it 'handles verification success' do
- expect(stub_service).to receive(:execute).and_return(status: :success)
-
- post :verify, params: { id: domain.id }
-
- expect(response).to redirect_to admin_serverless_domains_path
- expect(flash[:notice]).to eq('Successfully verified domain ownership')
- end
-
- it 'handles verification failure' do
- expect(stub_service).to receive(:execute).and_return(status: :failed)
-
- post :verify, params: { id: domain.id }
-
- expect(response).to redirect_to admin_serverless_domains_path
- expect(flash[:alert]).to eq('Failed to verify domain ownership')
- end
- end
- end
-
- describe '#destroy' do
- let!(:domain) { create(:pages_domain, :instance_serverless) }
-
- context 'non-admin user' do
- before do
- sign_in(user)
- end
-
- it 'responds with 404' do
- delete :destroy, params: { id: domain.id }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'admin user' do
- before do
- sign_in(admin)
- end
-
- context 'with serverless_domain feature disabled' do
- before do
- stub_feature_flags(serverless_domain: false)
- end
-
- it 'responds with 404' do
- delete :destroy, params: { id: domain.id }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when domain exists' do
- context 'and is not associated to any clusters' do
- it 'deletes the domain' do
- expect { delete :destroy, params: { id: domain.id } }
- .to change { PagesDomain.count }.from(1).to(0)
-
- expect(response).to have_gitlab_http_status(:found)
- expect(flash[:notice]).to include('Domain was successfully deleted.')
- end
- end
-
- context 'and is associated to any clusters' do
- before do
- create(:serverless_domain_cluster, pages_domain: domain)
- end
-
- it 'does not delete the domain' do
- expect { delete :destroy, params: { id: domain.id } }
- .not_to change { PagesDomain.count }
-
- expect(response).to have_gitlab_http_status(:conflict)
- expect(flash[:notice]).to include('Domain cannot be deleted while associated to one or more clusters.')
- end
- end
- end
-
- context 'when domain does not exist' do
- before do
- domain.destroy!
- end
-
- it 'responds with 404' do
- delete :destroy, params: { id: domain.id }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
- end
-end
diff --git a/spec/controllers/admin/topics/avatars_controller_spec.rb b/spec/controllers/admin/topics/avatars_controller_spec.rb
new file mode 100644
index 00000000000..7edc0e0c497
--- /dev/null
+++ b/spec/controllers/admin/topics/avatars_controller_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::Topics::AvatarsController do
+ let(:user) { create(:admin) }
+ let(:topic) { create(:topic, avatar: fixture_file_upload("spec/fixtures/dk.png")) }
+
+ before do
+ sign_in(user)
+ controller.instance_variable_set(:@topic, topic)
+ end
+
+ it 'removes avatar from DB by calling destroy' do
+ delete :destroy, params: { topic_id: topic.id }
+ @topic = assigns(:topic)
+ expect(@topic.avatar.present?).to be_falsey
+ expect(@topic).to be_valid
+ end
+end
diff --git a/spec/controllers/admin/topics_controller_spec.rb b/spec/controllers/admin/topics_controller_spec.rb
new file mode 100644
index 00000000000..6d66cb43338
--- /dev/null
+++ b/spec/controllers/admin/topics_controller_spec.rb
@@ -0,0 +1,131 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::TopicsController do
+ let_it_be(:topic) { create(:topic, name: 'topic') }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(admin)
+ end
+
+ describe 'GET #index' do
+ it 'renders the template' do
+ get :index
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('index')
+ end
+
+ context 'as a normal user' do
+ before do
+ sign_in(user)
+ end
+
+ it 'renders a 404 error' do
+ get :index
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'GET #new' do
+ it 'renders the template' do
+ get :new
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('new')
+ end
+
+ context 'as a normal user' do
+ before do
+ sign_in(user)
+ end
+
+ it 'renders a 404 error' do
+ get :new
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'GET #edit' do
+ it 'renders the template' do
+ get :edit, params: { id: topic.id }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('edit')
+ end
+
+ context 'as a normal user' do
+ before do
+ sign_in(user)
+ end
+
+ it 'renders a 404 error' do
+ get :edit, params: { id: topic.id }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'POST #create' do
+ it 'creates topic' do
+ expect do
+ post :create, params: { projects_topic: { name: 'test' } }
+ end.to change { Projects::Topic.count }.by(1)
+ end
+
+ it 'shows error message for invalid topic' do
+ post :create, params: { projects_topic: { name: nil } }
+
+ errors = assigns[:topic].errors
+ expect(errors).to contain_exactly(errors.full_message(:name, I18n.t('errors.messages.blank')))
+ end
+
+ context 'as a normal user' do
+ before do
+ sign_in(user)
+ end
+
+ it 'renders a 404 error' do
+ post :create, params: { projects_topic: { name: 'test' } }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'PUT #update' do
+ it 'updates topic' do
+ put :update, params: { id: topic.id, projects_topic: { name: 'test' } }
+
+ expect(response).to redirect_to(edit_admin_topic_path(topic))
+ expect(topic.reload.name).to eq('test')
+ end
+
+ it 'shows error message for invalid topic' do
+ put :update, params: { id: topic.id, projects_topic: { name: nil } }
+
+ errors = assigns[:topic].errors
+ expect(errors).to contain_exactly(errors.full_message(:name, I18n.t('errors.messages.blank')))
+ end
+
+ context 'as a normal user' do
+ before do
+ sign_in(user)
+ end
+
+ it 'renders a 404 error' do
+ put :update, params: { id: topic.id, projects_topic: { name: 'test' } }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index 218aa04dd3f..e9a49319f21 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -704,7 +704,7 @@ RSpec.describe ApplicationController do
get :index
- expect(response.headers['Cache-Control']).to eq 'no-store'
+ expect(response.headers['Cache-Control']).to eq 'private, no-store'
expect(response.headers['Pragma']).to eq 'no-cache'
end
@@ -740,7 +740,7 @@ RSpec.describe ApplicationController do
it 'sets no-cache headers', :aggregate_failures do
subject
- expect(response.headers['Cache-Control']).to eq 'no-store'
+ expect(response.headers['Cache-Control']).to eq 'private, no-store'
expect(response.headers['Pragma']).to eq 'no-cache'
expect(response.headers['Expires']).to eq 'Fri, 01 Jan 1990 00:00:00 GMT'
end
@@ -967,6 +967,14 @@ RSpec.describe ApplicationController do
end
end
+ describe '.endpoint_id_for_action' do
+ controller(described_class) { }
+
+ it 'returns an expected endpoint id' do
+ expect(controller.class.endpoint_id_for_action('hello')).to eq('AnonymousController#hello')
+ end
+ end
+
describe '#current_user' do
controller(described_class) do
def index; end
diff --git a/spec/controllers/boards/issues_controller_spec.rb b/spec/controllers/boards/issues_controller_spec.rb
index cc60ab16d2e..b2200050e41 100644
--- a/spec/controllers/boards/issues_controller_spec.rb
+++ b/spec/controllers/boards/issues_controller_spec.rb
@@ -116,7 +116,7 @@ RSpec.describe Boards::IssuesController do
it 'does not query issues table more than once' do
recorder = ActiveRecord::QueryRecorder.new { list_issues(user: user, board: board, list: list1) }
- query_count = recorder.occurrences.select { |query,| query.start_with?('SELECT issues.*') }.each_value.first
+ query_count = recorder.occurrences.select { |query,| query.match?(/FROM "?issues"?/) }.each_value.first
expect(query_count).to eq(1)
end
diff --git a/spec/controllers/concerns/group_tree_spec.rb b/spec/controllers/concerns/group_tree_spec.rb
index a0707688e54..e808f1caa6e 100644
--- a/spec/controllers/concerns/group_tree_spec.rb
+++ b/spec/controllers/concerns/group_tree_spec.rb
@@ -21,82 +21,94 @@ RSpec.describe GroupTree do
end
describe 'GET #index' do
- it 'filters groups' do
- other_group = create(:group, name: 'filter')
- other_group.add_owner(user)
+ shared_examples 'returns filtered groups' do
+ it 'filters groups' do
+ other_group = create(:group, name: 'filter')
+ other_group.add_owner(user)
- get :index, params: { filter: 'filt' }, format: :json
+ get :index, params: { filter: 'filt' }, format: :json
- expect(assigns(:groups)).to contain_exactly(other_group)
- end
+ expect(assigns(:groups)).to contain_exactly(other_group)
+ end
- context 'for subgroups' do
- it 'only renders root groups when no parent was given' do
- create(:group, :public, parent: group)
+ context 'for subgroups' do
+ it 'only renders root groups when no parent was given' do
+ create(:group, :public, parent: group)
- get :index, format: :json
+ get :index, format: :json
- expect(assigns(:groups)).to contain_exactly(group)
- end
+ expect(assigns(:groups)).to contain_exactly(group)
+ end
- it 'contains only the subgroup when a parent was given' do
- subgroup = create(:group, :public, parent: group)
+ it 'contains only the subgroup when a parent was given' do
+ subgroup = create(:group, :public, parent: group)
- get :index, params: { parent_id: group.id }, format: :json
+ get :index, params: { parent_id: group.id }, format: :json
- expect(assigns(:groups)).to contain_exactly(subgroup)
- end
+ expect(assigns(:groups)).to contain_exactly(subgroup)
+ end
- it 'allows filtering for subgroups and includes the parents for rendering' do
- subgroup = create(:group, :public, parent: group, name: 'filter')
+ it 'allows filtering for subgroups and includes the parents for rendering' do
+ subgroup = create(:group, :public, parent: group, name: 'filter')
- get :index, params: { filter: 'filt' }, format: :json
+ get :index, params: { filter: 'filt' }, format: :json
- expect(assigns(:groups)).to contain_exactly(group, subgroup)
- end
+ expect(assigns(:groups)).to contain_exactly(group, subgroup)
+ end
- it 'does not include groups the user does not have access to' do
- parent = create(:group, :private)
- subgroup = create(:group, :private, parent: parent, name: 'filter')
- subgroup.add_developer(user)
- _other_subgroup = create(:group, :private, parent: parent, name: 'filte')
+ it 'does not include groups the user does not have access to' do
+ parent = create(:group, :private)
+ subgroup = create(:group, :private, parent: parent, name: 'filter')
+ subgroup.add_developer(user)
+ _other_subgroup = create(:group, :private, parent: parent, name: 'filte')
- get :index, params: { filter: 'filt' }, format: :json
+ get :index, params: { filter: 'filt' }, format: :json
- expect(assigns(:groups)).to contain_exactly(parent, subgroup)
- end
+ expect(assigns(:groups)).to contain_exactly(parent, subgroup)
+ end
- it 'preloads parents regardless of pagination' do
- allow(Kaminari.config).to receive(:default_per_page).and_return(1)
- group = create(:group, :public)
- subgroup = create(:group, :public, parent: group)
- search_result = create(:group, :public, name: 'result', parent: subgroup)
+ it 'preloads parents regardless of pagination' do
+ allow(Kaminari.config).to receive(:default_per_page).and_return(1)
+ group = create(:group, :public)
+ subgroup = create(:group, :public, parent: group)
+ search_result = create(:group, :public, name: 'result', parent: subgroup)
- get :index, params: { filter: 'resu' }, format: :json
+ get :index, params: { filter: 'resu' }, format: :json
- expect(assigns(:groups)).to contain_exactly(group, subgroup, search_result)
+ expect(assigns(:groups)).to contain_exactly(group, subgroup, search_result)
+ end
end
- end
- context 'json content' do
- it 'shows groups as json' do
- get :index, format: :json
+ context 'json content' do
+ it 'shows groups as json' do
+ get :index, format: :json
- expect(json_response.first['id']).to eq(group.id)
- end
+ expect(json_response.first['id']).to eq(group.id)
+ end
- context 'nested groups' do
- it 'expands the tree when filtering' do
- subgroup = create(:group, :public, parent: group, name: 'filter')
+ context 'nested groups' do
+ it 'expands the tree when filtering' do
+ subgroup = create(:group, :public, parent: group, name: 'filter')
- get :index, params: { filter: 'filt' }, format: :json
+ get :index, params: { filter: 'filt' }, format: :json
- children_response = json_response.first['children']
+ children_response = json_response.first['children']
- expect(json_response.first['id']).to eq(group.id)
- expect(children_response.first['id']).to eq(subgroup.id)
+ expect(json_response.first['id']).to eq(group.id)
+ expect(children_response.first['id']).to eq(subgroup.id)
+ end
end
end
end
+
+ it_behaves_like 'returns filtered groups'
+
+ context 'when feature flag :linear_group_tree_ancestor_scopes is disabled' do
+ before do
+ stub_feature_flags(linear_group_tree_ancestor_scopes: false)
+ end
+
+ it_behaves_like 'returns filtered groups'
+ end
end
end
diff --git a/spec/controllers/dashboard/milestones_controller_spec.rb b/spec/controllers/dashboard/milestones_controller_spec.rb
index 899aa7a41c1..2d41bc431ec 100644
--- a/spec/controllers/dashboard/milestones_controller_spec.rb
+++ b/spec/controllers/dashboard/milestones_controller_spec.rb
@@ -65,11 +65,12 @@ RSpec.describe Dashboard::MilestonesController do
expect(response.body).not_to include(project_milestone.title)
end
- it 'shows counts of open and closed group and project milestones to which the user belongs to' do
+ it 'shows counts of open/closed/all group and project milestones to which the user belongs' do
get :index
- expect(response.body).to include("Open\n<span class=\"badge badge-pill\">2</span>")
- expect(response.body).to include("Closed\n<span class=\"badge badge-pill\">2</span>")
+ expect(response.body).to have_content('Open 2')
+ expect(response.body).to have_content('Closed 2')
+ expect(response.body).to have_content('All 4')
end
context 'external authorization' do
diff --git a/spec/controllers/every_controller_spec.rb b/spec/controllers/every_controller_spec.rb
index a1c377eff76..902872b6e92 100644
--- a/spec/controllers/every_controller_spec.rb
+++ b/spec/controllers/every_controller_spec.rb
@@ -1,24 +1,14 @@
# frozen_string_literal: true
require 'spec_helper'
-
RSpec.describe "Every controller" do
context "feature categories" do
let_it_be(:feature_categories) do
- YAML.load_file(Rails.root.join('config', 'feature_categories.yml')).map(&:to_sym).to_set
+ Gitlab::FeatureCategories.default.categories.map(&:to_sym).to_set
end
let_it_be(:controller_actions) do
- # This will return tuples of all controller actions defined in the routes
- # Only for controllers inheriting ApplicationController
- # Excluding controllers from gems (OAuth, Sidekiq)
- Rails.application.routes.routes
- .map { |route| route.required_defaults.presence }
- .compact
- .select { |route| route[:controller].present? && route[:action].present? }
- .map { |route| [constantize_controller(route[:controller]), route[:action]] }
- .select { |(controller, action)| controller&.include?(::Gitlab::WithFeatureCategory) }
- .reject { |(controller, action)| controller == ApplicationController || controller == Devise::UnlocksController }
+ Gitlab::RequestEndpoints.all_controller_actions
end
let_it_be(:routes_without_category) do
@@ -74,9 +64,6 @@ RSpec.describe "Every controller" do
end
def actions_defined_in_feature_category_config(controller)
- controller.send(:class_attributes)[:feature_category_config]
- .values
- .flatten
- .map(&:to_s)
+ controller.send(:class_attributes)[:endpoint_attributes_config].defined_actions
end
end
diff --git a/spec/controllers/graphql_controller_spec.rb b/spec/controllers/graphql_controller_spec.rb
index aed97a01a72..6e7bcfdaa08 100644
--- a/spec/controllers/graphql_controller_spec.rb
+++ b/spec/controllers/graphql_controller_spec.rb
@@ -38,6 +38,14 @@ RSpec.describe GraphqlController do
sign_in(user)
end
+ it 'sets feature category in ApplicationContext from request' do
+ request.headers["HTTP_X_GITLAB_FEATURE_CATEGORY"] = "web_ide"
+
+ post :execute
+
+ expect(::Gitlab::ApplicationContext.current_context_attribute(:feature_category)).to eq('web_ide')
+ end
+
it 'returns 200 when user can access API' do
post :execute
diff --git a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
index 7415c2860c8..fa402d556c7 100644
--- a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
+++ b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Groups::DependencyProxyForContainersController do
include HttpBasicAuthHelpers
include DependencyProxyHelpers
+ include WorkhorseHelpers
let_it_be(:user) { create(:user) }
let_it_be_with_reload(:group) { create(:group, :private) }
@@ -242,16 +243,9 @@ RSpec.describe Groups::DependencyProxyForContainersController do
end
describe 'GET #blob' do
- let_it_be(:blob) { create(:dependency_proxy_blob) }
+ let(:blob) { create(:dependency_proxy_blob, group: group) }
let(:blob_sha) { blob.file_name.sub('.gz', '') }
- let(:blob_response) { { status: :success, blob: blob, from_cache: false } }
-
- before do
- allow_next_instance_of(DependencyProxy::FindOrCreateBlobService) do |instance|
- allow(instance).to receive(:execute).and_return(blob_response)
- end
- end
subject { get_blob }
@@ -264,40 +258,31 @@ RSpec.describe Groups::DependencyProxyForContainersController do
it_behaves_like 'without permission'
it_behaves_like 'feature flag disabled with private group'
- context 'remote blob request fails' do
- let(:blob_response) do
- {
- status: :error,
- http_status: 400,
- message: ''
- }
- end
-
- before do
- group.add_guest(user)
- end
-
- it 'proxies status from the remote blob request', :aggregate_failures do
- subject
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(response.body).to be_empty
- end
- end
-
context 'a valid user' do
before do
group.add_guest(user)
end
it_behaves_like 'a successful blob pull'
- it_behaves_like 'a package tracking event', described_class.name, 'pull_blob'
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_blob_from_cache'
- context 'with a cache entry' do
- let(:blob_response) { { status: :success, blob: blob, from_cache: true } }
+ context 'when cache entry does not exist' do
+ let(:blob_sha) { 'a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4' }
- it_behaves_like 'returning response status', :success
- it_behaves_like 'a package tracking event', described_class.name, 'pull_blob_from_cache'
+ it 'returns Workhorse send-dependency instructions' do
+ subject
+
+ send_data_type, send_data = workhorse_send_data
+ header, url = send_data.values_at('Header', 'Url')
+
+ expect(send_data_type).to eq('send-dependency')
+ expect(header).to eq("Authorization" => ["Bearer abcd1234"])
+ expect(url).to eq(DependencyProxy::Registry.blob_url('alpine', blob_sha))
+ expect(response.headers['Content-Type']).to eq('application/gzip')
+ expect(response.headers['Content-Disposition']).to eq(
+ ActionDispatch::Http::ContentDisposition.format(disposition: 'attachment', filename: blob.file_name)
+ )
+ end
end
end
@@ -319,6 +304,74 @@ RSpec.describe Groups::DependencyProxyForContainersController do
it_behaves_like 'a successful blob pull'
end
end
+
+ context 'when dependency_proxy_workhorse disabled' do
+ let(:blob_response) { { status: :success, blob: blob, from_cache: false } }
+
+ before do
+ stub_feature_flags(dependency_proxy_workhorse: false)
+
+ allow_next_instance_of(DependencyProxy::FindOrCreateBlobService) do |instance|
+ allow(instance).to receive(:execute).and_return(blob_response)
+ end
+ end
+
+ context 'remote blob request fails' do
+ let(:blob_response) do
+ {
+ status: :error,
+ http_status: 400,
+ message: ''
+ }
+ end
+
+ before do
+ group.add_guest(user)
+ end
+
+ it 'proxies status from the remote blob request', :aggregate_failures do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response.body).to be_empty
+ end
+ end
+
+ context 'a valid user' do
+ before do
+ group.add_guest(user)
+ end
+
+ it_behaves_like 'a successful blob pull'
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_blob'
+
+ context 'with a cache entry' do
+ let(:blob_response) { { status: :success, blob: blob, from_cache: true } }
+
+ it_behaves_like 'returning response status', :success
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_blob_from_cache'
+ end
+ end
+
+ context 'a valid deploy token' do
+ let_it_be(:user) { create(:deploy_token, :group, :dependency_proxy_scopes) }
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: user, group: group) }
+
+ it_behaves_like 'a successful blob pull'
+
+ context 'pulling from a subgroup' do
+ let_it_be_with_reload(:parent_group) { create(:group) }
+ let_it_be_with_reload(:group) { create(:group, parent: parent_group) }
+
+ before do
+ parent_group.create_dependency_proxy_setting!(enabled: true)
+ group_deploy_token.update_column(:group_id, parent_group.id)
+ end
+
+ it_behaves_like 'a successful blob pull'
+ end
+ end
+ end
end
it_behaves_like 'not found when disabled'
@@ -328,6 +381,61 @@ RSpec.describe Groups::DependencyProxyForContainersController do
end
end
+ describe 'GET #authorize_upload_blob' do
+ let(:blob_sha) { 'a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4' }
+
+ subject(:authorize_upload_blob) do
+ request.headers.merge!(workhorse_internal_api_request_header)
+
+ get :authorize_upload_blob, params: { group_id: group.to_param, image: 'alpine', sha: blob_sha }
+ end
+
+ it_behaves_like 'without permission'
+
+ context 'with a valid user' do
+ before do
+ group.add_guest(user)
+ end
+
+ it 'sends Workhorse file upload instructions', :aggregate_failures do
+ authorize_upload_blob
+
+ expect(response.headers['Content-Type']).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response['TempPath']).to eq(DependencyProxy::FileUploader.workhorse_local_upload_path)
+ end
+ end
+ end
+
+ describe 'GET #upload_blob' do
+ let(:blob_sha) { 'a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4' }
+ let(:file) { fixture_file_upload("spec/fixtures/dependency_proxy/#{blob_sha}.gz", 'application/gzip') }
+
+ subject do
+ request.headers.merge!(workhorse_internal_api_request_header)
+
+ get :upload_blob, params: {
+ group_id: group.to_param,
+ image: 'alpine',
+ sha: blob_sha,
+ file: file
+ }
+ end
+
+ it_behaves_like 'without permission'
+
+ context 'with a valid user' do
+ before do
+ group.add_guest(user)
+
+ expect_next_found_instance_of(Group) do |instance|
+ expect(instance).to receive_message_chain(:dependency_proxy_blobs, :create!)
+ end
+ end
+
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_blob'
+ end
+ end
+
def enable_dependency_proxy
group.create_dependency_proxy_setting!(enabled: true)
end
diff --git a/spec/controllers/groups/registry/repositories_controller_spec.rb b/spec/controllers/groups/registry/repositories_controller_spec.rb
index f4541eda293..9ac19b06718 100644
--- a/spec/controllers/groups/registry/repositories_controller_spec.rb
+++ b/spec/controllers/groups/registry/repositories_controller_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Groups::Registry::RepositoriesController do
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: [])
+ stub_container_registry_info
group.add_owner(user)
group.add_guest(guest)
sign_in(user)
@@ -37,6 +38,18 @@ RSpec.describe Groups::Registry::RepositoriesController do
'name' => repo.name
)
end
+
+ [ContainerRegistry::Path::InvalidRegistryPathError, Faraday::Error].each do |error_class|
+ context "when there is a #{error_class}" do
+ it 'displays a connection error message' do
+ expect(::ContainerRegistry::Client).to receive(:registry_info).and_raise(error_class, nil, nil)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
end
shared_examples 'with name parameter' do
@@ -71,6 +84,18 @@ RSpec.describe Groups::Registry::RepositoriesController do
expect(response).to have_gitlab_http_status(:ok)
expect_no_snowplow_event
end
+
+ [ContainerRegistry::Path::InvalidRegistryPathError, Faraday::Error].each do |error_class|
+ context "when there is an invalid path error #{error_class}" do
+ it 'displays a connection error message' do
+ expect(::ContainerRegistry::Client).to receive(:registry_info).and_raise(error_class, nil, nil)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
end
context 'json format' do
diff --git a/spec/controllers/help_controller_spec.rb b/spec/controllers/help_controller_spec.rb
index 599e82afe9b..4e2123c8cc4 100644
--- a/spec/controllers/help_controller_spec.rb
+++ b/spec/controllers/help_controller_spec.rb
@@ -34,14 +34,6 @@ RSpec.describe HelpController do
is_expected.to redirect_to("#{documentation_base_url}/ee/#{path}.html")
end
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(help_page_documentation_redirect: false)
- end
-
- it_behaves_like 'documentation pages local render'
- end
end
before do
diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb
index 3b2ed2c63ed..3adba32c74a 100644
--- a/spec/controllers/import/bulk_imports_controller_spec.rb
+++ b/spec/controllers/import/bulk_imports_controller_spec.rb
@@ -51,62 +51,87 @@ RSpec.describe Import::BulkImportsController do
end
describe 'GET status' do
+ def get_status(params_override = {})
+ params = { page: 1, per_page: 20, filter: '' }.merge(params_override)
+
+ get :status,
+ params: params,
+ format: :json,
+ session: {
+ bulk_import_gitlab_url: 'https://gitlab.example.com',
+ bulk_import_gitlab_access_token: 'demo-pat'
+ }
+ end
+
+ include_context 'bulk imports requests context', 'https://gitlab.example.com'
+
let(:client) { BulkImports::Clients::HTTP.new(url: 'http://gitlab.example', token: 'token') }
+ let(:version) { "#{BulkImport::MIN_MAJOR_VERSION}.#{BulkImport::MIN_MINOR_VERSION_FOR_PROJECT}.0" }
+ let(:version_response) { double(code: 200, success?: true, parsed_response: { 'version' => version }) }
describe 'serialized group data' do
- let(:client_response) do
+ let(:expected_response) do
double(
parsed_response: [
- { 'id' => 1, 'full_name' => 'group1', 'full_path' => 'full/path/group1', 'web_url' => 'http://demo.host/full/path/group1' },
- { 'id' => 2, 'full_name' => 'group2', 'full_path' => 'full/path/group2', 'web_url' => 'http://demo.host/full/path/group1' }
+ {
+ "full_name" => "Stub",
+ "full_path" => "stub-group",
+ "id" => 2595438,
+ "web_url" => "https://gitlab.com/groups/auto-breakfast"
+ }
],
headers: {
'x-next-page' => '2',
'x-page' => '1',
'x-per-page' => '20',
- 'x-total' => '37',
+ 'x-total' => '42',
'x-total-pages' => '2'
}
)
end
- let(:client_params) do
- {
- top_level_only: true,
- min_access_level: Gitlab::Access::OWNER
- }
- end
-
- before do
- allow(controller).to receive(:client).and_return(client)
- allow(client).to receive(:get).with('groups', client_params).and_return(client_response)
- end
-
it 'returns serialized group data' do
- get :status, format: :json
+ get_status
+
+ version_validation = {
+ "features" => {
+ "project_migration" => {
+ "available" => true,
+ "min_version" => BulkImport.min_gl_version_for_project_migration.to_s
+ },
+ "source_instance_version" => version
+ }
+ }
- expect(json_response).to eq({ importable_data: client_response.parsed_response }.as_json)
+ expect(json_response).to include("importable_data" => expected_response.parsed_response, "version_validation" => hash_including(version_validation))
end
it 'forwards pagination headers' do
- get :status, format: :json
-
- expect(response.headers['x-per-page']).to eq client_response.headers['x-per-page']
- expect(response.headers['x-page']).to eq client_response.headers['x-page']
- expect(response.headers['x-next-page']).to eq client_response.headers['x-next-page']
- expect(response.headers['x-prev-page']).to eq client_response.headers['x-prev-page']
- expect(response.headers['x-total']).to eq client_response.headers['x-total']
- expect(response.headers['x-total-pages']).to eq client_response.headers['x-total-pages']
+ get_status
+
+ expect(response.headers['x-per-page']).to eq expected_response.headers['x-per-page']
+ expect(response.headers['x-page']).to eq expected_response.headers['x-page']
+ expect(response.headers['x-next-page']).to eq expected_response.headers['x-next-page']
+ expect(response.headers['x-prev-page']).to eq expected_response.headers['x-prev-page']
+ expect(response.headers['x-total']).to eq expected_response.headers['x-total']
+ expect(response.headers['x-total-pages']).to eq expected_response.headers['x-total-pages']
end
context 'when filtering' do
- it 'returns filtered result' do
- filter = 'test'
- search_params = client_params.merge(search: filter)
+ let_it_be(:filter) { 'test' }
- expect(client).to receive(:get).with('groups', search_params).and_return(client_response)
+ let(:client_params) do
+ {
+ top_level_only: true,
+ min_access_level: Gitlab::Access::OWNER,
+ search: filter
+ }
+ end
+
+ it 'returns filtered result' do
+ get_status(filter: filter)
- get :status, format: :json, params: { filter: filter }
+ expect(json_response['importable_data'].first['full_name']).to eq('Test')
end
end
end
@@ -148,18 +173,19 @@ RSpec.describe Import::BulkImportsController do
context 'when connection error occurs' do
before do
- allow(controller).to receive(:client).and_return(client)
- allow(client).to receive(:get).and_raise(BulkImports::Error)
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
+ allow(instance).to receive(:get).and_raise(BulkImports::Error)
+ end
end
it 'returns 422' do
- get :status, format: :json
+ get_status
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
it 'clears session' do
- get :status, format: :json
+ get_status
expect(session[:gitlab_url]).to be_nil
expect(session[:gitlab_access_token]).to be_nil
@@ -199,9 +225,9 @@ RSpec.describe Import::BulkImportsController do
session[:bulk_import_gitlab_url] = instance_url
end
- it 'executes BulkImportService' do
+ it 'executes BulkImports::CreateService' do
expect_next_instance_of(
- BulkImportService, user, bulk_import_params, { url: instance_url, access_token: pat }) do |service|
+ ::BulkImports::CreateService, user, bulk_import_params, { url: instance_url, access_token: pat }) do |service|
allow(service).to receive(:execute).and_return(ServiceResponse.success(payload: bulk_import))
end
@@ -214,7 +240,7 @@ RSpec.describe Import::BulkImportsController do
it 'returns error when validation fails' do
error_response = ServiceResponse.error(message: 'Record invalid', http_status: :unprocessable_entity)
expect_next_instance_of(
- BulkImportService, user, bulk_import_params, { url: instance_url, access_token: pat }) do |service|
+ ::BulkImports::CreateService, user, bulk_import_params, { url: instance_url, access_token: pat }) do |service|
allow(service).to receive(:execute).and_return(error_response)
end
diff --git a/spec/controllers/jira_connect/app_descriptor_controller_spec.rb b/spec/controllers/jira_connect/app_descriptor_controller_spec.rb
index 25c11d92b4e..9d890efdd33 100644
--- a/spec/controllers/jira_connect/app_descriptor_controller_spec.rb
+++ b/spec/controllers/jira_connect/app_descriptor_controller_spec.rb
@@ -46,7 +46,8 @@ RSpec.describe JiraConnect::AppDescriptorController do
apiVersion: 1,
apiMigrations: {
'context-qsh': true,
- gdpr: true
+ gdpr: true,
+ 'signed-install': true
}
)
@@ -89,5 +90,17 @@ RSpec.describe JiraConnect::AppDescriptorController do
)
)
end
+
+ context 'when jira_connect_asymmetric_jwt is disabled' do
+ before do
+ stub_feature_flags(jira_connect_asymmetric_jwt: false)
+ end
+
+ specify do
+ get :show
+
+ expect(json_response).to include('apiMigrations' => include('signed-install' => false))
+ end
+ end
end
end
diff --git a/spec/controllers/jira_connect/events_controller_spec.rb b/spec/controllers/jira_connect/events_controller_spec.rb
index e9fecb594a7..78bd0dc8318 100644
--- a/spec/controllers/jira_connect/events_controller_spec.rb
+++ b/spec/controllers/jira_connect/events_controller_spec.rb
@@ -3,9 +3,49 @@
require 'spec_helper'
RSpec.describe JiraConnect::EventsController do
+ shared_examples 'verifies asymmetric JWT token' do
+ context 'when token is valid' do
+ include_context 'valid JWT token'
+
+ it 'renders successful' do
+ send_request
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
+
+ context 'when token is invalid' do
+ include_context 'invalid JWT token'
+
+ it 'renders unauthorized' do
+ send_request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+
+ shared_context 'valid JWT token' do
+ before do
+ allow_next_instance_of(Atlassian::JiraConnect::AsymmetricJwt) do |asymmetric_jwt|
+ allow(asymmetric_jwt).to receive(:valid?).and_return(true)
+ allow(asymmetric_jwt).to receive(:iss_claim).and_return(client_key)
+ end
+ end
+ end
+
+ shared_context 'invalid JWT token' do
+ before do
+ allow_next_instance_of(Atlassian::JiraConnect::AsymmetricJwt) do |asymmetric_jwt|
+ allow(asymmetric_jwt).to receive(:valid?).and_return(false)
+ end
+ end
+ end
+
describe '#installed' do
let(:client_key) { '1234' }
let(:shared_secret) { 'secret' }
+
let(:params) do
{
clientKey: client_key,
@@ -14,10 +54,16 @@ RSpec.describe JiraConnect::EventsController do
}
end
+ include_context 'valid JWT token'
+
subject do
post :installed, params: params
end
+ it_behaves_like 'verifies asymmetric JWT token' do
+ let(:send_request) { subject }
+ end
+
it 'saves the jira installation data' do
expect { subject }.to change { JiraConnectInstallation.count }.by(1)
end
@@ -31,13 +77,15 @@ RSpec.describe JiraConnect::EventsController do
expect(installation.base_url).to eq('https://test.atlassian.net')
end
- context 'client key already exists' do
- it 'returns 422' do
- create(:jira_connect_installation, client_key: client_key)
+ context 'when jira_connect_asymmetric_jwt is disabled' do
+ before do
+ stub_feature_flags(jira_connect_asymmetric_jwt: false)
+ end
- subject
+ it 'saves the jira installation data without JWT validation' do
+ expect(Atlassian::JiraConnect::AsymmetricJwt).not_to receive(:new)
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect { subject }.to change { JiraConnectInstallation.count }.by(1)
end
end
@@ -49,27 +97,68 @@ RSpec.describe JiraConnect::EventsController do
}
end
- it 'validates the JWT token in authorization header and returns 200 without creating a new installation' do
- create(:jira_connect_installation, client_key: client_key, shared_secret: shared_secret)
- request.headers["Authorization"] = "Bearer #{Atlassian::Jwt.encode({ iss: client_key }, shared_secret)}"
+ it 'returns 422' do
+ subject
- expect { subject }.not_to change { JiraConnectInstallation.count }
- expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
- end
- describe '#uninstalled' do
- let!(:installation) { create(:jira_connect_installation) }
- let(:qsh) { Atlassian::Jwt.create_query_string_hash('https://gitlab.test/events/uninstalled', 'POST', 'https://gitlab.test') }
+ context 'and an installation exists' do
+ let!(:installation) { create(:jira_connect_installation, client_key: client_key, shared_secret: shared_secret) }
- before do
- request.headers['Authorization'] = "JWT #{auth_token}"
+ it 'validates the JWT token in authorization header and returns 200 without creating a new installation' do
+ expect { subject }.not_to change { JiraConnectInstallation.count }
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ context 'when jira_connect_asymmetric_jwt is disabled' do
+ before do
+ stub_feature_flags(jira_connect_asymmetric_jwt: false)
+ end
+
+ it 'decodes the JWT token in authorization header and returns 200 without creating a new installation' do
+ request.headers["Authorization"] = "Bearer #{Atlassian::Jwt.encode({ iss: client_key }, shared_secret)}"
+
+ expect(Atlassian::JiraConnect::AsymmetricJwt).not_to receive(:new)
+
+ expect { subject }.not_to change { JiraConnectInstallation.count }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
end
+ end
+ end
+
+ describe '#uninstalled' do
+ let_it_be(:installation) { create(:jira_connect_installation) }
+
+ let(:client_key) { installation.client_key }
+ let(:params) do
+ {
+ clientKey: client_key,
+ baseUrl: 'https://test.atlassian.net'
+ }
+ end
+
+ it_behaves_like 'verifies asymmetric JWT token' do
+ let(:send_request) { post :uninstalled, params: params }
+ end
+
+ subject(:post_uninstalled) { post :uninstalled, params: params }
- subject(:post_uninstalled) { post :uninstalled }
+ context 'when JWT is invalid' do
+ include_context 'invalid JWT token'
- context 'when JWT is invalid' do
- let(:auth_token) { 'invalid_token' }
+ it 'does not delete the installation' do
+ expect { post_uninstalled }.not_to change { JiraConnectInstallation.count }
+ end
+
+ context 'when jira_connect_asymmetric_jwt is disabled' do
+ before do
+ stub_feature_flags(jira_connect_asymmetric_jwt: false)
+ request.headers['Authorization'] = 'JWT invalid token'
+ end
it 'returns 403' do
post_uninstalled
@@ -81,14 +170,42 @@ RSpec.describe JiraConnect::EventsController do
expect { post_uninstalled }.not_to change { JiraConnectInstallation.count }
end
end
+ end
+
+ context 'when JWT is valid' do
+ include_context 'valid JWT token'
+
+ let(:jira_base_path) { '/-/jira_connect' }
+ let(:jira_event_path) { '/-/jira_connect/events/uninstalled' }
+
+ it 'calls the DestroyService and returns ok in case of success' do
+ expect_next_instance_of(JiraConnectInstallations::DestroyService, installation, jira_base_path, jira_event_path) do |destroy_service|
+ expect(destroy_service).to receive(:execute).and_return(true)
+ end
+
+ post_uninstalled
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'calls the DestroyService and returns unprocessable_entity in case of failure' do
+ expect_next_instance_of(JiraConnectInstallations::DestroyService, installation, jira_base_path, jira_event_path) do |destroy_service|
+ expect(destroy_service).to receive(:execute).and_return(false)
+ end
+
+ post_uninstalled
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+
+ context 'when jira_connect_asymmetric_jwt is disabled' do
+ before do
+ stub_feature_flags(jira_connect_asymmetric_jwt: false)
- context 'when JWT is valid' do
- let(:auth_token) do
- Atlassian::Jwt.encode({ iss: installation.client_key, qsh: qsh }, installation.shared_secret)
+ request.headers['Authorization'] = "JWT #{Atlassian::Jwt.encode({ iss: installation.client_key, qsh: qsh }, installation.shared_secret)}"
end
- let(:jira_base_path) { '/-/jira_connect' }
- let(:jira_event_path) { '/-/jira_connect/events/uninstalled' }
+ let(:qsh) { Atlassian::Jwt.create_query_string_hash('https://gitlab.test/events/uninstalled', 'POST', 'https://gitlab.test') }
it 'calls the DestroyService and returns ok in case of success' do
expect_next_instance_of(JiraConnectInstallations::DestroyService, installation, jira_base_path, jira_event_path) do |destroy_service|
diff --git a/spec/controllers/metrics_controller_spec.rb b/spec/controllers/metrics_controller_spec.rb
index 9fa90dde997..4f74af295c6 100644
--- a/spec/controllers/metrics_controller_spec.rb
+++ b/spec/controllers/metrics_controller_spec.rb
@@ -67,6 +67,12 @@ RSpec.describe MetricsController, :request_store do
expect(response.body).to match(/^prometheus_counter 1$/)
end
+ it 'initializes the rails request SLIs' do
+ expect(Gitlab::Metrics::RailsSlis).to receive(:initialize_request_slis_if_needed!).and_call_original
+
+ get :index
+ end
+
context 'prometheus metrics are disabled' do
before do
allow(Gitlab::Metrics).to receive(:prometheus_metrics_enabled?).and_return(false)
diff --git a/spec/controllers/profiles/two_factor_auths_controller_spec.rb b/spec/controllers/profiles/two_factor_auths_controller_spec.rb
index ca63760d988..e57bd5be937 100644
--- a/spec/controllers/profiles/two_factor_auths_controller_spec.rb
+++ b/spec/controllers/profiles/two_factor_auths_controller_spec.rb
@@ -27,12 +27,6 @@ RSpec.describe Profiles::TwoFactorAuthsController do
expect(flash[:notice])
.to eq _('You need to verify your primary email first before enabling Two-Factor Authentication.')
end
-
- it 'does not redirect when the `ensure_verified_primary_email_for_2fa` feature flag is disabled' do
- stub_feature_flags(ensure_verified_primary_email_for_2fa: false)
-
- expect(response).not_to redirect_to(profile_emails_path)
- end
end
shared_examples 'user must enter a valid current password' do
diff --git a/spec/controllers/profiles_controller_spec.rb b/spec/controllers/profiles_controller_spec.rb
index b4019643baf..4959003d788 100644
--- a/spec/controllers/profiles_controller_spec.rb
+++ b/spec/controllers/profiles_controller_spec.rb
@@ -3,7 +3,8 @@
require('spec_helper')
RSpec.describe ProfilesController, :request_store do
- let(:user) { create(:user) }
+ let(:password) { 'longsecret987!' }
+ let(:user) { create(:user, password: password) }
describe 'POST update' do
it 'does not update password' do
@@ -23,7 +24,7 @@ RSpec.describe ProfilesController, :request_store do
sign_in(user)
put :update,
- params: { user: { email: "john@gmail.com", name: "John" } }
+ params: { user: { email: "john@gmail.com", name: "John", validation_password: password } }
user.reload
diff --git a/spec/controllers/projects/alerting/notifications_controller_spec.rb b/spec/controllers/projects/alerting/notifications_controller_spec.rb
index fe0c4ce00bf..2fff8026b22 100644
--- a/spec/controllers/projects/alerting/notifications_controller_spec.rb
+++ b/spec/controllers/projects/alerting/notifications_controller_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Projects::Alerting::NotificationsController do
+ include HttpBasicAuthHelpers
+
let_it_be(:project) { create(:project) }
let_it_be(:environment) { create(:environment, project: project) }
@@ -53,86 +55,96 @@ RSpec.describe Projects::Alerting::NotificationsController do
end
end
- context 'bearer token' do
- context 'when set' do
- context 'when extractable' do
- before do
- request.headers['HTTP_AUTHORIZATION'] = 'Bearer some token'
- end
-
- it 'extracts bearer token' do
- expect(notify_service).to receive(:execute).with('some token', nil)
-
- make_request
- end
-
- context 'with a corresponding integration' do
- context 'with integration parameters specified' do
- let_it_be_with_reload(:integration) { create(:alert_management_http_integration, project: project) }
+ shared_examples 'a working token' do
+ it 'extracts token' do
+ expect(notify_service).to receive(:execute).with('some token', nil)
- let(:params) { project_params(endpoint_identifier: integration.endpoint_identifier, name: integration.name) }
-
- context 'the integration is active' do
- it 'extracts and finds the integration' do
- expect(notify_service).to receive(:execute).with('some token', integration)
+ make_request
+ end
- make_request
- end
- end
+ context 'with a corresponding integration' do
+ context 'with integration parameters specified' do
+ let_it_be_with_reload(:integration) { create(:alert_management_http_integration, project: project) }
- context 'when the integration is inactive' do
- before do
- integration.update!(active: false)
- end
+ let(:params) { project_params(endpoint_identifier: integration.endpoint_identifier, name: integration.name) }
- it 'does not find an integration' do
- expect(notify_service).to receive(:execute).with('some token', nil)
+ context 'the integration is active' do
+ it 'extracts and finds the integration' do
+ expect(notify_service).to receive(:execute).with('some token', integration)
- make_request
- end
- end
+ make_request
end
+ end
- context 'without integration parameters specified' do
- let_it_be(:integration) { create(:alert_management_http_integration, :legacy, project: project) }
+ context 'when the integration is inactive' do
+ before do
+ integration.update!(active: false)
+ end
- it 'extracts and finds the legacy integration' do
- expect(notify_service).to receive(:execute).with('some token', integration)
+ it 'does not find an integration' do
+ expect(notify_service).to receive(:execute).with('some token', nil)
- make_request
- end
+ make_request
end
end
end
- context 'when inextractable' do
- it 'passes nil for a non-bearer token' do
- request.headers['HTTP_AUTHORIZATION'] = 'some token'
+ context 'without integration parameters specified' do
+ let_it_be(:integration) { create(:alert_management_http_integration, :legacy, project: project) }
- expect(notify_service).to receive(:execute).with(nil, nil)
+ it 'extracts and finds the legacy integration' do
+ expect(notify_service).to receive(:execute).with('some token', integration)
make_request
end
end
end
+ end
- context 'when missing' do
- it 'passes nil' do
- expect(notify_service).to receive(:execute).with(nil, nil)
-
- make_request
+ context 'with bearer token' do
+ context 'when set' do
+ before do
+ request.headers.merge(build_token_auth_header('some token'))
end
+
+ it_behaves_like 'a working token'
+ end
+ end
+
+ context 'with basic auth token' do
+ before do
+ request.headers.merge basic_auth_header(nil, 'some token')
+ end
+
+ it_behaves_like 'a working token'
+ end
+
+ context 'when inextractable token' do
+ it 'passes nil for a non-bearer token' do
+ request.headers['HTTP_AUTHORIZATION'] = 'some token'
+
+ expect(notify_service).to receive(:execute).with(nil, nil)
+
+ make_request
+ end
+ end
+
+ context 'when missing token' do
+ it 'passes nil' do
+ expect(notify_service).to receive(:execute).with(nil, nil)
+
+ make_request
end
end
end
- context 'generic alert payload' do
+ context 'with generic alert payload' do
it_behaves_like 'process alert payload', Projects::Alerting::NotifyService do
let(:payload) { { title: 'Alert title' } }
end
end
- context 'Prometheus alert payload' do
+ context 'with Prometheus alert payload' do
include PrometheusHelpers
it_behaves_like 'process alert payload', Projects::Prometheus::Alerts::NotifyService do
diff --git a/spec/controllers/projects/branches_controller_spec.rb b/spec/controllers/projects/branches_controller_spec.rb
index a00e302a64f..43e8bbd83cf 100644
--- a/spec/controllers/projects/branches_controller_spec.rb
+++ b/spec/controllers/projects/branches_controller_spec.rb
@@ -239,7 +239,7 @@ RSpec.describe Projects::BranchesController do
end
end
- context 'without issue feature access' do
+ context 'without issue feature access', :sidekiq_inline do
before do
project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE)
@@ -656,6 +656,26 @@ RSpec.describe Projects::BranchesController do
)
end
end
+
+ context 'when gitaly is not available' do
+ before do
+ allow_next_instance_of(Gitlab::GitalyClient::RefService) do |ref_service|
+ allow(ref_service).to receive(:local_branches).and_raise(GRPC::DeadlineExceeded)
+ end
+
+ get :index, format: :html, params: {
+ namespace_id: project.namespace, project_id: project
+ }
+ end
+
+ it 'returns with a status 200' do
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'sets gitaly_unavailable variable' do
+ expect(assigns[:gitaly_unavailable]).to be_truthy
+ end
+ end
end
describe 'GET diverging_commit_counts' do
diff --git a/spec/controllers/projects/compare_controller_spec.rb b/spec/controllers/projects/compare_controller_spec.rb
index 2412b970342..48afd42e8ff 100644
--- a/spec/controllers/projects/compare_controller_spec.rb
+++ b/spec/controllers/projects/compare_controller_spec.rb
@@ -409,7 +409,7 @@ RSpec.describe Projects::CompareController do
end
end
- context 'when the user does not have access to the project' do
+ context 'when the user does not have access to the project', :sidekiq_inline do
before do
project.team.truncate
project.update!(visibility: 'private')
diff --git a/spec/controllers/projects/design_management/designs/resized_image_controller_spec.rb b/spec/controllers/projects/design_management/designs/resized_image_controller_spec.rb
index 56c0ef592ca..cc0f4a426f4 100644
--- a/spec/controllers/projects/design_management/designs/resized_image_controller_spec.rb
+++ b/spec/controllers/projects/design_management/designs/resized_image_controller_spec.rb
@@ -91,7 +91,7 @@ RSpec.describe Projects::DesignManagement::Designs::ResizedImageController do
# (the record that represents the design at a specific version), to
# verify that the correct file is being returned.
def etag(action)
- ActionDispatch::TestResponse.new.send(:generate_weak_etag, [action.cache_key, ''])
+ ActionDispatch::TestResponse.new.send(:generate_weak_etag, [action.cache_key])
end
specify { expect(newest_version.sha).not_to eq(oldest_version.sha) }
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 977879b453c..0b3bd4d78ac 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -1411,39 +1411,42 @@ RSpec.describe Projects::IssuesController do
stub_application_setting(issues_create_limit: 5)
end
- it 'prevents from creating more issues', :request_store do
- 5.times { post_new_issue }
-
- expect { post_new_issue }
- .to change { Gitlab::GitalyClient.get_request_count }.by(1) # creates 1 projects and 0 issues
-
- post_new_issue
- expect(response.body).to eq(_('This endpoint has been requested too many times. Try again later.'))
- expect(response).to have_gitlab_http_status(:too_many_requests)
- end
-
- it 'logs the event on auth.log' do
- attributes = {
- message: 'Application_Rate_Limiter_Request',
- env: :issues_create_request_limit,
- remote_ip: '0.0.0.0',
- request_method: 'POST',
- path: "/#{project.full_path}/-/issues",
- user_id: user.id,
- username: user.username
- }
+ context 'when issue creation limits imposed' do
+ it 'prevents from creating more issues', :request_store do
+ 5.times { post_new_issue }
- expect(Gitlab::AuthLogger).to receive(:error).with(attributes).once
+ expect { post_new_issue }
+ .to change { Gitlab::GitalyClient.get_request_count }.by(1) # creates 1 project and 0 issues
- project.add_developer(user)
- sign_in(user)
+ post_new_issue
- 6.times do
- post :create, params: {
- namespace_id: project.namespace.to_param,
- project_id: project,
- issue: { title: 'Title', description: 'Description' }
+ expect(response.body).to eq(_('This endpoint has been requested too many times. Try again later.'))
+ expect(response).to have_gitlab_http_status(:too_many_requests)
+ end
+
+ it 'logs the event on auth.log' do
+ attributes = {
+ message: 'Application_Rate_Limiter_Request',
+ env: :issues_create_request_limit,
+ remote_ip: '0.0.0.0',
+ request_method: 'POST',
+ path: "/#{project.full_path}/-/issues",
+ user_id: user.id,
+ username: user.username
}
+
+ expect(Gitlab::AuthLogger).to receive(:error).with(attributes).once
+
+ project.add_developer(user)
+ sign_in(user)
+
+ 6.times do
+ post :create, params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ issue: { title: 'Title', description: 'Description' }
+ }
+ end
end
end
end
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 0da8a30611c..438fc2f2106 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -1876,8 +1876,7 @@ RSpec.describe Projects::MergeRequestsController do
let(:sha) { forked.commit.sha }
let(:environment) { create(:environment, project: forked) }
let(:pipeline) { create(:ci_pipeline, sha: sha, project: forked) }
- let(:build) { create(:ci_build, pipeline: pipeline) }
- let!(:deployment) { create(:deployment, :succeed, environment: environment, sha: sha, ref: 'master', deployable: build) }
+ let!(:build) { create(:ci_build, :with_deployment, environment: environment.name, pipeline: pipeline) }
let(:merge_request) do
create(:merge_request, source_project: forked, target_project: project, target_branch: 'master', head_pipeline: pipeline)
@@ -1901,8 +1900,7 @@ RSpec.describe Projects::MergeRequestsController do
let(:source_environment) { create(:environment, project: project) }
let(:merge_commit_sha) { project.repository.merge(user, forked.commit.id, merge_request, "merged in test") }
let(:post_merge_pipeline) { create(:ci_pipeline, sha: merge_commit_sha, project: project) }
- let(:post_merge_build) { create(:ci_build, pipeline: post_merge_pipeline) }
- let!(:source_deployment) { create(:deployment, :succeed, environment: source_environment, sha: merge_commit_sha, ref: 'master', deployable: post_merge_build) }
+ let!(:post_merge_build) { create(:ci_build, :with_deployment, environment: source_environment.name, pipeline: post_merge_pipeline) }
before do
merge_request.update!(merge_commit_sha: merge_commit_sha)
@@ -1944,9 +1942,6 @@ RSpec.describe Projects::MergeRequestsController do
context 'when a merge request has multiple environments with deployments' do
let(:sha) { merge_request.diff_head_sha }
- let(:ref) { merge_request.source_branch }
-
- let!(:build) { create(:ci_build, pipeline: pipeline) }
let!(:pipeline) { create(:ci_pipeline, sha: sha, project: project) }
let!(:environment) { create(:environment, name: 'env_a', project: project) }
let!(:another_environment) { create(:environment, name: 'env_b', project: project) }
@@ -1954,8 +1949,8 @@ RSpec.describe Projects::MergeRequestsController do
before do
merge_request.update_head_pipeline
- create(:deployment, :succeed, environment: environment, sha: sha, ref: ref, deployable: build)
- create(:deployment, :succeed, environment: another_environment, sha: sha, ref: ref, deployable: build)
+ create(:ci_build, :with_deployment, environment: environment.name, pipeline: pipeline)
+ create(:ci_build, :with_deployment, environment: another_environment.name, pipeline: pipeline)
end
it 'exposes multiple environment statuses' do
diff --git a/spec/controllers/projects/pipeline_schedules_controller_spec.rb b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
index 27a3e95896a..d86f38c1f0b 100644
--- a/spec/controllers/projects/pipeline_schedules_controller_spec.rb
+++ b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
@@ -397,7 +397,7 @@ RSpec.describe Projects::PipelineSchedulesController do
end
end
- describe 'POST #play', :clean_gitlab_redis_cache do
+ describe 'POST #play', :clean_gitlab_redis_rate_limiting do
let(:ref) { 'master' }
before do
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index 2c25c7e20ea..a81173ccaac 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe Projects::RawController do
include_examples 'single Gitaly request'
end
- context 'when the endpoint receives requests above the limit', :clean_gitlab_redis_cache do
+ context 'when the endpoint receives requests above the limit', :clean_gitlab_redis_rate_limiting do
let(:file_path) { 'master/README.md' }
before do
diff --git a/spec/controllers/projects/registry/repositories_controller_spec.rb b/spec/controllers/projects/registry/repositories_controller_spec.rb
index 0685e5a2055..a5faaaf5969 100644
--- a/spec/controllers/projects/registry/repositories_controller_spec.rb
+++ b/spec/controllers/projects/registry/repositories_controller_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Projects::Registry::RepositoriesController do
before do
sign_in(user)
stub_container_registry_config(enabled: true)
+ stub_container_registry_info
end
context 'when user has access to registry' do
@@ -30,6 +31,18 @@ RSpec.describe Projects::Registry::RepositoriesController do
expect(response).to have_gitlab_http_status(:not_found)
end
+
+ [ContainerRegistry::Path::InvalidRegistryPathError, Faraday::Error].each do |error_class|
+ context "when there is a #{error_class}" do
+ it 'displays a connection error message' do
+ expect(::ContainerRegistry::Client).to receive(:registry_info).and_raise(error_class, nil, nil)
+
+ go_to_index
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
end
shared_examples 'renders a list of repositories' do
diff --git a/spec/controllers/projects/tags_controller_spec.rb b/spec/controllers/projects/tags_controller_spec.rb
index efb57494f82..d0719643b7f 100644
--- a/spec/controllers/projects/tags_controller_spec.rb
+++ b/spec/controllers/projects/tags_controller_spec.rb
@@ -17,6 +17,25 @@ RSpec.describe Projects::TagsController do
expect(assigns(:tags).map(&:name)).to include('v1.1.0', 'v1.0.0')
end
+ context 'when Gitaly is unavailable' do
+ where(:format) do
+ [:html, :atom]
+ end
+
+ with_them do
+ it 'returns 503 status code' do
+ expect_next_instance_of(TagsFinder) do |finder|
+ expect(finder).to receive(:execute).and_return([[], Gitlab::Git::CommandError.new])
+ end
+
+ get :index, params: { namespace_id: project.namespace.to_param, project_id: project }, format: format
+
+ expect(assigns(:tags)).to eq([])
+ expect(response).to have_gitlab_http_status(:service_unavailable)
+ end
+ end
+ end
+
it 'returns releases matching those tags' do
subject
diff --git a/spec/controllers/projects/usage_quotas_controller_spec.rb b/spec/controllers/projects/usage_quotas_controller_spec.rb
new file mode 100644
index 00000000000..6125ba13f96
--- /dev/null
+++ b/spec/controllers/projects/usage_quotas_controller_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::UsageQuotasController do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: user.namespace) }
+
+ describe 'GET #index' do
+ render_views
+
+ it 'does not render search settings partial' do
+ sign_in(user)
+ get(:index, params: { namespace_id: user.namespace, project_id: project })
+
+ expect(response).to render_template('index')
+ expect(response).not_to render_template('shared/search_settings')
+ end
+ end
+end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 9d070061850..3d966848c5b 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -312,6 +312,17 @@ RSpec.describe ProjectsController do
expect { get_show }.not_to change { Gitlab::GitalyClient.get_request_count }
end
+
+ it "renders files even with invalid license" do
+ controller.instance_variable_set(:@project, public_project)
+ expect(public_project.repository).to receive(:license_key).and_return('woozle wuzzle').at_least(:once)
+
+ get_show
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('_files')
+ expect(response.body).to have_content('LICENSE') # would be 'MIT license' if the stub did not work
+ end
end
context "when the url contains .atom" do
@@ -409,42 +420,66 @@ RSpec.describe ProjectsController do
end
describe 'POST create' do
- let!(:params) do
- {
- path: 'foo',
- description: 'bar',
- import_url: project.http_url_to_repo,
- namespace_id: user.namespace.id
- }
- end
-
subject { post :create, params: { project: params } }
before do
sign_in(user)
end
- context 'when import by url is disabled' do
- before do
- stub_application_setting(import_sources: [])
+ context 'on import' do
+ let(:params) do
+ {
+ path: 'foo',
+ description: 'bar',
+ namespace_id: user.namespace.id,
+ import_url: project.http_url_to_repo
+ }
+ end
+
+ context 'when import by url is disabled' do
+ before do
+ stub_application_setting(import_sources: [])
+ end
+
+ it 'does not create project and reports an error' do
+ expect { subject }.not_to change { Project.count }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
- it 'does not create project and reports an error' do
- expect { subject }.not_to change { Project.count }
+ context 'when import by url is enabled' do
+ before do
+ stub_application_setting(import_sources: ['git'])
+ end
+
+ it 'creates project' do
+ expect { subject }.to change { Project.count }
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:redirect)
+ end
end
end
- context 'when import by url is enabled' do
- before do
- stub_application_setting(import_sources: ['git'])
+ context 'with new_project_sast_enabled', :experiment do
+ let(:params) do
+ {
+ path: 'foo',
+ description: 'bar',
+ namespace_id: user.namespace.id,
+ initialize_with_sast: '1'
+ }
end
- it 'creates project' do
- expect { subject }.to change { Project.count }
+ it 'tracks an event on project creation' do
+ expect(experiment(:new_project_sast_enabled)).to track(:created,
+ property: 'blank',
+ checked: true,
+ project: an_instance_of(Project),
+ namespace: user.namespace
+ ).on_next_instance.with_context(user: user)
- expect(response).to have_gitlab_http_status(:redirect)
+ post :create, params: { project: params }
end
end
end
@@ -1373,12 +1408,12 @@ RSpec.describe ProjectsController do
end
end
- context 'when the endpoint receives requests above the limit', :clean_gitlab_redis_cache do
+ context 'when the endpoint receives requests above the limit', :clean_gitlab_redis_rate_limiting do
include_examples 'rate limits project export endpoint'
end
end
- describe '#download_export', :clean_gitlab_redis_cache do
+ describe '#download_export', :clean_gitlab_redis_rate_limiting do
let(:action) { :download_export }
context 'object storage enabled' do
@@ -1413,7 +1448,7 @@ RSpec.describe ProjectsController do
end
end
- context 'when the endpoint receives requests above the limit', :clean_gitlab_redis_cache do
+ context 'when the endpoint receives requests above the limit', :clean_gitlab_redis_rate_limiting do
before do
allow(Gitlab::ApplicationRateLimiter)
.to receive(:increment)
@@ -1485,7 +1520,7 @@ RSpec.describe ProjectsController do
end
end
- context 'when the endpoint receives requests above the limit', :clean_gitlab_redis_cache do
+ context 'when the endpoint receives requests above the limit', :clean_gitlab_redis_rate_limiting do
include_examples 'rate limits project export endpoint'
end
end
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index 5edd60ebc79..a25c597edb2 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -602,6 +602,22 @@ RSpec.describe RegistrationsController do
end
end
+ context 'when user did not accept app terms' do
+ let(:user) { create(:user, accepted_term: nil) }
+
+ before do
+ stub_application_setting(password_authentication_enabled_for_web: false)
+ stub_application_setting(password_authentication_enabled_for_git: false)
+ stub_application_setting(enforce_terms: true)
+ end
+
+ it 'fails with message' do
+ post :destroy, params: { username: user.username }
+
+ expect_failure(s_('Profiles|You must accept the Terms of Service in order to perform this action.'))
+ end
+ end
+
it 'sets the username and caller_id in the context' do
expect(controller).to receive(:destroy).and_wrap_original do |m, *args|
m.call(*args)
diff --git a/spec/controllers/repositories/git_http_controller_spec.rb b/spec/controllers/repositories/git_http_controller_spec.rb
index 04d5008cb34..b5cd14154a3 100644
--- a/spec/controllers/repositories/git_http_controller_spec.rb
+++ b/spec/controllers/repositories/git_http_controller_spec.rb
@@ -7,12 +7,33 @@ RSpec.describe Repositories::GitHttpController do
let_it_be(:personal_snippet) { create(:personal_snippet, :public, :repository) }
let_it_be(:project_snippet) { create(:project_snippet, :public, :repository, project: project) }
+ shared_examples 'handles unavailable Gitaly' do
+ let(:params) { super().merge(service: 'git-upload-pack') }
+
+ before do
+ request.headers.merge! auth_env(user.username, user.password, nil)
+ end
+
+ context 'when Gitaly is unavailable' do
+ it 'responds with a 503 message' do
+ expect(Gitlab::GitalyClient).to receive(:call).and_raise(GRPC::Unavailable)
+
+ get :info_refs, params: params
+
+ expect(response).to have_gitlab_http_status(:service_unavailable)
+ expect(response.body).to eq('The git server, Gitaly, is not available at this time. Please contact your administrator.')
+ end
+ end
+ end
+
context 'when repository container is a project' do
it_behaves_like Repositories::GitHttpController do
let(:container) { project }
let(:user) { project.owner }
let(:access_checker_class) { Gitlab::GitAccess }
+ it_behaves_like 'handles unavailable Gitaly'
+
describe 'POST #git_upload_pack' do
before do
allow(controller).to receive(:verify_workhorse_api!).and_return(true)
@@ -84,6 +105,8 @@ RSpec.describe Repositories::GitHttpController do
let(:container) { personal_snippet }
let(:user) { personal_snippet.author }
let(:access_checker_class) { Gitlab::GitAccessSnippet }
+
+ it_behaves_like 'handles unavailable Gitaly'
end
end
@@ -92,6 +115,8 @@ RSpec.describe Repositories::GitHttpController do
let(:container) { project_snippet }
let(:user) { project_snippet.author }
let(:access_checker_class) { Gitlab::GitAccessSnippet }
+
+ it_behaves_like 'handles unavailable Gitaly'
end
end
end
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index 4e87a9fc1ba..73e8e0c7dd4 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -215,6 +215,16 @@ RSpec.describe SearchController do
end
end
+ it 'strips surrounding whitespace from search query' do
+ get :show, params: { scope: 'notes', search: ' foobar ' }
+ expect(assigns[:search_term]).to eq 'foobar'
+ end
+
+ it 'strips surrounding whitespace from autocomplete term' do
+ expect(controller).to receive(:search_autocomplete_opts).with('youcompleteme')
+ get :autocomplete, params: { term: ' youcompleteme ' }
+ end
+
it 'finds issue comments' do
project = create(:project, :public)
note = create(:note_on_issue, project: project)
@@ -305,7 +315,7 @@ RSpec.describe SearchController do
expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers['Cache-Control']).to eq('no-store')
+ expect(response.headers['Cache-Control']).to eq('private, no-store')
end
end
diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb
index 2aa9b86b20e..8442c214cd3 100644
--- a/spec/controllers/uploads_controller_spec.rb
+++ b/spec/controllers/uploads_controller_spec.rb
@@ -599,6 +599,46 @@ RSpec.describe UploadsController do
end
end
+ context "when viewing a topic avatar" do
+ let!(:topic) { create(:topic, avatar: fixture_file_upload("spec/fixtures/dk.png", "image/png")) }
+
+ context "when signed in" do
+ before do
+ sign_in(user)
+ end
+
+ it "responds with status 200" do
+ get :show, params: { model: "projects/topic", mounted_as: "avatar", id: topic.id, filename: "dk.png" }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it_behaves_like 'content publicly cached' do
+ subject do
+ get :show, params: { model: "projects/topic", mounted_as: "avatar", id: topic.id, filename: "dk.png" }
+
+ response
+ end
+ end
+ end
+
+ context "when not signed in" do
+ it "responds with status 200" do
+ get :show, params: { model: "projects/topic", mounted_as: "avatar", id: topic.id, filename: "dk.png" }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it_behaves_like 'content publicly cached' do
+ subject do
+ get :show, params: { model: "projects/topic", mounted_as: "avatar", id: topic.id, filename: "dk.png" }
+
+ response
+ end
+ end
+ end
+ end
+
context 'Appearance' do
context 'when viewing a custom header logo' do
let!(:appearance) { create :appearance, header_logo: fixture_file_upload('spec/fixtures/dk.png', 'image/png') }
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index c7739e2ff5f..5eccb0b46ef 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -67,6 +67,8 @@ RSpec.describe 'Database schema' do
oauth_access_tokens: %w[resource_owner_id application_id],
oauth_applications: %w[owner_id],
open_project_tracker_data: %w[closed_status_id],
+ packages_build_infos: %w[pipeline_id],
+ packages_package_file_build_infos: %w[pipeline_id],
product_analytics_events_experimental: %w[event_id txn_id user_id],
project_group_links: %w[group_id],
project_statistics: %w[namespace_id],
@@ -82,6 +84,7 @@ RSpec.describe 'Database schema' do
subscriptions: %w[user_id subscribable_id],
suggestions: %w[commit_id],
taggings: %w[tag_id taggable_id tagger_id],
+ terraform_state_versions: %w[ci_build_id],
timelogs: %w[user_id],
todos: %w[target_id commit_id],
uploads: %w[model_id],
@@ -201,7 +204,8 @@ RSpec.describe 'Database schema' do
"Operations::FeatureFlags::Strategy" => %w[parameters],
"Packages::Composer::Metadatum" => %w[composer_json],
"RawUsageData" => %w[payload], # Usage data payload changes often, we cannot use one schema
- "Releases::Evidence" => %w[summary]
+ "Releases::Evidence" => %w[summary],
+ "Vulnerabilities::Finding::Evidence" => %w[data] # Validation work in progress
}.freeze
# We are skipping GEO models for now as it adds up complexity
diff --git a/spec/experiments/new_project_sast_enabled_experiment_spec.rb b/spec/experiments/new_project_sast_enabled_experiment_spec.rb
new file mode 100644
index 00000000000..dcf71bfffd7
--- /dev/null
+++ b/spec/experiments/new_project_sast_enabled_experiment_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe NewProjectSastEnabledExperiment do
+ it "defines the expected behaviors and variants" do
+ expect(subject.behaviors.keys).to match_array(%w[control candidate free_indicator])
+ end
+
+ it "publishes to the database" do
+ expect(subject).to receive(:publish_to_database)
+
+ subject.publish
+ end
+end
diff --git a/spec/factories/bulk_import.rb b/spec/factories/bulk_import.rb
index 07907bab3df..748afc0c67c 100644
--- a/spec/factories/bulk_import.rb
+++ b/spec/factories/bulk_import.rb
@@ -4,6 +4,7 @@ FactoryBot.define do
factory :bulk_import, class: 'BulkImport' do
user
source_type { :gitlab }
+ source_version { BulkImport.min_gl_version_for_project_migration.to_s }
trait :created do
status { 0 }
diff --git a/spec/factories/ci/pending_builds.rb b/spec/factories/ci/pending_builds.rb
index 31e42e1bc9e..28258b0339f 100644
--- a/spec/factories/ci/pending_builds.rb
+++ b/spec/factories/ci/pending_builds.rb
@@ -9,5 +9,6 @@ FactoryBot.define do
namespace { project.namespace }
minutes_exceeded { false }
tag_ids { build.tags_ids }
+ namespace_traversal_ids { project.namespace.traversal_ids }
end
end
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index 4fc7d945881..ae3404a41a2 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -18,6 +18,10 @@ FactoryBot.define do
transient { child_of { nil } }
transient { upstream_of { nil } }
+ before(:create) do |pipeline, evaluator|
+ pipeline.ensure_project_iid!
+ end
+
after(:build) do |pipeline, evaluator|
if evaluator.child_of
pipeline.project = evaluator.child_of.project
diff --git a/spec/factories/ci/runner_projects.rb b/spec/factories/ci/runner_projects.rb
index ead9fe10f6e..31536275ff4 100644
--- a/spec/factories/ci/runner_projects.rb
+++ b/spec/factories/ci/runner_projects.rb
@@ -2,7 +2,14 @@
FactoryBot.define do
factory :ci_runner_project, class: 'Ci::RunnerProject' do
- runner factory: [:ci_runner, :project]
project
+
+ after(:build) do |runner_project, evaluator|
+ unless runner_project.runner.present?
+ runner_project.runner = build(
+ :ci_runner, :project, runner_projects: [runner_project]
+ )
+ end
+ end
end
end
diff --git a/spec/factories/ci/runners.rb b/spec/factories/ci/runners.rb
index 30f78531324..d0853df4e4b 100644
--- a/spec/factories/ci/runners.rb
+++ b/spec/factories/ci/runners.rb
@@ -10,6 +10,16 @@ FactoryBot.define do
runner_type { :instance_type }
+ transient do
+ projects { [] }
+ end
+
+ after(:build) do |runner, evaluator|
+ evaluator.projects.each do |proj|
+ runner.runner_projects << build(:ci_runner_project, project: proj)
+ end
+ end
+
trait :online do
contacted_at { Time.now }
end
@@ -30,7 +40,9 @@ FactoryBot.define do
runner_type { :project_type }
after(:build) do |runner, evaluator|
- runner.projects << build(:project) if runner.projects.empty?
+ if runner.runner_projects.empty?
+ runner.runner_projects << build(:ci_runner_project)
+ end
end
end
diff --git a/spec/factories/dependency_proxy.rb b/spec/factories/dependency_proxy.rb
index c2873ce9b5e..836ee87e4d7 100644
--- a/spec/factories/dependency_proxy.rb
+++ b/spec/factories/dependency_proxy.rb
@@ -6,6 +6,11 @@ FactoryBot.define do
size { 1234 }
file { fixture_file_upload('spec/fixtures/dependency_proxy/a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4.gz') }
file_name { 'a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4.gz' }
+ status { :default }
+
+ trait :expired do
+ status { :expired }
+ end
end
factory :dependency_proxy_manifest, class: 'DependencyProxy::Manifest' do
@@ -13,7 +18,12 @@ FactoryBot.define do
size { 1234 }
file { fixture_file_upload('spec/fixtures/dependency_proxy/manifest') }
digest { 'sha256:d0710affa17fad5f466a70159cc458227bd25d4afb39514ef662ead3e6c99515' }
- file_name { 'alpine:latest.json' }
+ sequence(:file_name) { |n| "alpine:latest#{n}.json" }
content_type { 'application/vnd.docker.distribution.manifest.v2+json' }
+ status { :default }
+
+ trait :expired do
+ status { :expired }
+ end
end
end
diff --git a/spec/factories/dependency_proxy/group_settings.rb b/spec/factories/dependency_proxy/group_settings.rb
new file mode 100644
index 00000000000..c15cddf7430
--- /dev/null
+++ b/spec/factories/dependency_proxy/group_settings.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :dependency_proxy_group_setting, class: 'DependencyProxy::GroupSetting' do
+ group
+
+ enabled { true }
+ end
+end
diff --git a/spec/factories/dependency_proxy/image_ttl_group_policies.rb b/spec/factories/dependency_proxy/image_ttl_group_policies.rb
index 21e5dd44cf5..068c87f578c 100644
--- a/spec/factories/dependency_proxy/image_ttl_group_policies.rb
+++ b/spec/factories/dependency_proxy/image_ttl_group_policies.rb
@@ -6,5 +6,9 @@ FactoryBot.define do
enabled { true }
ttl { 90 }
+
+ trait :disabled do
+ enabled { false }
+ end
end
end
diff --git a/spec/factories/design_management/versions.rb b/spec/factories/design_management/versions.rb
index 247a385bd0e..e505a77d6bd 100644
--- a/spec/factories/design_management/versions.rb
+++ b/spec/factories/design_management/versions.rb
@@ -52,9 +52,9 @@ FactoryBot.define do
.where(design_id: evaluator.deleted_designs.map(&:id))
.update_all(event: events[:deletion])
- version.designs.reload
# Ensure version.issue == design.issue for all version.designs
version.designs.update_all(issue_id: version.issue_id)
+ version.designs.reload
needed = evaluator.designs_count
have = version.designs.size
diff --git a/spec/factories/groups.rb b/spec/factories/groups.rb
index bd6e37c1cef..859f381e4c1 100644
--- a/spec/factories/groups.rb
+++ b/spec/factories/groups.rb
@@ -4,7 +4,7 @@ FactoryBot.define do
factory :group, class: 'Group', parent: :namespace do
sequence(:name) { |n| "group#{n}" }
path { name.downcase.gsub(/\s/, '_') }
- type { 'Group' }
+ type { Group.sti_name }
owner { nil }
project_creation_level { ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS }
@@ -69,6 +69,20 @@ FactoryBot.define do
allow_descendants_override_disabled_shared_runners { true }
end
+ trait :disabled_and_unoverridable do
+ shared_runners_disabled
+ allow_descendants_override_disabled_shared_runners { false }
+ end
+
+ trait :disabled_with_override do
+ shared_runners_disabled
+ allow_descendants_override_disabled_shared_runners
+ end
+
+ trait :shared_runners_enabled do
+ shared_runners_enabled { true }
+ end
+
# Construct a hierarchy underneath the group.
# Each group will have `children` amount of children,
# and `depth` levels of descendants.
diff --git a/spec/factories/integration_data.rb b/spec/factories/integration_data.rb
index 4d0892556f8..7ff2f3ae846 100644
--- a/spec/factories/integration_data.rb
+++ b/spec/factories/integration_data.rb
@@ -18,12 +18,4 @@ FactoryBot.define do
factory :issue_tracker_data, class: 'Integrations::IssueTrackerData' do
integration
end
-
- factory :open_project_tracker_data, class: 'Integrations::OpenProjectTrackerData' do
- integration factory: :open_project_service
- url { 'http://openproject.example.com' }
- token { 'supersecret' }
- project_identifier_code { 'PRJ-1' }
- closed_status_id { '15' }
- end
end
diff --git a/spec/factories/integrations.rb b/spec/factories/integrations.rb
index cb1c94c25c1..63f85c04ac7 100644
--- a/spec/factories/integrations.rb
+++ b/spec/factories/integrations.rb
@@ -166,26 +166,6 @@ FactoryBot.define do
external_wiki_url { 'http://external-wiki-url.com' }
end
- factory :open_project_service, class: 'Integrations::OpenProject' do
- project
- active { true }
-
- transient do
- url { 'http://openproject.example.com' }
- api_url { 'http://openproject.example.com/issues/:id' }
- token { 'supersecret' }
- closed_status_id { '15' }
- project_identifier_code { 'PRJ-1' }
- end
-
- after(:build) do |integration, evaluator|
- integration.open_project_tracker_data = build(:open_project_tracker_data,
- integration: integration, url: evaluator.url, api_url: evaluator.api_url, token: evaluator.token,
- closed_status_id: evaluator.closed_status_id, project_identifier_code: evaluator.project_identifier_code
- )
- end
- end
-
trait :jira_cloud_service do
url { 'https://mysite.atlassian.net' }
username { 'jira_user' }
diff --git a/spec/factories/namespaces.rb b/spec/factories/namespaces.rb
index 957ec88420d..959183f227d 100644
--- a/spec/factories/namespaces.rb
+++ b/spec/factories/namespaces.rb
@@ -5,6 +5,8 @@ FactoryBot.define do
sequence(:name) { |n| "namespace#{n}" }
path { name.downcase.gsub(/\s/, '_') }
+ # TODO: can this be moved into the :user_namespace factory?
+ # evaluate in issue https://gitlab.com/gitlab-org/gitlab/-/issues/341070
owner { association(:user, strategy: :build, namespace: instance, username: path) }
trait :with_aggregation_schedule do
diff --git a/spec/factories/namespaces/project_namespaces.rb b/spec/factories/namespaces/project_namespaces.rb
index 10b86f48090..ca9fc5f8768 100644
--- a/spec/factories/namespaces/project_namespaces.rb
+++ b/spec/factories/namespaces/project_namespaces.rb
@@ -3,10 +3,11 @@
FactoryBot.define do
factory :project_namespace, class: 'Namespaces::ProjectNamespace' do
project
+ parent { project.namespace }
+ visibility_level { project.visibility_level }
name { project.name }
path { project.path }
type { Namespaces::ProjectNamespace.sti_name }
owner { nil }
- parent factory: :group
end
end
diff --git a/spec/factories/namespaces/user_namespaces.rb b/spec/factories/namespaces/user_namespaces.rb
new file mode 100644
index 00000000000..31c924462d7
--- /dev/null
+++ b/spec/factories/namespaces/user_namespaces.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :user_namespace, class: 'Namespaces::UserNamespace', parent: :namespace do
+ sequence(:name) { |n| "user_namespace#{n}" }
+ type { Namespaces::UserNamespace.sti_name }
+ end
+end
diff --git a/spec/factories/packages/build_info.rb b/spec/factories/packages/build_infos.rb
index dc6208d72a9..dc6208d72a9 100644
--- a/spec/factories/packages/build_info.rb
+++ b/spec/factories/packages/build_infos.rb
diff --git a/spec/factories/packages/composer/cache_files.rb b/spec/factories/packages/composer/cache_files.rb
new file mode 100644
index 00000000000..30c28ec175b
--- /dev/null
+++ b/spec/factories/packages/composer/cache_files.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+FactoryBot.define do
+ factory :composer_cache_file, class: 'Packages::Composer::CacheFile' do
+ group
+
+ file_sha256 { '1' * 64 }
+
+ transient do
+ file_fixture { 'spec/fixtures/packages/composer/package.json' }
+ end
+
+ after(:build) do |cache_file, evaluator|
+ cache_file.file = fixture_file_upload(evaluator.file_fixture)
+ end
+
+ trait(:object_storage) do
+ file_store { Packages::Composer::CacheUploader::Store::REMOTE }
+ end
+ end
+end
diff --git a/spec/factories/packages/composer/metadata.rb b/spec/factories/packages/composer/metadata.rb
new file mode 100644
index 00000000000..e4b38faa9ba
--- /dev/null
+++ b/spec/factories/packages/composer/metadata.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :composer_metadatum, class: 'Packages::Composer::Metadatum' do
+ package { association(:composer_package) }
+
+ target_sha { '123' }
+ composer_json { { name: 'foo' } }
+ end
+end
diff --git a/spec/factories/packages/conan/file_metadata.rb b/spec/factories/packages/conan/file_metadata.rb
new file mode 100644
index 00000000000..609f80e54c7
--- /dev/null
+++ b/spec/factories/packages/conan/file_metadata.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :conan_file_metadatum, class: 'Packages::Conan::FileMetadatum' do
+ package_file { association(:conan_package_file, :conan_recipe_file, without_loaded_metadatum: true) }
+ recipe_revision { '0' }
+ conan_file_type { 'recipe_file' }
+
+ trait(:recipe_file) do
+ conan_file_type { 'recipe_file' }
+ end
+
+ trait(:package_file) do
+ package_file { association(:conan_package_file, :conan_package, without_loaded_metadatum: true) }
+ conan_file_type { 'package_file' }
+ package_revision { '0' }
+ conan_package_reference { '123456789' }
+ end
+ end
+end
diff --git a/spec/factories/packages/conan/metadata.rb b/spec/factories/packages/conan/metadata.rb
new file mode 100644
index 00000000000..81ded799684
--- /dev/null
+++ b/spec/factories/packages/conan/metadata.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :conan_metadatum, class: 'Packages::Conan::Metadatum' do
+ association :package, factory: [:conan_package, :without_loaded_metadatum], without_package_files: true
+ package_username { 'username' }
+ package_channel { 'stable' }
+ end
+end
diff --git a/spec/factories/packages/dependencies.rb b/spec/factories/packages/dependencies.rb
new file mode 100644
index 00000000000..a62d48c2e73
--- /dev/null
+++ b/spec/factories/packages/dependencies.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :packages_dependency, class: 'Packages::Dependency' do
+ sequence(:name) { |n| "@test/package-#{n}"}
+ sequence(:version_pattern) { |n| "~6.2.#{n}" }
+
+ trait(:rubygems) do
+ sequence(:name) { |n| "gem-dependency-#{n}"}
+ end
+ end
+end
diff --git a/spec/factories/packages/dependency_links.rb b/spec/factories/packages/dependency_links.rb
new file mode 100644
index 00000000000..6470cbdc9a6
--- /dev/null
+++ b/spec/factories/packages/dependency_links.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :packages_dependency_link, class: 'Packages::DependencyLink' do
+ package { association(:nuget_package) }
+ dependency { association(:packages_dependency) }
+ dependency_type { :dependencies }
+
+ trait(:with_nuget_metadatum) do
+ after :build do |link|
+ link.nuget_metadatum = build(:nuget_dependency_link_metadatum)
+ end
+ end
+
+ trait(:rubygems) do
+ package { association(:rubygems_package) }
+ dependency { association(:packages_dependency, :rubygems) }
+ end
+ end
+end
diff --git a/spec/factories/packages/maven/maven_metadata.rb b/spec/factories/packages/maven/maven_metadata.rb
new file mode 100644
index 00000000000..861daab3a74
--- /dev/null
+++ b/spec/factories/packages/maven/maven_metadata.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :maven_metadatum, class: 'Packages::Maven::Metadatum' do
+ association :package, package_type: :maven
+ path { 'my/company/app/my-app/1.0-SNAPSHOT' }
+ app_group { 'my.company.app' }
+ app_name { 'my-app' }
+ app_version { '1.0-SNAPSHOT' }
+ end
+end
diff --git a/spec/factories/packages/nuget/dependency_link_metadata.rb b/spec/factories/packages/nuget/dependency_link_metadata.rb
new file mode 100644
index 00000000000..ed632e72cbf
--- /dev/null
+++ b/spec/factories/packages/nuget/dependency_link_metadata.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :nuget_dependency_link_metadatum, class: 'Packages::Nuget::DependencyLinkMetadatum' do
+ dependency_link { association(:packages_dependency_link) }
+ target_framework { '.NETStandard2.0' }
+ end
+end
diff --git a/spec/factories/packages/nuget/metadata.rb b/spec/factories/packages/nuget/metadata.rb
new file mode 100644
index 00000000000..d2a2a666928
--- /dev/null
+++ b/spec/factories/packages/nuget/metadata.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :nuget_metadatum, class: 'Packages::Nuget::Metadatum' do
+ package { association(:nuget_package) }
+
+ license_url { 'http://www.gitlab.com' }
+ project_url { 'http://www.gitlab.com' }
+ icon_url { 'http://www.gitlab.com' }
+ end
+end
diff --git a/spec/factories/packages/package_file.rb b/spec/factories/packages/package_files.rb
index d9afbac1048..d9afbac1048 100644
--- a/spec/factories/packages/package_file.rb
+++ b/spec/factories/packages/package_files.rb
diff --git a/spec/factories/packages/package_tags.rb b/spec/factories/packages/package_tags.rb
new file mode 100644
index 00000000000..3d2eea4a73b
--- /dev/null
+++ b/spec/factories/packages/package_tags.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :packages_tag, class: 'Packages::Tag' do
+ package
+ sequence(:name) { |n| "tag-#{n}"}
+ end
+end
diff --git a/spec/factories/packages.rb b/spec/factories/packages/packages.rb
index b04b7e691fe..bb9aa95fe08 100644
--- a/spec/factories/packages.rb
+++ b/spec/factories/packages/packages.rb
@@ -249,117 +249,4 @@ FactoryBot.define do
package_type { :generic }
end
end
-
- factory :composer_metadatum, class: 'Packages::Composer::Metadatum' do
- package { association(:composer_package) }
-
- target_sha { '123' }
- composer_json { { name: 'foo' } }
- end
-
- factory :composer_cache_file, class: 'Packages::Composer::CacheFile' do
- group
-
- file_sha256 { '1' * 64 }
-
- transient do
- file_fixture { 'spec/fixtures/packages/composer/package.json' }
- end
-
- after(:build) do |cache_file, evaluator|
- cache_file.file = fixture_file_upload(evaluator.file_fixture)
- end
-
- trait(:object_storage) do
- file_store { Packages::Composer::CacheUploader::Store::REMOTE }
- end
- end
-
- factory :maven_metadatum, class: 'Packages::Maven::Metadatum' do
- association :package, package_type: :maven
- path { 'my/company/app/my-app/1.0-SNAPSHOT' }
- app_group { 'my.company.app' }
- app_name { 'my-app' }
- app_version { '1.0-SNAPSHOT' }
- end
-
- factory :conan_metadatum, class: 'Packages::Conan::Metadatum' do
- association :package, factory: [:conan_package, :without_loaded_metadatum], without_package_files: true
- package_username { 'username' }
- package_channel { 'stable' }
- end
-
- factory :pypi_metadatum, class: 'Packages::Pypi::Metadatum' do
- package { association(:pypi_package, without_loaded_metadatum: true) }
- required_python { '>=2.7' }
- end
-
- factory :nuget_metadatum, class: 'Packages::Nuget::Metadatum' do
- package { association(:nuget_package) }
-
- license_url { 'http://www.gitlab.com' }
- project_url { 'http://www.gitlab.com' }
- icon_url { 'http://www.gitlab.com' }
- end
-
- factory :conan_file_metadatum, class: 'Packages::Conan::FileMetadatum' do
- package_file { association(:conan_package_file, :conan_recipe_file, without_loaded_metadatum: true) }
- recipe_revision { '0' }
- conan_file_type { 'recipe_file' }
-
- trait(:recipe_file) do
- conan_file_type { 'recipe_file' }
- end
-
- trait(:package_file) do
- package_file { association(:conan_package_file, :conan_package, without_loaded_metadatum: true) }
- conan_file_type { 'package_file' }
- package_revision { '0' }
- conan_package_reference { '123456789' }
- end
- end
-
- factory :packages_dependency, class: 'Packages::Dependency' do
- sequence(:name) { |n| "@test/package-#{n}"}
- sequence(:version_pattern) { |n| "~6.2.#{n}" }
-
- trait(:rubygems) do
- sequence(:name) { |n| "gem-dependency-#{n}"}
- end
- end
-
- factory :packages_dependency_link, class: 'Packages::DependencyLink' do
- package { association(:nuget_package) }
- dependency { association(:packages_dependency) }
- dependency_type { :dependencies }
-
- trait(:with_nuget_metadatum) do
- after :build do |link|
- link.nuget_metadatum = build(:nuget_dependency_link_metadatum)
- end
- end
-
- trait(:rubygems) do
- package { association(:rubygems_package) }
- dependency { association(:packages_dependency, :rubygems) }
- end
- end
-
- factory :nuget_dependency_link_metadatum, class: 'Packages::Nuget::DependencyLinkMetadatum' do
- dependency_link { association(:packages_dependency_link) }
- target_framework { '.NETStandard2.0' }
- end
-
- factory :packages_tag, class: 'Packages::Tag' do
- package
- sequence(:name) { |n| "tag-#{n}"}
- end
-
- factory :packages_build_info, class: 'Packages::BuildInfo' do
- package
-
- trait :with_pipeline do
- association :pipeline, factory: [:ci_pipeline, :with_job]
- end
- end
end
diff --git a/spec/factories/packages/pypi/metadata.rb b/spec/factories/packages/pypi/metadata.rb
new file mode 100644
index 00000000000..00abe403bd1
--- /dev/null
+++ b/spec/factories/packages/pypi/metadata.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :pypi_metadatum, class: 'Packages::Pypi::Metadatum' do
+ package { association(:pypi_package, without_loaded_metadatum: true) }
+ required_python { '>=2.7' }
+ end
+end
diff --git a/spec/factories/pages_domains.rb b/spec/factories/pages_domains.rb
index 2ba5cbb48bf..f3f2af79b76 100644
--- a/spec/factories/pages_domains.rb
+++ b/spec/factories/pages_domains.rb
@@ -258,6 +258,18 @@ ZDXgrA==
certificate_source { :gitlab_provided }
end
+ # This contains:
+ # webdioxide.com
+ # Let's Encrypt R3
+ # ISRG Root X1 (issued by DST Root CA X3)
+ #
+ # DST Root CA X3 expired on 2021-09-30, but ISRG Root X1 should be trusted on most systems.
+ trait :letsencrypt_expired_x3_root do
+ certificate do
+ File.read(Rails.root.join('spec/fixtures/ssl', 'letsencrypt_expired_x3.pem'))
+ end
+ end
+
trait :explicit_ecdsa do
certificate do
'-----BEGIN CERTIFICATE-----
diff --git a/spec/factories/project_error_tracking_settings.rb b/spec/factories/project_error_tracking_settings.rb
index 424f462e1a0..ed743d8283c 100644
--- a/spec/factories/project_error_tracking_settings.rb
+++ b/spec/factories/project_error_tracking_settings.rb
@@ -8,6 +8,7 @@ FactoryBot.define do
token { 'access_token_123' }
project_name { 'Sentry Project' }
organization_name { 'Sentry Org' }
+ integrated { false }
trait :disabled do
enabled { false }
diff --git a/spec/factories/usage_data.rb b/spec/factories/usage_data.rb
index c02bcfc2169..fc1f5d71f39 100644
--- a/spec/factories/usage_data.rb
+++ b/spec/factories/usage_data.rb
@@ -88,17 +88,9 @@ FactoryBot.define do
create(:cluster, :group, :disabled)
create(:cluster, :instance, :disabled)
- # Applications
- create(:clusters_applications_helm, :installed, cluster: gcp_cluster)
- create(:clusters_applications_ingress, :installed, cluster: gcp_cluster)
- create(:clusters_applications_cert_manager, :installed, cluster: gcp_cluster)
- create(:clusters_applications_prometheus, :installed, cluster: gcp_cluster)
- create(:clusters_applications_crossplane, :installed, cluster: gcp_cluster)
- create(:clusters_applications_runner, :installed, cluster: gcp_cluster)
- create(:clusters_applications_knative, :installed, cluster: gcp_cluster)
- create(:clusters_applications_elastic_stack, :installed, cluster: gcp_cluster)
- create(:clusters_applications_jupyter, :installed, cluster: gcp_cluster)
- create(:clusters_applications_cilium, :installed, cluster: gcp_cluster)
+ # Cluster Integrations
+ create(:clusters_integrations_prometheus, cluster: gcp_cluster)
+ create(:clusters_integrations_elastic_stack, cluster: gcp_cluster)
create(:grafana_integration, project: projects[0], enabled: true)
create(:grafana_integration, project: projects[1], enabled: true)
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index 04bacbe14e7..325f62f6028 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -139,6 +139,8 @@ FactoryBot.define do
end
factory :omniauth_user do
+ password_automatically_set { true }
+
transient do
extern_uid { '123456' }
provider { 'ldapmain' }
diff --git a/spec/factories_spec.rb b/spec/factories_spec.rb
index 6c7c3776c4a..7dc38b25fac 100644
--- a/spec/factories_spec.rb
+++ b/spec/factories_spec.rb
@@ -29,6 +29,7 @@ RSpec.describe 'factories' do
[:pages_domain, :with_trusted_chain],
[:pages_domain, :with_trusted_expired_chain],
[:pages_domain, :explicit_ecdsa],
+ [:pages_domain, :letsencrypt_expired_x3_root],
[:project_member, :blocked],
[:remote_mirror, :ssh],
[:user_preference, :only_comments],
diff --git a/spec/features/admin/admin_appearance_spec.rb b/spec/features/admin/admin_appearance_spec.rb
index cd148642b90..cb69eac8035 100644
--- a/spec/features/admin/admin_appearance_spec.rb
+++ b/spec/features/admin/admin_appearance_spec.rb
@@ -34,6 +34,10 @@ RSpec.describe 'Admin Appearance' do
visit admin_application_settings_appearances_path
click_link "Sign-in page"
+ expect(find('#login')).to be_disabled
+ expect(find('#password')).to be_disabled
+ expect(find('button')).to be_disabled
+
expect_custom_sign_in_appearance(appearance)
end
diff --git a/spec/features/admin/admin_hook_logs_spec.rb b/spec/features/admin/admin_hook_logs_spec.rb
index 3f63bf9a15c..837cab49bd4 100644
--- a/spec/features/admin/admin_hook_logs_spec.rb
+++ b/spec/features/admin/admin_hook_logs_spec.rb
@@ -17,8 +17,8 @@ RSpec.describe 'Admin::HookLogs' do
hook_log
visit edit_admin_hook_path(system_hook)
- expect(page).to have_content('Recent Deliveries')
- expect(page).to have_content(hook_log.url)
+ expect(page).to have_content('Recent events')
+ expect(page).to have_link('View details', href: admin_hook_hook_log_path(system_hook, hook_log))
end
it 'show hook log details' do
diff --git a/spec/features/admin/admin_builds_spec.rb b/spec/features/admin/admin_jobs_spec.rb
index 42827dd5b49..36822f89c12 100644
--- a/spec/features/admin/admin_builds_spec.rb
+++ b/spec/features/admin/admin_jobs_spec.rb
@@ -2,14 +2,14 @@
require 'spec_helper'
-RSpec.describe 'Admin Builds' do
+RSpec.describe 'Admin Jobs' do
before do
admin = create(:admin)
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
end
- describe 'GET /admin/builds' do
+ describe 'GET /admin/jobs' do
let(:pipeline) { create(:ci_pipeline) }
context 'All tab' do
@@ -22,7 +22,7 @@ RSpec.describe 'Admin Builds' do
visit admin_jobs_path
- expect(page).to have_selector('.nav-links li.active', text: 'All')
+ expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'All')
expect(page).to have_selector('.row-content-block', text: 'All jobs')
expect(page.all('.build-link').size).to eq(4)
expect(page).to have_button 'Stop all jobs'
@@ -37,7 +37,7 @@ RSpec.describe 'Admin Builds' do
it 'shows a message' do
visit admin_jobs_path
- expect(page).to have_selector('.nav-links li.active', text: 'All')
+ expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'All')
expect(page).to have_content 'No jobs to show'
expect(page).not_to have_button 'Stop all jobs'
end
@@ -54,7 +54,7 @@ RSpec.describe 'Admin Builds' do
visit admin_jobs_path(scope: :pending)
- expect(page).to have_selector('.nav-links li.active', text: 'Pending')
+ expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Pending')
expect(page.find('.build-link')).to have_content(build1.id)
expect(page.find('.build-link')).not_to have_content(build2.id)
expect(page.find('.build-link')).not_to have_content(build3.id)
@@ -69,7 +69,7 @@ RSpec.describe 'Admin Builds' do
visit admin_jobs_path(scope: :pending)
- expect(page).to have_selector('.nav-links li.active', text: 'Pending')
+ expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Pending')
expect(page).to have_content 'No jobs to show'
expect(page).not_to have_button 'Stop all jobs'
end
@@ -86,7 +86,7 @@ RSpec.describe 'Admin Builds' do
visit admin_jobs_path(scope: :running)
- expect(page).to have_selector('.nav-links li.active', text: 'Running')
+ expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Running')
expect(page.find('.build-link')).to have_content(build1.id)
expect(page.find('.build-link')).not_to have_content(build2.id)
expect(page.find('.build-link')).not_to have_content(build3.id)
@@ -101,7 +101,7 @@ RSpec.describe 'Admin Builds' do
visit admin_jobs_path(scope: :running)
- expect(page).to have_selector('.nav-links li.active', text: 'Running')
+ expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Running')
expect(page).to have_content 'No jobs to show'
expect(page).not_to have_button 'Stop all jobs'
end
@@ -117,7 +117,7 @@ RSpec.describe 'Admin Builds' do
visit admin_jobs_path(scope: :finished)
- expect(page).to have_selector('.nav-links li.active', text: 'Finished')
+ expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Finished')
expect(page.find('.build-link')).not_to have_content(build1.id)
expect(page.find('.build-link')).not_to have_content(build2.id)
expect(page.find('.build-link')).to have_content(build3.id)
@@ -131,7 +131,7 @@ RSpec.describe 'Admin Builds' do
visit admin_jobs_path(scope: :finished)
- expect(page).to have_selector('.nav-links li.active', text: 'Finished')
+ expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Finished')
expect(page).to have_content 'No jobs to show'
expect(page).to have_button 'Stop all jobs'
end
diff --git a/spec/features/admin/admin_mode/login_spec.rb b/spec/features/admin/admin_mode/login_spec.rb
index 5b2dfdb2941..c8ee6c14499 100644
--- a/spec/features/admin/admin_mode/login_spec.rb
+++ b/spec/features/admin/admin_mode/login_spec.rb
@@ -121,7 +121,7 @@ RSpec.describe 'Admin Mode Login' do
end
context 'when logging in via omniauth' do
- let(:user) { create(:omniauth_user, :admin, :two_factor, extern_uid: 'my-uid', provider: 'saml')}
+ let(:user) { create(:omniauth_user, :admin, :two_factor, extern_uid: 'my-uid', provider: 'saml', password_automatically_set: false)}
let(:mock_saml_response) do
File.read('spec/fixtures/authentication/saml_response.xml')
end
diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb
index 15def00f354..a50ef34d327 100644
--- a/spec/features/admin/admin_projects_spec.rb
+++ b/spec/features/admin/admin_projects_spec.rb
@@ -96,7 +96,7 @@ RSpec.describe "Admin::Projects" do
visit admin_project_path(project)
click_button 'Search for Namespace'
- click_link 'group: web'
+ click_button 'group: web'
click_button 'Transfer'
expect(page).to have_content("Web / #{project.name}")
diff --git a/spec/features/admin/admin_serverless_domains_spec.rb b/spec/features/admin/admin_serverless_domains_spec.rb
deleted file mode 100644
index 0312e82e1ba..00000000000
--- a/spec/features/admin/admin_serverless_domains_spec.rb
+++ /dev/null
@@ -1,89 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Admin Serverless Domains', :js do
- let(:sample_domain) { build(:pages_domain) }
-
- before do
- allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
- admin = create(:admin)
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- end
-
- it 'add domain with certificate' do
- visit admin_serverless_domains_path
-
- fill_in 'pages_domain[domain]', with: 'foo.com'
- fill_in 'pages_domain[user_provided_certificate]', with: sample_domain.certificate
- fill_in 'pages_domain[user_provided_key]', with: sample_domain.key
- click_button 'Add domain'
-
- expect(current_path).to eq admin_serverless_domains_path
-
- expect(page).to have_field('pages_domain[domain]', with: 'foo.com')
- expect(page).to have_field('serverless_domain_dns', with: /^\*\.foo\.com CNAME /)
- expect(page).to have_field('serverless_domain_verification', with: /^_gitlab-pages-verification-code.foo.com TXT /)
- expect(page).not_to have_field('pages_domain[user_provided_certificate]')
- expect(page).not_to have_field('pages_domain[user_provided_key]')
-
- expect(page).to have_content 'Unverified'
- expect(page).to have_content '/CN=test-certificate'
- end
-
- it 'update domain certificate' do
- visit admin_serverless_domains_path
-
- fill_in 'pages_domain[domain]', with: 'foo.com'
- fill_in 'pages_domain[user_provided_certificate]', with: sample_domain.certificate
- fill_in 'pages_domain[user_provided_key]', with: sample_domain.key
- click_button 'Add domain'
-
- expect(current_path).to eq admin_serverless_domains_path
-
- expect(page).not_to have_field('pages_domain[user_provided_certificate]')
- expect(page).not_to have_field('pages_domain[user_provided_key]')
-
- click_button 'Replace'
-
- expect(page).to have_field('pages_domain[user_provided_certificate]')
- expect(page).to have_field('pages_domain[user_provided_key]')
-
- fill_in 'pages_domain[user_provided_certificate]', with: sample_domain.certificate
- fill_in 'pages_domain[user_provided_key]', with: sample_domain.key
-
- click_button 'Save changes'
-
- expect(page).to have_content 'Domain was successfully updated'
- expect(page).to have_content '/CN=test-certificate'
- end
-
- context 'when domain exists' do
- let!(:domain) { create(:pages_domain, :instance_serverless) }
-
- it 'displays a modal when attempting to delete a domain' do
- visit admin_serverless_domains_path
-
- click_button 'Delete domain'
-
- page.within '#modal-delete-domain' do
- expect(page).to have_content "You are about to delete #{domain.domain} from your instance."
- expect(page).to have_link('Delete domain')
- end
- end
-
- it 'displays a modal with disabled button if unable to delete a domain' do
- create(:serverless_domain_cluster, pages_domain: domain)
-
- visit admin_serverless_domains_path
-
- click_button 'Delete domain'
-
- page.within '#modal-delete-domain' do
- expect(page).to have_content "You must disassociate #{domain.domain} from all clusters it is attached to before deletion."
- expect(page).to have_link('Delete domain')
- end
- end
- end
-end
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index b25fc9f257a..1c50a7f891f 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -314,12 +314,14 @@ RSpec.describe 'Admin updates settings' do
check 'Default to Auto DevOps pipeline for all projects'
fill_in 'application_setting_auto_devops_domain', with: 'domain.com'
uncheck 'Keep the latest artifacts for all jobs in the latest successful pipelines'
+ uncheck 'Enable pipeline suggestion banner'
click_button 'Save changes'
end
expect(current_settings.auto_devops_enabled?).to be true
expect(current_settings.auto_devops_domain).to eq('domain.com')
expect(current_settings.keep_latest_artifact).to be false
+ expect(current_settings.suggest_pipeline_enabled).to be false
expect(page).to have_content "Application settings saved successfully"
end
@@ -450,14 +452,14 @@ RSpec.describe 'Admin updates settings' do
visit reporting_admin_application_settings_path
page.within('.as-spam') do
- fill_in 'reCAPTCHA Site Key', with: 'key'
- fill_in 'reCAPTCHA Private Key', with: 'key'
+ fill_in 'reCAPTCHA site key', with: 'key'
+ fill_in 'reCAPTCHA private key', with: 'key'
check 'Enable reCAPTCHA'
check 'Enable reCAPTCHA for login'
- fill_in 'IPs per user', with: 15
+ fill_in 'IP addresses per user', with: 15
check 'Enable Spam Check via external API endpoint'
fill_in 'URL of the external Spam Check endpoint', with: 'grpc://www.example.com/spamcheck'
- fill_in 'Spam Check API Key', with: 'SPAM_CHECK_API_KEY'
+ fill_in 'Spam Check API key', with: 'SPAM_CHECK_API_KEY'
click_button 'Save changes'
end
@@ -602,18 +604,54 @@ RSpec.describe 'Admin updates settings' do
expect(current_settings.issues_create_limit).to eq(0)
end
- it 'changes Files API rate limits settings' do
- visit network_admin_application_settings_path
+ shared_examples 'regular throttle rate limit settings' do
+ it 'changes rate limit settings' do
+ visit network_admin_application_settings_path
- page.within('[data-testid="files-limits-settings"]') do
- check 'Enable unauthenticated API request rate limit'
- fill_in 'Max unauthenticated API requests per period per IP', with: 10
- click_button 'Save changes'
+ page.within(".#{selector}") do
+ check 'Enable unauthenticated API request rate limit'
+ fill_in 'Maximum unauthenticated API requests per rate limit period per IP', with: 12
+ fill_in 'Unauthenticated API rate limit period in seconds', with: 34
+
+ check 'Enable authenticated API request rate limit'
+ fill_in 'Maximum authenticated API requests per rate limit period per user', with: 56
+ fill_in 'Authenticated API rate limit period in seconds', with: 78
+
+ click_button 'Save changes'
+ end
+
+ expect(page).to have_content "Application settings saved successfully"
+
+ expect(current_settings).to have_attributes(
+ "throttle_unauthenticated_#{fragment}_enabled" => true,
+ "throttle_unauthenticated_#{fragment}_requests_per_period" => 12,
+ "throttle_unauthenticated_#{fragment}_period_in_seconds" => 34,
+ "throttle_authenticated_#{fragment}_enabled" => true,
+ "throttle_authenticated_#{fragment}_requests_per_period" => 56,
+ "throttle_authenticated_#{fragment}_period_in_seconds" => 78
+ )
end
+ end
- expect(page).to have_content "Application settings saved successfully"
- expect(current_settings.throttle_unauthenticated_files_api_enabled).to be true
- expect(current_settings.throttle_unauthenticated_files_api_requests_per_period).to eq(10)
+ context 'Package Registry API rate limits' do
+ let(:selector) { 'as-packages-limits' }
+ let(:fragment) { :packages_api }
+
+ include_examples 'regular throttle rate limit settings'
+ end
+
+ context 'Files API rate limits' do
+ let(:selector) { 'as-files-limits' }
+ let(:fragment) { :files_api }
+
+ include_examples 'regular throttle rate limit settings'
+ end
+
+ context 'Deprecated API rate limits' do
+ let(:selector) { 'as-deprecated-limits' }
+ let(:fragment) { :deprecated_api }
+
+ include_examples 'regular throttle rate limit settings'
end
end
@@ -623,8 +661,6 @@ RSpec.describe 'Admin updates settings' do
end
it 'change Help page' do
- stub_feature_flags(help_page_documentation_redirect: true)
-
new_support_url = 'http://example.com/help'
new_documentation_url = 'https://docs.gitlab.com'
diff --git a/spec/features/boards/new_issue_spec.rb b/spec/features/boards/new_issue_spec.rb
index e055e8092d4..f88d31bda88 100644
--- a/spec/features/boards/new_issue_spec.rb
+++ b/spec/features/boards/new_issue_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe 'Issue Boards new issue', :js do
end
end
- it 'creates new issue' do
+ it 'creates new issue and opens sidebar' do
page.within(first('.board')) do
click_button 'New issue'
end
@@ -68,7 +68,7 @@ RSpec.describe 'Issue Boards new issue', :js do
wait_for_requests
- page.within(first('.board .issue-count-badge-count')) do
+ page.within(first('.board [data-testid="issue-count-badge"]')) do
expect(page).to have_content('1')
end
@@ -78,20 +78,6 @@ RSpec.describe 'Issue Boards new issue', :js do
expect(page).to have_content(issue.to_reference)
expect(page).to have_link(issue.title, href: /#{issue_path(issue)}/)
end
- end
-
- # TODO https://gitlab.com/gitlab-org/gitlab/-/issues/323446
- xit 'shows sidebar when creating new issue' do
- page.within(first('.board')) do
- click_button 'New issue'
- end
-
- page.within(first('.board-new-issue-form')) do
- find('.form-control').set('bug')
- click_button 'Create issue'
- end
-
- wait_for_requests
expect(page).to have_selector('[data-testid="issue-boards-sidebar"]')
end
@@ -108,10 +94,6 @@ RSpec.describe 'Issue Boards new issue', :js do
wait_for_requests
- page.within(first('.board')) do
- find('.board-card').click
- end
-
page.within('[data-testid="sidebar-labels"]') do
click_button 'Edit'
diff --git a/spec/features/boards/reload_boards_on_browser_back_spec.rb b/spec/features/boards/reload_boards_on_browser_back_spec.rb
index 36682036d48..6a09e3c9506 100644
--- a/spec/features/boards/reload_boards_on_browser_back_spec.rb
+++ b/spec/features/boards/reload_boards_on_browser_back_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe 'Ensure Boards do not show stale data on browser back', :js do
visit project_board_path(project, board)
wait_for_requests
- page.within(first('.board .issue-count-badge-count')) do
+ page.within(first('.board [data-testid="issue-count-badge"]')) do
expect(page).to have_content('0')
end
end
@@ -35,7 +35,7 @@ RSpec.describe 'Ensure Boards do not show stale data on browser back', :js do
page.go_back
wait_for_requests
- page.within(first('.board .issue-count-badge-count')) do
+ page.within(first('.board [data-testid="issue-count-badge"]')) do
expect(page).to have_content('1')
end
diff --git a/spec/features/boards/sidebar_labels_spec.rb b/spec/features/boards/sidebar_labels_spec.rb
index fa16f47f69a..511233b50c0 100644
--- a/spec/features/boards/sidebar_labels_spec.rb
+++ b/spec/features/boards/sidebar_labels_spec.rb
@@ -29,12 +29,11 @@ RSpec.describe 'Project issue boards sidebar labels', :js do
end
context 'labels' do
- # https://gitlab.com/gitlab-org/gitlab/-/issues/322725
- xit 'shows current labels when editing' do
+ it 'shows current labels when editing' do
click_card(card)
page.within('.labels') do
- click_link 'Edit'
+ click_button 'Edit'
wait_for_requests
@@ -54,9 +53,9 @@ RSpec.describe 'Project issue boards sidebar labels', :js do
wait_for_requests
- click_link bug.title
+ click_on bug.title
- find('[data-testid="close-icon"]').click
+ click_button 'Close'
wait_for_requests
@@ -79,11 +78,11 @@ RSpec.describe 'Project issue boards sidebar labels', :js do
wait_for_requests
- click_link bug.title
+ click_on bug.title
- click_link regression.title
+ click_on regression.title
- find('[data-testid="close-icon"]').click
+ click_button 'Close'
wait_for_requests
@@ -108,9 +107,9 @@ RSpec.describe 'Project issue boards sidebar labels', :js do
wait_for_requests
- click_link stretch.title
+ click_button stretch.title
- find('[data-testid="close-icon"]').click
+ click_button 'Close'
wait_for_requests
@@ -125,43 +124,22 @@ RSpec.describe 'Project issue boards sidebar labels', :js do
expect(card).not_to have_content(stretch.title)
end
- # https://gitlab.com/gitlab-org/gitlab/-/issues/324290
- xit 'creates project label' do
+ it 'creates project label' do
click_card(card)
page.within('.labels') do
- click_link 'Edit'
+ click_button 'Edit'
wait_for_requests
- click_link 'Create project label'
- fill_in 'new_label_name', with: 'test label'
+ click_on 'Create project label'
+ fill_in 'Name new label', with: 'test label'
first('.suggest-colors-dropdown a').click
click_button 'Create'
wait_for_requests
- expect(page).to have_link 'test label'
+ expect(page).to have_button 'test label'
end
expect(page).to have_selector('.board', count: 3)
end
-
- # https://gitlab.com/gitlab-org/gitlab/-/issues/324290
- xit 'creates project label and list' do
- click_card(card)
-
- page.within('.labels') do
- click_link 'Edit'
- wait_for_requests
-
- click_link 'Create project label'
- fill_in 'new_label_name', with: 'test label'
- first('.suggest-colors-dropdown a').click
- first('.js-add-list').click
- click_button 'Create'
- wait_for_requests
-
- expect(page).to have_link 'test label'
- end
- expect(page).to have_selector('.board', count: 4)
- end
end
end
diff --git a/spec/features/clusters/cluster_detail_page_spec.rb b/spec/features/clusters/cluster_detail_page_spec.rb
index cba8aaef1ef..06e3e00db7d 100644
--- a/spec/features/clusters/cluster_detail_page_spec.rb
+++ b/spec/features/clusters/cluster_detail_page_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe 'Clusterable > Show page' do
it 'does not show the environments tab' do
visit cluster_path
- expect(page).not_to have_selector('.js-cluster-nav-environments', text: 'Environments')
+ expect(page).not_to have_selector('[data-testid="cluster-environments-tab"]')
end
end
diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb
index bec474f6cfe..34a55118cb3 100644
--- a/spec/features/cycle_analytics_spec.rb
+++ b/spec/features/cycle_analytics_spec.rb
@@ -7,10 +7,13 @@ RSpec.describe 'Value Stream Analytics', :js do
let_it_be(:guest) { create(:user) }
let_it_be(:stage_table_selector) { '[data-testid="vsa-stage-table"]' }
let_it_be(:stage_table_event_selector) { '[data-testid="vsa-stage-event"]' }
+ let_it_be(:stage_table_event_title_selector) { '[data-testid="vsa-stage-event-title"]' }
+ let_it_be(:stage_table_pagination_selector) { '[data-testid="vsa-stage-pagination"]' }
+ let_it_be(:stage_table_duration_column_header_selector) { '[data-testid="vsa-stage-header-duration"]' }
let_it_be(:metrics_selector) { "[data-testid='vsa-time-metrics']" }
let_it_be(:metric_value_selector) { "[data-testid='displayValue']" }
- let(:stage_table) { page.find(stage_table_selector) }
+ let(:stage_table) { find(stage_table_selector) }
let(:project) { create(:project, :repository) }
let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
let(:milestone) { create(:milestone, project: project) }
@@ -53,6 +56,7 @@ RSpec.describe 'Value Stream Analytics', :js do
# So setting the date range to be the last 2 days should skip past the existing data
from = 2.days.ago.strftime("%Y-%m-%d")
to = 1.day.ago.strftime("%Y-%m-%d")
+ max_items_per_page = 20
around do |example|
travel_to(5.days.ago) { example.run }
@@ -60,9 +64,8 @@ RSpec.describe 'Value Stream Analytics', :js do
before do
project.add_maintainer(user)
- create_list(:issue, 2, project: project, created_at: 2.weeks.ago, milestone: milestone)
-
create_cycle(user, project, issue, mr, milestone, pipeline)
+ create_list(:issue, max_items_per_page, project: project, created_at: 2.weeks.ago, milestone: milestone)
deploy_master(user, project)
issue.metrics.update!(first_mentioned_in_commit_at: issue.metrics.first_associated_with_milestone_at + 1.hour)
@@ -81,6 +84,8 @@ RSpec.describe 'Value Stream Analytics', :js do
wait_for_requests
end
+ let(:stage_table_events) { stage_table.all(stage_table_event_selector) }
+
it 'displays metrics' do
metrics_tiles = page.find(metrics_selector)
@@ -112,20 +117,62 @@ RSpec.describe 'Value Stream Analytics', :js do
end
it 'can filter the issues by date' do
- expect(stage_table.all(stage_table_event_selector).length).to eq(3)
+ expect(page).to have_selector(stage_table_event_selector)
set_daterange(from, to)
- expect(stage_table.all(stage_table_event_selector).length).to eq(0)
+ expect(page).not_to have_selector(stage_table_event_selector)
+ expect(page).not_to have_selector(stage_table_pagination_selector)
end
it 'can filter the metrics by date' do
- expect(metrics_values).to eq(["3.0", "2.0", "1.0", "0.0"])
+ expect(metrics_values).to match_array(["21.0", "2.0", "1.0", "0.0"])
set_daterange(from, to)
expect(metrics_values).to eq(['-'] * 4)
end
+
+ it 'can sort records' do
+ # NOTE: checking that the string changes should suffice
+ # depending on the order the tests are run we might run into problems with hard coded strings
+ original_first_title = first_stage_title
+ stage_time_column.click
+
+ expect_to_be_sorted "descending"
+ expect(first_stage_title).not_to have_text(original_first_title, exact: true)
+
+ stage_time_column.click
+
+ expect_to_be_sorted "ascending"
+ expect(first_stage_title).to have_text(original_first_title, exact: true)
+ end
+
+ it 'paginates the results' do
+ original_first_title = first_stage_title
+
+ expect(page).to have_selector(stage_table_pagination_selector)
+
+ go_to_next_page
+
+ expect(page).not_to have_text(original_first_title, exact: true)
+ end
+
+ def stage_time_column
+ stage_table.find(stage_table_duration_column_header_selector).ancestor("th")
+ end
+
+ def first_stage_title
+ stage_table.all(stage_table_event_title_selector).first.text
+ end
+
+ def expect_to_be_sorted(direction)
+ expect(stage_time_column['aria-sort']).to eq(direction)
+ end
+
+ def go_to_next_page
+ page.find(stage_table_pagination_selector).find_link("Next").click
+ end
end
end
diff --git a/spec/features/dashboard/activity_spec.rb b/spec/features/dashboard/activity_spec.rb
index e75e661b513..7390edc3c47 100644
--- a/spec/features/dashboard/activity_spec.rb
+++ b/spec/features/dashboard/activity_spec.rb
@@ -13,19 +13,19 @@ RSpec.describe 'Dashboard > Activity' do
it 'shows Your Projects' do
visit activity_dashboard_path
- expect(find('.top-area .nav-tabs li.active')).to have_content('Your projects')
+ expect(find('[data-testid="dashboard-activity-tabs"] a.active')).to have_content('Your projects')
end
it 'shows Starred Projects' do
visit activity_dashboard_path(filter: 'starred')
- expect(find('.top-area .nav-tabs li.active')).to have_content('Starred projects')
+ expect(find('[data-testid="dashboard-activity-tabs"] a.active')).to have_content('Starred projects')
end
it 'shows Followed Projects' do
visit activity_dashboard_path(filter: 'followed')
- expect(find('.top-area .nav-tabs li.active')).to have_content('Followed users')
+ expect(find('[data-testid="dashboard-activity-tabs"] a.active')).to have_content('Followed users')
end
end
diff --git a/spec/features/dashboard/issuables_counter_spec.rb b/spec/features/dashboard/issuables_counter_spec.rb
index d4c6b6faa79..8e938fef155 100644
--- a/spec/features/dashboard/issuables_counter_spec.rb
+++ b/spec/features/dashboard/issuables_counter_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe 'Navigation bar counter', :use_clean_rails_memory_store_caching d
end
def expect_counters(issuable_type, count)
- dashboard_count = find('.nav-links li.active')
+ dashboard_count = find('.gl-tabs-nav li a.active')
nav_count = find(".dashboard-shortcuts-#{issuable_type}")
header_count = find(".header-content .#{issuable_type.tr('_', '-')}-count")
diff --git a/spec/features/groups/board_spec.rb b/spec/features/groups/board_spec.rb
index afe36dabcb5..aece6d790b5 100644
--- a/spec/features/groups/board_spec.rb
+++ b/spec/features/groups/board_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'Group Boards' do
it 'adds an issue to the backlog' do
page.within(find('.board', match: :first)) do
issue_title = 'New Issue'
- find(:css, '.issue-count-badge-add-button').click
+ click_button 'New issue'
wait_for_requests
diff --git a/spec/features/groups/container_registry_spec.rb b/spec/features/groups/container_registry_spec.rb
index 65374263f45..098559dc3f8 100644
--- a/spec/features/groups/container_registry_spec.rb
+++ b/spec/features/groups/container_registry_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe 'Container Registry', :js do
sign_in(user)
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: [])
+ stub_container_registry_info
end
it 'has a page title set' do
@@ -57,6 +58,16 @@ RSpec.describe 'Container Registry', :js do
expect(page).to have_content 'latest'
end
+ [ContainerRegistry::Path::InvalidRegistryPathError, Faraday::Error].each do |error_class|
+ context "when there is a #{error_class}" do
+ before do
+ expect(::ContainerRegistry::Client).to receive(:registry_info).and_raise(error_class, nil, nil)
+ end
+
+ it_behaves_like 'handling feature network errors with the container registry'
+ end
+ end
+
describe 'image repo details' do
before do
visit_container_registry_details 'my/image'
@@ -81,6 +92,7 @@ RSpec.describe 'Container Registry', :js do
expect(service).to receive(:execute).with(container_repository) { { status: :success } }
expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(container_repository.project, user, tags: ['latest']) { service }
+ first('[data-testid="additional-actions"]').click
first('[data-testid="single-delete-button"]').click
expect(find('.modal .modal-title')).to have_content _('Remove tag')
find('.modal .modal-footer .btn-danger').click
diff --git a/spec/features/groups/dependency_proxy_for_containers_spec.rb b/spec/features/groups/dependency_proxy_for_containers_spec.rb
new file mode 100644
index 00000000000..a4cd6d0f503
--- /dev/null
+++ b/spec/features/groups/dependency_proxy_for_containers_spec.rb
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Group Dependency Proxy for containers', :js do
+ include DependencyProxyHelpers
+
+ include_context 'file upload requests helpers'
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:sha) { 'a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4' }
+ let_it_be(:content) { fixture_file_upload("spec/fixtures/dependency_proxy/#{sha}.gz").read }
+
+ let(:image) { 'alpine' }
+ let(:url) { capybara_url("/v2/#{group.full_path}/dependency_proxy/containers/#{image}/blobs/sha256:#{sha}") }
+ let(:token) { 'token' }
+ let(:headers) { { 'Authorization' => "Bearer #{build_jwt(user).encoded}" } }
+
+ subject do
+ HTTParty.get(url, headers: headers)
+ end
+
+ def run_server(handler)
+ default_server = Capybara.server
+
+ Capybara.server = Capybara.servers[:puma]
+ server = Capybara::Server.new(handler)
+ server.boot
+ server
+ ensure
+ Capybara.server = default_server
+ end
+
+ let_it_be(:external_server) do
+ handler = lambda do |env|
+ if env['REQUEST_PATH'] == '/token'
+ [200, {}, [{ token: 'token' }.to_json]]
+ else
+ [200, {}, [content]]
+ end
+ end
+
+ run_server(handler)
+ end
+
+ before do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: true)
+ stub_config(dependency_proxy: { enabled: true })
+ group.add_developer(user)
+
+ stub_const("DependencyProxy::Registry::AUTH_URL", external_server.base_url)
+ stub_const("DependencyProxy::Registry::LIBRARY_URL", external_server.base_url)
+ end
+
+ shared_examples 'responds with the file' do
+ it 'sends file' do
+ expect(subject.code).to eq(200)
+ expect(subject.body).to eq(content)
+ expect(subject.headers.to_h).to include(
+ "content-type" => ["application/gzip"],
+ "content-disposition" => ["attachment; filename=\"#{sha}.gz\"; filename*=UTF-8''#{sha}.gz"],
+ "content-length" => ["32"]
+ )
+ end
+ end
+
+ shared_examples 'caches the file' do
+ it 'caches the file' do
+ expect { subject }.to change {
+ group.dependency_proxy_blobs.count
+ }.from(0).to(1)
+
+ expect(subject.code).to eq(200)
+ expect(group.dependency_proxy_blobs.first.file.read).to eq(content)
+ end
+ end
+
+ context 'fetching a blob' do
+ context 'when the blob is cached for the group' do
+ let!(:dependency_proxy_blob) { create(:dependency_proxy_blob, group: group) }
+
+ it_behaves_like 'responds with the file'
+
+ context 'dependency_proxy_workhorse feature flag disabled' do
+ before do
+ stub_feature_flags({ dependency_proxy_workhorse: false })
+ end
+
+ it_behaves_like 'responds with the file'
+ end
+ end
+ end
+
+ context 'when the blob must be downloaded' do
+ it_behaves_like 'responds with the file'
+ it_behaves_like 'caches the file'
+
+ context 'dependency_proxy_workhorse feature flag disabled' do
+ before do
+ stub_feature_flags({ dependency_proxy_workhorse: false })
+ end
+
+ it_behaves_like 'responds with the file'
+ it_behaves_like 'caches the file'
+ end
+ end
+end
diff --git a/spec/features/groups/dependency_proxy_spec.rb b/spec/features/groups/dependency_proxy_spec.rb
index 51371ddc532..d6b0bdc8ea4 100644
--- a/spec/features/groups/dependency_proxy_spec.rb
+++ b/spec/features/groups/dependency_proxy_spec.rb
@@ -3,13 +3,14 @@
require 'spec_helper'
RSpec.describe 'Group Dependency Proxy' do
- let(:developer) { create(:user) }
+ let(:owner) { create(:user) }
let(:reporter) { create(:user) }
let(:group) { create(:group) }
let(:path) { group_dependency_proxy_path(group) }
+ let(:settings_path) { group_settings_packages_and_registries_path(group) }
before do
- group.add_developer(developer)
+ group.add_owner(owner)
group.add_reporter(reporter)
enable_feature
@@ -22,42 +23,46 @@ RSpec.describe 'Group Dependency Proxy' do
visit path
- expect(page).not_to have_css('.js-dependency-proxy-toggle-area')
- expect(page).not_to have_css('.js-dependency-proxy-url')
+ expect(page).not_to have_css('[data-testid="proxy-url"]')
end
end
context 'feature is available', :js do
- context 'when logged in as group developer' do
+ context 'when logged in as group owner' do
before do
- sign_in(developer)
- visit path
+ sign_in(owner)
end
it 'sidebar menu is open' do
+ visit path
+
sidebar = find('.nav-sidebar')
expect(sidebar).to have_link _('Dependency Proxy')
end
it 'toggles defaults to enabled' do
- page.within('.js-dependency-proxy-toggle-area') do
- expect(find('.js-project-feature-toggle-input', visible: false).value).to eq('true')
- end
+ visit path
+
+ expect(page).to have_css('[data-testid="proxy-url"]')
end
it 'shows the proxy URL' do
- page.within('.edit_dependency_proxy_group_setting') do
- expect(find('.js-dependency-proxy-url').value).to have_content('/dependency_proxy/containers')
- end
+ visit path
+
+ expect(find('input[data-testid="proxy-url"]').value).to have_content('/dependency_proxy/containers')
end
it 'hides the proxy URL when feature is disabled' do
- page.within('.edit_dependency_proxy_group_setting') do
- find('.js-project-feature-toggle').click
- end
+ visit settings_path
+ wait_for_requests
+
+ click_button 'Enable Proxy'
+
+ expect(page).to have_button 'Enable Proxy', class: '!is-checked'
+
+ visit path
- expect(page).not_to have_css('.js-dependency-proxy-url')
- expect(find('.js-project-feature-toggle-input', visible: false).value).to eq('false')
+ expect(page).not_to have_css('input[data-testid="proxy-url"]')
end
end
@@ -68,18 +73,17 @@ RSpec.describe 'Group Dependency Proxy' do
end
it 'does not show the feature toggle but shows the proxy URL' do
- expect(page).not_to have_css('.js-dependency-proxy-toggle-area')
- expect(find('.js-dependency-proxy-url').value).to have_content('/dependency_proxy/containers')
+ expect(find('input[data-testid="proxy-url"]').value).to have_content('/dependency_proxy/containers')
end
end
end
context 'feature is not avaible' do
before do
- sign_in(developer)
+ sign_in(owner)
end
- context 'feature flag is disabled' do
+ context 'feature flag is disabled', :js do
before do
stub_feature_flags(dependency_proxy_for_private_groups: false)
end
@@ -90,7 +94,7 @@ RSpec.describe 'Group Dependency Proxy' do
it 'informs user that feature is only available for public groups' do
visit path
- expect(page).to have_content('Dependency proxy feature is limited to public groups for now.')
+ expect(page).to have_content('Dependency Proxy feature is limited to public groups for now.')
end
end
end
diff --git a/spec/features/groups/import_export/connect_instance_spec.rb b/spec/features/groups/import_export/connect_instance_spec.rb
index cf893e444c4..552b599a3f3 100644
--- a/spec/features/groups/import_export/connect_instance_spec.rb
+++ b/spec/features/groups/import_export/connect_instance_spec.rb
@@ -19,34 +19,12 @@ RSpec.describe 'Import/Export - Connect to another instance', :js do
end
context 'when the user provides valid credentials' do
+ source_url = 'https://gitlab.com'
+
+ include_context 'bulk imports requests context', source_url
+
it 'successfully connects to remote instance' do
- source_url = 'https://gitlab.com'
pat = 'demo-pat'
- stub_path = 'stub-group'
- total = 37
-
- stub_request(:get, "%{url}/api/v4/groups?page=1&per_page=20&top_level_only=true&min_access_level=50&search=" % { url: source_url }).to_return(
- body: [{
- id: 2595438,
- web_url: 'https://gitlab.com/groups/auto-breakfast',
- name: 'Stub',
- path: stub_path,
- full_name: 'Stub',
- full_path: stub_path
- }].to_json,
- headers: {
- 'Content-Type' => 'application/json',
- 'X-Next-Page' => 2,
- 'X-Page' => 1,
- 'X-Per-Page' => 20,
- 'X-Total' => total,
- 'X-Total-Pages' => 2
- }
- )
-
- allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
- allow(client).to receive(:validate_instance_version!).and_return(true)
- end
expect(page).to have_content 'Import groups from another instance of GitLab'
expect(page).to have_content 'Not all related objects are migrated'
@@ -56,8 +34,8 @@ RSpec.describe 'Import/Export - Connect to another instance', :js do
click_on 'Connect instance'
- expect(page).to have_content 'Showing 1-1 of %{total} groups from %{url}' % { url: source_url, total: total }
- expect(page).to have_content stub_path
+ expect(page).to have_content 'Showing 1-1 of 42 groups from %{url}' % { url: source_url }
+ expect(page).to have_content 'stub-group'
visit '/'
diff --git a/spec/features/groups/import_export/migration_history_spec.rb b/spec/features/groups/import_export/migration_history_spec.rb
new file mode 100644
index 00000000000..243bdcc13a9
--- /dev/null
+++ b/spec/features/groups/import_export/migration_history_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Import/Export - GitLab migration history', :js do
+ let_it_be(:user) { create(:user) }
+
+ let_it_be(:user_import_1) { create(:bulk_import, user: user) }
+ let_it_be(:finished_entity_1) { create(:bulk_import_entity, :finished, bulk_import: user_import_1) }
+
+ let_it_be(:user_import_2) { create(:bulk_import, user: user) }
+ let_it_be(:failed_entity_2) { create(:bulk_import_entity, :failed, bulk_import: user_import_2) }
+
+ before do
+ gitlab_sign_in(user)
+
+ visit new_group_path
+
+ click_link 'Import group'
+ end
+
+ it 'successfully displays import history' do
+ click_link 'History'
+
+ wait_for_requests
+
+ expect(page).to have_content 'Group import history'
+ expect(page.find('tbody')).to have_css('tr', count: 2)
+ end
+end
diff --git a/spec/features/groups/members/manage_groups_spec.rb b/spec/features/groups/members/manage_groups_spec.rb
index 2dfcd941b4f..d822a5ea871 100644
--- a/spec/features/groups/members/manage_groups_spec.rb
+++ b/spec/features/groups/members/manage_groups_spec.rb
@@ -63,6 +63,7 @@ RSpec.describe 'Groups > Members > Manage groups', :js do
context 'when group link exists' do
let_it_be(:shared_with_group) { create(:group) }
let_it_be(:shared_group) { create(:group) }
+ let_it_be(:expiration_date) { 5.days.from_now.to_date }
let(:additional_link_attrs) { {} }
@@ -115,29 +116,29 @@ RSpec.describe 'Groups > Members > Manage groups', :js do
click_groups_tab
page.within first_row do
- fill_in 'Expiration date', with: 5.days.from_now.to_date
+ fill_in 'Expiration date', with: expiration_date
find_field('Expiration date').native.send_keys :enter
wait_for_requests
- expect(page).to have_content(/in \d days/)
+ expect(page).to have_field('Expiration date', with: expiration_date)
end
end
context 'when expiry date is set' do
- let(:additional_link_attrs) { { expires_at: 5.days.from_now.to_date } }
+ let(:additional_link_attrs) { { expires_at: expiration_date } }
it 'clears expiry date' do
click_groups_tab
page.within first_row do
- expect(page).to have_content(/in \d days/)
+ expect(page).to have_field('Expiration date', with: expiration_date)
find('[data-testid="clear-button"]').click
wait_for_requests
- expect(page).to have_content('No expiration set')
+ expect(page).to have_field('Expiration date', with: '')
end
end
end
diff --git a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
index ddf3c6d8f9b..86185b8dd32 100644
--- a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
+++ b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Groups > Members > Owner adds member with expiration date', :js
let_it_be(:user1) { create(:user, name: 'John Doe') }
let_it_be(:group) { create(:group) }
+ let_it_be(:expiration_date) { 5.days.from_now.to_date }
let(:new_member) { create(:user, name: 'Mary Jane') }
@@ -19,10 +20,10 @@ RSpec.describe 'Groups > Members > Owner adds member with expiration date', :js
it 'expiration date is displayed in the members list' do
visit group_group_members_path(group)
- invite_member(new_member.name, role: 'Guest', expires_at: 5.days.from_now.to_date)
+ invite_member(new_member.name, role: 'Guest', expires_at: expiration_date)
page.within second_row do
- expect(page).to have_content(/in \d days/)
+ expect(page).to have_field('Expiration date', with: expiration_date)
end
end
@@ -31,27 +32,27 @@ RSpec.describe 'Groups > Members > Owner adds member with expiration date', :js
visit group_group_members_path(group)
page.within second_row do
- fill_in 'Expiration date', with: 5.days.from_now.to_date
+ fill_in 'Expiration date', with: expiration_date
find_field('Expiration date').native.send_keys :enter
wait_for_requests
- expect(page).to have_content(/in \d days/)
+ expect(page).to have_field('Expiration date', with: expiration_date)
end
end
it 'clears expiration date' do
- create(:group_member, :developer, user: new_member, group: group, expires_at: 5.days.from_now.to_date)
+ create(:group_member, :developer, user: new_member, group: group, expires_at: expiration_date)
visit group_group_members_path(group)
page.within second_row do
- expect(page).to have_content(/in \d days/)
+ expect(page).to have_field('Expiration date', with: expiration_date)
find('[data-testid="clear-button"]').click
wait_for_requests
- expect(page).to have_content('No expiration set')
+ expect(page).to have_field('Expiration date', with: '')
end
end
end
diff --git a/spec/features/groups/milestone_spec.rb b/spec/features/groups/milestone_spec.rb
index c51ee250331..4edf27e8fa4 100644
--- a/spec/features/groups/milestone_spec.rb
+++ b/spec/features/groups/milestone_spec.rb
@@ -98,9 +98,11 @@ RSpec.describe 'Group milestones' do
end
it 'counts milestones correctly' do
- expect(find('.top-area .active .badge').text).to eq("3")
- expect(find('.top-area .closed .badge').text).to eq("3")
- expect(find('.top-area .all .badge').text).to eq("6")
+ page.within '[data-testid="milestones-filter"]' do
+ expect(page).to have_content('Open 3')
+ expect(page).to have_content('Closed 3')
+ expect(page).to have_content('All 6')
+ end
end
it 'lists group and project milestones' do
diff --git a/spec/features/groups/packages_spec.rb b/spec/features/groups/packages_spec.rb
index 3c2ade6b274..0dfc7180187 100644
--- a/spec/features/groups/packages_spec.rb
+++ b/spec/features/groups/packages_spec.rb
@@ -28,6 +28,10 @@ RSpec.describe 'Group Packages' do
context 'when feature is available', :js do
before do
+      # we are simply setting the feature flag to false because the new UI has nothing to test yet
+ # when the refactor is complete or almost complete we will turn on the feature tests
+ # see https://gitlab.com/gitlab-org/gitlab/-/issues/330846 for status of this work
+ stub_feature_flags(package_list_apollo: false)
visit_group_packages
end
diff --git a/spec/features/issues/related_issues_spec.rb b/spec/features/issues/related_issues_spec.rb
index 837859bbe26..a95229d4f1b 100644
--- a/spec/features/issues/related_issues_spec.rb
+++ b/spec/features/issues/related_issues_spec.rb
@@ -41,13 +41,13 @@ RSpec.describe 'Related issues', :js do
visit project_issue_path(project, issue)
expect(page).to have_css('.related-issues-block')
- expect(page).not_to have_selector('.js-issue-count-badge-add-button')
+ expect(page).not_to have_button 'Add a related issue'
end
end
context 'when logged in but not a member' do
before do
- gitlab_sign_in(user)
+ sign_in(user)
end
it 'shows widget when internal project' do
@@ -57,7 +57,7 @@ RSpec.describe 'Related issues', :js do
visit project_issue_path(project, issue)
expect(page).to have_css('.related-issues-block')
- expect(page).not_to have_selector('.js-issue-count-badge-add-button')
+ expect(page).not_to have_button 'Add a related issue'
end
it 'does not show widget when private project' do
@@ -76,7 +76,7 @@ RSpec.describe 'Related issues', :js do
visit project_issue_path(project, issue)
expect(page).to have_css('.related-issues-block')
- expect(page).not_to have_selector('.js-issue-count-badge-add-button')
+ expect(page).not_to have_button 'Add a related issue'
end
it 'shows widget on their own public issue' do
@@ -86,13 +86,13 @@ RSpec.describe 'Related issues', :js do
visit project_issue_path(project, issue)
expect(page).to have_css('.related-issues-block')
- expect(page).not_to have_selector('.js-issue-count-badge-add-button')
+ expect(page).not_to have_button 'Add a related issue'
end
end
context 'when logged in and a guest' do
before do
- gitlab_sign_in(user)
+ sign_in(user)
end
it 'shows widget when internal project' do
@@ -103,7 +103,7 @@ RSpec.describe 'Related issues', :js do
visit project_issue_path(project, issue)
expect(page).to have_css('.related-issues-block')
- expect(page).not_to have_selector('.js-issue-count-badge-add-button')
+ expect(page).not_to have_button 'Add a related issue'
end
it 'shows widget when private project' do
@@ -114,7 +114,7 @@ RSpec.describe 'Related issues', :js do
visit project_issue_path(project, issue)
expect(page).to have_css('.related-issues-block')
- expect(page).not_to have_selector('.js-issue-count-badge-add-button')
+ expect(page).not_to have_button 'Add a related issue'
end
it 'shows widget when public project' do
@@ -125,13 +125,13 @@ RSpec.describe 'Related issues', :js do
visit project_issue_path(project, issue)
expect(page).to have_css('.related-issues-block')
- expect(page).not_to have_selector('.js-issue-count-badge-add-button')
+ expect(page).not_to have_button 'Add a related issue'
end
end
context 'when logged in and a reporter' do
before do
- gitlab_sign_in(user)
+ sign_in(user)
end
it 'shows widget when internal project' do
@@ -142,7 +142,7 @@ RSpec.describe 'Related issues', :js do
visit project_issue_path(project, issue)
expect(page).to have_css('.related-issues-block')
- expect(page).to have_selector('.js-issue-count-badge-add-button')
+ expect(page).to have_button 'Add a related issue'
end
it 'shows widget when private project' do
@@ -153,7 +153,7 @@ RSpec.describe 'Related issues', :js do
visit project_issue_path(project, issue)
expect(page).to have_css('.related-issues-block')
- expect(page).to have_selector('.js-issue-count-badge-add-button')
+ expect(page).to have_button 'Add a related issue'
end
it 'shows widget when public project' do
@@ -164,7 +164,7 @@ RSpec.describe 'Related issues', :js do
visit project_issue_path(project, issue)
expect(page).to have_css('.related-issues-block')
- expect(page).to have_selector('.js-issue-count-badge-add-button')
+ expect(page).to have_button 'Add a related issue'
end
it 'shows widget on their own public issue' do
@@ -175,7 +175,7 @@ RSpec.describe 'Related issues', :js do
visit project_issue_path(project, issue)
expect(page).to have_css('.related-issues-block')
- expect(page).to have_selector('.js-issue-count-badge-add-button')
+ expect(page).to have_button 'Add a related issue'
end
end
end
@@ -186,7 +186,7 @@ RSpec.describe 'Related issues', :js do
before do
project.add_guest(user)
- gitlab_sign_in(user)
+ sign_in(user)
end
context 'visiting some issue someone else created' do
@@ -216,7 +216,7 @@ RSpec.describe 'Related issues', :js do
before do
project.add_maintainer(user)
project_b.add_maintainer(user)
- gitlab_sign_in(user)
+ sign_in(user)
end
context 'without existing related issues' do
@@ -230,9 +230,9 @@ RSpec.describe 'Related issues', :js do
end
it 'add related issue' do
- find('.js-issue-count-badge-add-button').click
- find('.js-add-issuable-form-input').set "#{issue_b.to_reference(project)} "
- find('.js-add-issuable-form-add-button').click
+ click_button 'Add a related issue'
+ fill_in 'Paste issue link', with: "#{issue_b.to_reference(project)} "
+ click_button 'Add'
wait_for_requests
@@ -247,9 +247,9 @@ RSpec.describe 'Related issues', :js do
end
it 'add cross-project related issue' do
- find('.js-issue-count-badge-add-button').click
- find('.js-add-issuable-form-input').set "#{issue_project_b_a.to_reference(project)} "
- find('.js-add-issuable-form-add-button').click
+ click_button 'Add a related issue'
+ fill_in 'Paste issue link', with: "#{issue_project_b_a.to_reference(project)} "
+ click_button 'Add'
wait_for_requests
@@ -261,9 +261,9 @@ RSpec.describe 'Related issues', :js do
end
it 'pressing enter should submit the form' do
- find('.js-issue-count-badge-add-button').click
- find('.js-add-issuable-form-input').set "#{issue_project_b_a.to_reference(project)} "
- find('.js-add-issuable-form-input').native.send_key(:enter)
+ click_button 'Add a related issue'
+ fill_in 'Paste issue link', with: "#{issue_project_b_a.to_reference(project)} "
+ find_field('Paste issue link').native.send_key(:enter)
wait_for_requests
@@ -275,10 +275,10 @@ RSpec.describe 'Related issues', :js do
end
it 'disallows duplicate entries' do
- find('.js-issue-count-badge-add-button').click
- find('.js-add-issuable-form-input').set 'duplicate duplicate duplicate'
+ click_button 'Add a related issue'
+ fill_in 'Paste issue link', with: 'duplicate duplicate duplicate'
- items = all('.js-add-issuable-form-token-list-item')
+ items = all('.issue-token')
expect(items.count).to eq(1)
expect(items[0].text).to eq('duplicate')
@@ -288,29 +288,35 @@ RSpec.describe 'Related issues', :js do
it 'allows us to remove pending issues' do
# Tests against https://gitlab.com/gitlab-org/gitlab/issues/11625
- find('.js-issue-count-badge-add-button').click
- find('.js-add-issuable-form-input').set 'issue1 issue2 issue3 '
+ click_button 'Add a related issue'
+ fill_in 'Paste issue link', with: 'issue1 issue2 issue3 '
- items = all('.js-add-issuable-form-token-list-item')
+ items = all('.issue-token')
expect(items.count).to eq(3)
expect(items[0].text).to eq('issue1')
expect(items[1].text).to eq('issue2')
expect(items[2].text).to eq('issue3')
# Remove pending issues left to right to make sure none get stuck
- items[0].find('.js-issue-token-remove-button').click
- items = all('.js-add-issuable-form-token-list-item')
+ within items[0] do
+ click_button 'Remove'
+ end
+ items = all('.issue-token')
expect(items.count).to eq(2)
expect(items[0].text).to eq('issue2')
expect(items[1].text).to eq('issue3')
- items[0].find('.js-issue-token-remove-button').click
- items = all('.js-add-issuable-form-token-list-item')
+ within items[0] do
+ click_button 'Remove'
+ end
+ items = all('.issue-token')
expect(items.count).to eq(1)
expect(items[0].text).to eq('issue3')
- items[0].find('.js-issue-token-remove-button').click
- items = all('.js-add-issuable-form-token-list-item')
+ within items[0] do
+ click_button 'Remove'
+ end
+ items = all('.issue-token')
expect(items.count).to eq(0)
end
end
@@ -351,9 +357,9 @@ RSpec.describe 'Related issues', :js do
end
it 'add related issue' do
- find('.js-issue-count-badge-add-button').click
- find('.js-add-issuable-form-input').set "##{issue_d.iid} "
- find('.js-add-issuable-form-add-button').click
+ click_button 'Add a related issue'
+ fill_in 'Paste issue link', with: "##{issue_d.iid} "
+ click_button 'Add'
wait_for_requests
@@ -367,9 +373,9 @@ RSpec.describe 'Related issues', :js do
end
it 'add invalid related issue' do
- find('.js-issue-count-badge-add-button').click
- find('.js-add-issuable-form-input').set "#9999999 "
- find('.js-add-issuable-form-add-button').click
+ click_button 'Add a related issue'
+ fill_in 'Paste issue link', with: '#9999999 '
+ click_button 'Add'
wait_for_requests
@@ -382,9 +388,9 @@ RSpec.describe 'Related issues', :js do
end
it 'add unauthorized related issue' do
- find('.js-issue-count-badge-add-button').click
- find('.js-add-issuable-form-input').set "#{issue_project_unauthorized_a.to_reference(project)} "
- find('.js-add-issuable-form-add-button').click
+ click_button 'Add a related issue'
+ fill_in 'Paste issue link', with: "#{issue_project_unauthorized_a.to_reference(project)} "
+ click_button 'Add'
wait_for_requests
diff --git a/spec/features/markdown/copy_as_gfm_spec.rb b/spec/features/markdown/copy_as_gfm_spec.rb
index c700f878df6..d3aaf339421 100644
--- a/spec/features/markdown/copy_as_gfm_spec.rb
+++ b/spec/features/markdown/copy_as_gfm_spec.rb
@@ -201,6 +201,15 @@ RSpec.describe 'Copy as GFM', :js do
GFM
)
+ aggregate_failures('CustomEmojiFilter') do
+ gfm = ':custom_emoji:'
+
+ html = '<img class="emoji" src="custom_emoji.svg" title=":custom_emoji:" height="20" width="20">'
+
+ output_gfm = html_to_gfm(html)
+ expect(output_gfm.strip).to eq(gfm.strip)
+ end
+
aggregate_failures('MathFilter: math as transformed from HTML to KaTeX') do
gfm = '$`c = \pm\sqrt{a^2 + b^2}`$'
diff --git a/spec/features/markdown/markdown_spec.rb b/spec/features/markdown/markdown_spec.rb
index 3208ad82c03..9eff02a8c1b 100644
--- a/spec/features/markdown/markdown_spec.rb
+++ b/spec/features/markdown/markdown_spec.rb
@@ -133,8 +133,9 @@ RSpec.describe 'GitLab Markdown', :aggregate_failures do
expect(doc.at_css('td:contains("Baz")')['align']).to eq 'left'
end
+ # note that 2 are from the hardcoded <sup>, and 2 from footnotes
aggregate_failures 'permits superscript elements' do
- expect(doc).to have_selector('sup', count: 2)
+ expect(doc).to have_selector('sup', count: 4)
end
aggregate_failures 'permits subscript elements' do
@@ -148,6 +149,11 @@ RSpec.describe 'GitLab Markdown', :aggregate_failures do
aggregate_failures "removes `href` from `a` elements if it's fishy" do
expect(doc).not_to have_selector('a[href*="javascript"]')
end
+
+ aggregate_failures 'permits footnotes' do
+ expect(doc).to have_selector('section.footnotes ol li p:contains("Footnote 1")')
+ expect(doc).to have_selector('section.footnotes ol li p:contains("Footnote with w")')
+ end
end
describe 'Escaping' do
diff --git a/spec/features/merge_request/user_merges_immediately_spec.rb b/spec/features/merge_request/user_merges_immediately_spec.rb
index bca6e6ceba5..3a05f35a671 100644
--- a/spec/features/merge_request/user_merges_immediately_spec.rb
+++ b/spec/features/merge_request/user_merges_immediately_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe 'Merge requests > User merges immediately', :js do
Sidekiq::Testing.fake! do
click_button 'Merge immediately'
- expect(find('.accept-merge-request.btn-confirm')).to have_content('Merge in progress')
+ expect(find('.media-body h4')).to have_content('Merging!')
wait_for_requests
end
diff --git a/spec/features/merge_request/user_resolves_conflicts_spec.rb b/spec/features/merge_request/user_resolves_conflicts_spec.rb
index 03ab42aaccd..982e75760d7 100644
--- a/spec/features/merge_request/user_resolves_conflicts_spec.rb
+++ b/spec/features/merge_request/user_resolves_conflicts_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'Merge request > User resolves conflicts', :js do
let(:user) { project.creator }
def create_merge_request(source_branch)
- create(:merge_request, source_branch: source_branch, target_branch: 'conflict-start', source_project: project, merge_status: :unchecked) do |mr|
+ create(:merge_request, source_branch: source_branch, target_branch: 'conflict-start', source_project: project, merge_status: :unchecked, reviewers: [user]) do |mr|
mr.mark_as_unmergeable
end
end
@@ -178,6 +178,23 @@ RSpec.describe 'Merge request > User resolves conflicts', :js do
end
end
+ context 'sidebar' do
+ let(:merge_request) { create_merge_request('conflict-resolvable') }
+
+ before do
+ project.add_developer(user)
+ sign_in(user)
+
+ visit conflicts_project_merge_request_path(project, merge_request)
+ end
+
+ it 'displays reviewers' do
+ page.within '.issuable-sidebar' do
+ expect(page).to have_selector('[data-testid="reviewer"]', count: 1)
+ end
+ end
+ end
+
unresolvable_conflicts = {
'conflict-too-large' => 'when the conflicts contain a large file',
'conflict-binary-file' => 'when the conflicts contain a binary file',
diff --git a/spec/features/merge_request/user_sees_deployment_widget_spec.rb b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
index 1e547d504ef..873cc0a89c6 100644
--- a/spec/features/merge_request/user_sees_deployment_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe 'Merge request > User sees deployment widget', :js do
let(:sha) { project.commit(ref).id }
let(:pipeline) { create(:ci_pipeline, sha: sha, project: project, ref: ref) }
let!(:manual) { }
+ let(:build) { create(:ci_build, :with_deployment, environment: environment.name, pipeline: pipeline) }
+ let!(:deployment) { build.deployment }
before do
merge_request.update!(merge_commit_sha: sha)
@@ -21,8 +23,9 @@ RSpec.describe 'Merge request > User sees deployment widget', :js do
end
context 'when deployment succeeded' do
- let(:build) { create(:ci_build, :success, pipeline: pipeline) }
- let!(:deployment) { create(:deployment, :succeed, environment: environment, sha: sha, ref: ref, deployable: build) }
+ before do
+ build.success!
+ end
it 'displays that the environment is deployed' do
visit project_merge_request_path(project, merge_request)
@@ -34,9 +37,8 @@ RSpec.describe 'Merge request > User sees deployment widget', :js do
context 'when a user created a new merge request with the same SHA' do
let(:pipeline2) { create(:ci_pipeline, sha: sha, project: project, ref: 'video') }
- let(:build2) { create(:ci_build, :success, pipeline: pipeline2) }
let(:environment2) { create(:environment, project: project) }
- let!(:deployment2) { create(:deployment, environment: environment2, sha: sha, ref: 'video', deployable: build2) }
+ let!(:build2) { create(:ci_build, :with_deployment, :success, environment: environment2.name, pipeline: pipeline2) }
it 'displays one environment which is related to the pipeline' do
visit project_merge_request_path(project, merge_request)
@@ -50,8 +52,9 @@ RSpec.describe 'Merge request > User sees deployment widget', :js do
end
context 'when deployment failed' do
- let(:build) { create(:ci_build, :failed, pipeline: pipeline) }
- let!(:deployment) { create(:deployment, :failed, environment: environment, sha: sha, ref: ref, deployable: build) }
+ before do
+ build.drop!
+ end
it 'displays that the deployment failed' do
visit project_merge_request_path(project, merge_request)
@@ -63,8 +66,9 @@ RSpec.describe 'Merge request > User sees deployment widget', :js do
end
context 'when deployment running' do
- let(:build) { create(:ci_build, :running, pipeline: pipeline) }
- let!(:deployment) { create(:deployment, :running, environment: environment, sha: sha, ref: ref, deployable: build) }
+ before do
+ build.run!
+ end
it 'displays that the running deployment' do
visit project_merge_request_path(project, merge_request)
@@ -76,8 +80,8 @@ RSpec.describe 'Merge request > User sees deployment widget', :js do
end
context 'when deployment will happen' do
- let(:build) { create(:ci_build, :created, pipeline: pipeline) }
- let!(:deployment) { create(:deployment, environment: environment, sha: sha, ref: ref, deployable: build) }
+ let(:build) { create(:ci_build, :with_deployment, environment: environment.name, pipeline: pipeline) }
+ let!(:deployment) { build.deployment }
it 'displays that the environment name' do
visit project_merge_request_path(project, merge_request)
@@ -89,8 +93,9 @@ RSpec.describe 'Merge request > User sees deployment widget', :js do
end
context 'when deployment was cancelled' do
- let(:build) { create(:ci_build, :canceled, pipeline: pipeline) }
- let!(:deployment) { create(:deployment, :canceled, environment: environment, sha: sha, ref: ref, deployable: build) }
+ before do
+ build.cancel!
+ end
it 'displays that the environment name' do
visit project_merge_request_path(project, merge_request)
@@ -102,11 +107,10 @@ RSpec.describe 'Merge request > User sees deployment widget', :js do
end
context 'with stop action' do
- let(:build) { create(:ci_build, :success, pipeline: pipeline) }
- let!(:deployment) { create(:deployment, :succeed, environment: environment, sha: sha, ref: ref, deployable: build) }
let(:manual) { create(:ci_build, :manual, pipeline: pipeline, name: 'close_app') }
before do
+ build.success!
deployment.update!(on_stop: manual.name)
visit project_merge_request_path(project, merge_request)
wait_for_requests
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index 2f7758143a1..f74b097ab3e 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -45,18 +45,12 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
let!(:environment) { create(:environment, project: project) }
let(:sha) { project.commit(merge_request.source_branch).sha }
let(:pipeline) { create(:ci_pipeline, status: 'success', sha: sha, project: project, ref: merge_request.source_branch) }
- let(:build) { create(:ci_build, :success, pipeline: pipeline) }
-
- let!(:deployment) do
- create(:deployment, :succeed,
- environment: environment,
- ref: merge_request.source_branch,
- deployable: build,
- sha: sha)
- end
+ let!(:build) { create(:ci_build, :with_deployment, :success, environment: environment.name, pipeline: pipeline) }
+ let!(:deployment) { build.deployment }
before do
merge_request.update!(head_pipeline: pipeline)
+ deployment.update!(status: :success)
visit project_merge_request_path(project, merge_request)
end
diff --git a/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb b/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb
index 4bb6c3265a4..3893a9cdf28 100644
--- a/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb
+++ b/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb
@@ -6,9 +6,10 @@ RSpec.describe 'Merge request > User sees suggest pipeline', :js do
let(:merge_request) { create(:merge_request) }
let(:project) { merge_request.source_project }
let(:user) { project.creator }
+ let(:suggest_pipeline_enabled) { true }
before do
- stub_application_setting(auto_devops_enabled: false)
+ stub_application_setting(suggest_pipeline_enabled: suggest_pipeline_enabled, auto_devops_enabled: false)
project.add_maintainer(user)
sign_in(user)
visit project_merge_request_path(project, merge_request)
@@ -66,4 +67,12 @@ RSpec.describe 'Merge request > User sees suggest pipeline', :js do
# nudge 4
expect(page).to have_content("That's it, well done!")
end
+
+ context 'when feature setting is disabled' do
+ let(:suggest_pipeline_enabled) { false }
+
+ it 'does not show the suggest pipeline widget' do
+ expect(page).not_to have_content('Are you adding technical debt or code vulnerabilities?')
+ end
+ end
end
diff --git a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
index 275a87ca391..d2bde320c54 100644
--- a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
+++ b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe 'Merge request > User selects branches for new MR', :js do
click_button "Check out branch"
- expect(page).to have_content 'git checkout -b "orphaned-branch" "origin/orphaned-branch"'
+ expect(page).to have_content 'git checkout -b \'orphaned-branch\' \'origin/orphaned-branch\''
end
it 'allows filtering multiple dropdowns' do
diff --git a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
index dbc88d0cce2..690a292937a 100644
--- a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
+++ b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
@@ -159,7 +159,12 @@ RSpec.describe 'User comments on a diff', :js do
wait_for_requests
expect(page).to have_content('Remove from batch')
- expect(page).to have_content("Apply suggestions #{index + 1}")
+
+ if index < 1
+ expect(page).to have_content("Apply suggestion")
+ else
+ expect(page).to have_content("Apply #{index + 1} suggestions")
+ end
end
end
@@ -167,13 +172,12 @@ RSpec.describe 'User comments on a diff', :js do
click_button('Remove from batch')
wait_for_requests
- expect(page).to have_content('Apply suggestion')
expect(page).to have_content('Add suggestion to batch')
end
page.within("[id='#{files[1][:hash]}']") do
expect(page).to have_content('Remove from batch')
- expect(page).to have_content('Apply suggestions 1')
+ expect(page).to have_content('Apply suggestion')
end
end
diff --git a/spec/features/profiles/password_spec.rb b/spec/features/profiles/password_spec.rb
index 893dd2c76e0..7059697354d 100644
--- a/spec/features/profiles/password_spec.rb
+++ b/spec/features/profiles/password_spec.rb
@@ -89,7 +89,7 @@ RSpec.describe 'Profile > Password' do
shared_examples 'user enters an incorrect current password' do
subject do
page.within '.update-password' do
- fill_in 'user_current_password', with: user_current_password
+ fill_in 'user_password', with: user_current_password
fill_passwords(new_password, new_password)
end
end
@@ -131,7 +131,7 @@ RSpec.describe 'Profile > Password' do
end
context 'when current password is incorrect' do
- let(:user_current_password) {'invalid' }
+ let(:user_current_password) { 'invalid' }
it_behaves_like 'user enters an incorrect current password'
end
@@ -139,7 +139,7 @@ RSpec.describe 'Profile > Password' do
context 'when the password reset is successful' do
subject do
page.within '.update-password' do
- fill_in "user_current_password", with: user.password
+ fill_in "user_password", with: user.password
fill_passwords(new_password, new_password)
end
end
@@ -169,8 +169,8 @@ RSpec.describe 'Profile > Password' do
expect(current_path).to eq new_profile_password_path
- fill_in :user_current_password, with: user.password
- fill_in :user_password, with: '12345678'
+ fill_in :user_password, with: user.password
+ fill_in :user_new_password, with: '12345678'
fill_in :user_password_confirmation, with: '12345678'
click_button 'Set new password'
diff --git a/spec/features/profiles/two_factor_auths_spec.rb b/spec/features/profiles/two_factor_auths_spec.rb
index 7f3ce617846..3f5789e119a 100644
--- a/spec/features/profiles/two_factor_auths_spec.rb
+++ b/spec/features/profiles/two_factor_auths_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe 'Two factor auths' do
end
context 'when user authenticates with an external service' do
- let_it_be(:user) { create(:omniauth_user, password_automatically_set: true) }
+ let_it_be(:user) { create(:omniauth_user) }
it 'does not require the current password to set up two factor authentication', :js do
visit profile_two_factor_auth_path
@@ -88,7 +88,7 @@ RSpec.describe 'Two factor auths' do
end
context 'when user authenticates with an external service' do
- let_it_be(:user) { create(:omniauth_user, :two_factor, password_automatically_set: true) }
+ let_it_be(:user) { create(:omniauth_user, :two_factor) }
it 'does not require the current_password to disable two-factor authentication', :js do
visit profile_two_factor_auth_path
diff --git a/spec/features/profiles/user_edit_profile_spec.rb b/spec/features/profiles/user_edit_profile_spec.rb
index af085b63155..026da5814e3 100644
--- a/spec/features/profiles/user_edit_profile_spec.rb
+++ b/spec/features/profiles/user_edit_profile_spec.rb
@@ -19,6 +19,17 @@ RSpec.describe 'User edit profile' do
wait_for_requests if respond_to?(:wait_for_requests)
end
+ def update_user_email
+ fill_in 'user_email', with: 'new-email@example.com'
+ click_button 'Update profile settings'
+ end
+
+ def confirm_password(password)
+ fill_in 'password-confirmation', with: password
+ click_button 'Confirm password'
+ wait_for_requests if respond_to?(:wait_for_requests)
+ end
+
def visit_user
visit user_path(user)
wait_for_requests
@@ -88,16 +99,42 @@ RSpec.describe 'User edit profile' do
expect(page).to have_content('Website url is not a valid URL')
end
- describe 'when I change my email' do
+ describe 'when I change my email', :js do
before do
user.send_reset_password_instructions
end
+ it 'will prompt to confirm my password' do
+ expect(user.reset_password_token?).to be true
+
+ update_user_email
+
+ expect(page).to have_selector('[data-testid="password-prompt-modal"]')
+ end
+
+ context 'when prompted to confirm password' do
+ before do
+ update_user_email
+ end
+
+ it 'with the correct password successfully updates' do
+ confirm_password(user.password)
+
+ expect(page).to have_text("Profile was successfully updated")
+ end
+
+ it 'with the incorrect password fails to update' do
+ confirm_password("Fake password")
+
+ expect(page).to have_text("Invalid password")
+ end
+ end
+
it 'clears the reset password token' do
expect(user.reset_password_token?).to be true
- fill_in 'user_email', with: 'new-email@example.com'
- submit_settings
+ update_user_email
+ confirm_password(user.password)
user.reload
expect(user.confirmation_token).not_to be_nil
@@ -524,14 +561,11 @@ RSpec.describe 'User edit profile' do
page.find("a", text: "Nuku'alofa").click
- tz = page.find('.user-time-preferences #user_timezone', visible: false)
-
- expect(tz.value).to eq('Pacific/Tongatapu')
+ expect(page).to have_field(:user_timezone, with: 'Pacific/Tongatapu', type: :hidden)
end
- it 'timezone defaults to servers default' do
- timezone_name = Time.zone.tzinfo.name
- expect(page.find('.user-time-preferences #user_timezone', visible: false).value).to eq(timezone_name)
+ it 'timezone defaults to empty' do
+ expect(page).to have_field(:user_timezone, with: '', type: :hidden)
end
end
end
diff --git a/spec/features/projects/badges/coverage_spec.rb b/spec/features/projects/badges/coverage_spec.rb
index 1760ec880bc..5c1bc1ad239 100644
--- a/spec/features/projects/badges/coverage_spec.rb
+++ b/spec/features/projects/badges/coverage_spec.rb
@@ -12,6 +12,120 @@ RSpec.describe 'test coverage badge' do
sign_in(user)
end
+ it 'user requests coverage badge image for pipeline with custom limits - 80% good' do
+ create_pipeline do |pipeline|
+ create_build(pipeline, coverage: 80, name: 'test:1')
+ end
+
+ show_test_coverage_badge(min_good: 75, min_acceptable: 50, min_medium: 25)
+
+ expect_coverage_badge_color(:good)
+ expect_coverage_badge('80.00%')
+ end
+
+ it 'user requests coverage badge image for pipeline with custom limits - 74% - bad config' do
+ create_pipeline do |pipeline|
+ create_build(pipeline, coverage: 74, name: 'test:1')
+ end
+ # User sets a minimum good value that is lower than min acceptable and min medium,
+ # in which case we force the min acceptable value to be min good -1 and min medium value to be min acceptable -1
+ show_test_coverage_badge(min_good: 75, min_acceptable: 76, min_medium: 77)
+
+ expect_coverage_badge_color(:acceptable)
+ expect_coverage_badge('74.00%')
+ end
+
+ it 'user requests coverage badge image for pipeline with custom limits - 73% - bad config' do
+ create_pipeline do |pipeline|
+ create_build(pipeline, coverage: 73, name: 'test:1')
+ end
+ # User sets a minimum good value that is lower than min acceptable and min medium,
+ # in which case we force the min acceptable value to be min good -1 and min medium value to be min acceptable -1
+ show_test_coverage_badge(min_good: 75, min_acceptable: 76, min_medium: 77)
+
+ expect_coverage_badge_color(:medium)
+ expect_coverage_badge('73.00%')
+ end
+
+ it 'user requests coverage badge image for pipeline with custom limits - 72% - partial config - low' do
+ create_pipeline do |pipeline|
+ create_build(pipeline, coverage: 72, name: 'test:1')
+ end
+ # User only sets good to 75 and leaves the others on the default settings,
+ # in which case we force the min acceptable value to be min good -1 and min medium value to be min acceptable -1
+ show_test_coverage_badge(min_good: 75)
+
+ expect_coverage_badge_color(:low)
+ expect_coverage_badge('72.00%')
+ end
+
+ it 'user requests coverage badge image for pipeline with custom limits - 72% - partial config - medium' do
+ create_pipeline do |pipeline|
+ create_build(pipeline, coverage: 72, name: 'test:1')
+ end
+ # User only sets good to 74 and leaves the others on the default settings,
+ # in which case we force the min acceptable value to be min good -1 and min medium value to be min acceptable -1
+ show_test_coverage_badge(min_good: 74)
+
+ expect_coverage_badge_color(:medium)
+ expect_coverage_badge('72.00%')
+ end
+
+ it 'user requests coverage badge image for pipeline with custom limits - 72% - partial config - medium v2' do
+ create_pipeline do |pipeline|
+ create_build(pipeline, coverage: 72, name: 'test:1')
+ end
+ # User only sets medium to 72 and leaves the others on the defaults good as 95 and acceptable as 90
+ show_test_coverage_badge(min_medium: 72)
+
+ expect_coverage_badge_color(:medium)
+ expect_coverage_badge('72.00%')
+ end
+
+ it 'user requests coverage badge image for pipeline with custom limits - 70% acceptable' do
+ create_pipeline do |pipeline|
+ create_build(pipeline, coverage: 70, name: 'test:1')
+ end
+
+ show_test_coverage_badge(min_good: 75, min_acceptable: 50, min_medium: 25)
+
+ expect_coverage_badge_color(:acceptable)
+ expect_coverage_badge('70.00%')
+ end
+
+ it 'user requests coverage badge image for pipeline with custom limits - 30% medium' do
+ create_pipeline do |pipeline|
+ create_build(pipeline, coverage: 30, name: 'test:1')
+ end
+
+ show_test_coverage_badge(min_good: 75, min_acceptable: 50, min_medium: 25)
+
+ expect_coverage_badge_color(:medium)
+ expect_coverage_badge('30.00%')
+ end
+
+ it 'user requests coverage badge image for pipeline with custom limits - 20% low' do
+ create_pipeline do |pipeline|
+ create_build(pipeline, coverage: 20, name: 'test:1')
+ end
+
+ show_test_coverage_badge(min_good: 75, min_acceptable: 50, min_medium: 25)
+
+ expect_coverage_badge_color(:low)
+ expect_coverage_badge('20.00%')
+ end
+
+ it 'user requests coverage badge image for pipeline with custom limits - nonsense values which use the defaults' do
+ create_pipeline do |pipeline|
+ create_build(pipeline, coverage: 92, name: 'test:1')
+ end
+
+ show_test_coverage_badge(min_good: "nonsense", min_acceptable: "rubbish", min_medium: "NaN")
+
+ expect_coverage_badge_color(:acceptable)
+ expect_coverage_badge('92.00%')
+ end
+
it 'user requests coverage badge image for pipeline' do
create_pipeline do |pipeline|
create_build(pipeline, coverage: 100, name: 'test:1')
@@ -20,6 +134,7 @@ RSpec.describe 'test coverage badge' do
show_test_coverage_badge
+ expect_coverage_badge_color(:good)
expect_coverage_badge('95.00%')
end
@@ -32,6 +147,7 @@ RSpec.describe 'test coverage badge' do
show_test_coverage_badge(job: 'coverage')
+ expect_coverage_badge_color(:medium)
expect_coverage_badge('85.00%')
end
@@ -73,8 +189,9 @@ RSpec.describe 'test coverage badge' do
create(:ci_build, :success, opts)
end
- def show_test_coverage_badge(job: nil)
- visit coverage_project_badges_path(project, ref: :master, job: job, format: :svg)
+ def show_test_coverage_badge(job: nil, min_good: nil, min_acceptable: nil, min_medium: nil)
+ visit coverage_project_badges_path(project, ref: :master, job: job, min_good: min_good,
+ min_acceptable: min_acceptable, min_medium: min_medium, format: :svg)
end
def expect_coverage_badge(coverage)
@@ -82,4 +199,12 @@ RSpec.describe 'test coverage badge' do
expect(page.response_headers['Content-Type']).to include('image/svg+xml')
expect(svg.at(%Q{text:contains("#{coverage}")})).to be_truthy
end
+
+ def expect_coverage_badge_color(color)
+ svg = Nokogiri::HTML(page.body)
+ expect(page.response_headers['Content-Type']).to include('image/svg+xml')
+ badge_color = svg.xpath("//path[starts-with(@d, 'M62')]")[0].attributes['fill'].to_s
+ expected_badge_color = Gitlab::Ci::Badge::Coverage::Template::STATUS_COLOR[color]
+ expect(badge_color).to eq(expected_badge_color)
+ end
end
diff --git a/spec/features/projects/badges/pipeline_badge_spec.rb b/spec/features/projects/badges/pipeline_badge_spec.rb
index 9d8f9872a1a..e3a01ab6fa2 100644
--- a/spec/features/projects/badges/pipeline_badge_spec.rb
+++ b/spec/features/projects/badges/pipeline_badge_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe 'Pipeline Badge' do
visit pipeline_project_badges_path(project, ref: ref, format: :svg)
expect(page.status_code).to eq(200)
- expect(page.response_headers['Cache-Control']).to eq('no-store')
+ expect(page.response_headers['Cache-Control']).to eq('private, no-store')
end
end
diff --git a/spec/features/projects/ci/lint_spec.rb b/spec/features/projects/ci/lint_spec.rb
index 0d9ea6331a7..7f10c6afcd5 100644
--- a/spec/features/projects/ci/lint_spec.rb
+++ b/spec/features/projects/ci/lint_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'CI Lint', :js, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297782' do
+RSpec.describe 'CI Lint', :js do
include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
let(:project) { create(:project, :repository) }
diff --git a/spec/features/projects/container_registry_spec.rb b/spec/features/projects/container_registry_spec.rb
index 40d0260eafd..eec50c3a66a 100644
--- a/spec/features/projects/container_registry_spec.rb
+++ b/spec/features/projects/container_registry_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe 'Container Registry', :js do
sign_in(user)
project.add_developer(user)
stub_container_registry_config(enabled: true)
+ stub_container_registry_info
stub_container_registry_tags(repository: :any, tags: [])
end
@@ -96,6 +97,7 @@ RSpec.describe 'Container Registry', :js do
expect(service).to receive(:execute).with(container_repository) { { status: :success } }
expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(container_repository.project, user, tags: ['1']) { service }
+ first('[data-testid="additional-actions"]').click
first('[data-testid="single-delete-button"]').click
expect(find('.modal .modal-title')).to have_content _('Remove tag')
find('.modal .modal-footer .btn-danger').click
@@ -121,6 +123,16 @@ RSpec.describe 'Container Registry', :js do
expect(page).to have_content('Digest: N/A')
end
end
+
+ [ContainerRegistry::Path::InvalidRegistryPathError, Faraday::Error].each do |error_class|
+ context "when there is a #{error_class}" do
+ before do
+ expect(::ContainerRegistry::Client).to receive(:registry_info).and_raise(error_class, nil, nil)
+ end
+
+ it_behaves_like 'handling feature network errors with the container registry'
+ end
+ end
end
describe 'image repo details when image has no name' do
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index 9413fae02e0..34e2ca7c8a7 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -226,6 +226,7 @@ RSpec.describe 'Environments page', :js do
end
it 'does not show terminal button' do
+ expect(page).not_to have_button(_('More actions'))
expect(page).not_to have_terminal_button
end
@@ -273,6 +274,7 @@ RSpec.describe 'Environments page', :js do
let(:role) { :maintainer }
it 'shows the terminal button' do
+ click_button(_('More actions'))
expect(page).to have_terminal_button
end
end
@@ -281,6 +283,7 @@ RSpec.describe 'Environments page', :js do
let(:role) { :developer }
it 'does not show terminal button' do
+ expect(page).not_to have_button(_('More actions'))
expect(page).not_to have_terminal_button
end
end
@@ -515,7 +518,7 @@ RSpec.describe 'Environments page', :js do
end
def have_terminal_button
- have_link(nil, href: terminal_project_environment_path(project, environment))
+ have_link(_('Terminal'), href: terminal_project_environment_path(project, environment))
end
def visit_environments(project, **opts)
diff --git a/spec/features/projects/files/user_creates_directory_spec.rb b/spec/features/projects/files/user_creates_directory_spec.rb
index 46b93d738e1..5ad7641a5be 100644
--- a/spec/features/projects/files/user_creates_directory_spec.rb
+++ b/spec/features/projects/files/user_creates_directory_spec.rb
@@ -98,12 +98,14 @@ RSpec.describe 'Projects > Files > User creates a directory', :js do
expect(page).to have_content(fork_message)
find('.add-to-tree').click
+ wait_for_requests
click_link('New directory')
fill_in(:dir_name, with: 'new_directory')
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Create directory')
fork = user.fork_of(project2.reload)
+ wait_for_requests
expect(current_path).to eq(project_new_merge_request_path(fork))
end
diff --git a/spec/features/projects/files/user_uploads_files_spec.rb b/spec/features/projects/files/user_uploads_files_spec.rb
index 54e816d3d13..cc621dfd9f8 100644
--- a/spec/features/projects/files/user_uploads_files_spec.rb
+++ b/spec/features/projects/files/user_uploads_files_spec.rb
@@ -19,13 +19,15 @@ RSpec.describe 'Projects > Files > User uploads files' do
wait_for_requests
end
- include_examples 'it uploads and commits a new text file'
+ [true, false].each do |value|
+ include_examples 'it uploads and commits a new text file', drop: value
- include_examples 'it uploads and commits a new image file'
+ include_examples 'it uploads and commits a new image file', drop: value
- include_examples 'it uploads and commits a new pdf file'
+ include_examples 'it uploads and commits a new pdf file', drop: value
- include_examples 'it uploads a file to a sub-directory'
+ include_examples 'it uploads a file to a sub-directory', drop: value
+ end
end
context 'when a user does not have write access' do
@@ -35,6 +37,8 @@ RSpec.describe 'Projects > Files > User uploads files' do
visit(project_tree_path(project2))
end
- include_examples 'it uploads and commits a new file to a forked project'
+ [true, false].each do |value|
+ include_examples 'it uploads and commits a new file to a forked project', drop: value
+ end
end
end
diff --git a/spec/features/projects/infrastructure_registry_spec.rb b/spec/features/projects/infrastructure_registry_spec.rb
index 16dd96e6c02..ee35e02b5e8 100644
--- a/spec/features/projects/infrastructure_registry_spec.rb
+++ b/spec/features/projects/infrastructure_registry_spec.rb
@@ -45,10 +45,8 @@ RSpec.describe 'Infrastructure Registry' do
expect(page).to have_css('.packages-app h1[data-testid="title"]', text: terraform_module.name)
- page.within(%Q([name="#{terraform_module.name}"])) do
- expect(page).to have_content('Provision instructions')
- expect(page).to have_content('Registry setup')
- end
+ expect(page).to have_content('Provision instructions')
+ expect(page).to have_content('Registry setup')
end
end
diff --git a/spec/features/projects/jobs/user_browses_jobs_spec.rb b/spec/features/projects/jobs/user_browses_jobs_spec.rb
index dbcd7b5caf5..8538b894869 100644
--- a/spec/features/projects/jobs/user_browses_jobs_spec.rb
+++ b/spec/features/projects/jobs/user_browses_jobs_spec.rb
@@ -2,36 +2,276 @@
require 'spec_helper'
+def visit_jobs_page
+ visit(project_jobs_path(project))
+
+ wait_for_requests
+end
+
RSpec.describe 'User browses jobs' do
- let!(:build) { create(:ci_build, :coverage, pipeline: pipeline) }
- let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.sha, ref: 'master') }
- let(:project) { create(:project, :repository, namespace: user.namespace) }
- let(:user) { create(:user) }
+ describe 'with jobs_table_vue feature flag turned off' do
+ let!(:build) { create(:ci_build, :coverage, pipeline: pipeline) }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.sha, ref: 'master') }
+ let(:project) { create(:project, :repository, namespace: user.namespace) }
+ let(:user) { create(:user) }
- before do
- stub_feature_flags(jobs_table_vue: false)
- project.add_maintainer(user)
- project.enable_ci
- project.update_attribute(:build_coverage_regex, /Coverage (\d+)%/)
+ before do
+ stub_feature_flags(jobs_table_vue: false)
+ project.add_maintainer(user)
+ project.enable_ci
+ project.update_attribute(:build_coverage_regex, /Coverage (\d+)%/)
- sign_in(user)
+ sign_in(user)
- visit(project_jobs_path(project))
- end
+ visit(project_jobs_path(project))
+ end
- it 'shows the coverage' do
- page.within('td.coverage') do
- expect(page).to have_content('99.9%')
+ it 'shows the coverage' do
+ page.within('td.coverage') do
+ expect(page).to have_content('99.9%')
+ end
+ end
+
+ context 'with a failed job' do
+ let!(:build) { create(:ci_build, :coverage, :failed, pipeline: pipeline) }
+
+ it 'displays a tooltip with the failure reason' do
+ page.within('.ci-table') do
+ failed_job_link = page.find('.ci-failed')
+ expect(failed_job_link[:title]).to eq('Failed - (unknown failure)')
+ end
+ end
end
end
- context 'with a failed job' do
- let!(:build) { create(:ci_build, :coverage, :failed, pipeline: pipeline) }
+ describe 'with jobs_table_vue feature flag turned on', :js do
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+
+ before do
+ stub_feature_flags(jobs_table_vue: true)
+
+ project.add_maintainer(user)
+ project.enable_ci
+
+ sign_in(user)
+ end
+
+ describe 'header tabs' do
+ before do
+ visit_jobs_page
+ end
+
+ it 'shows a tab for All jobs and count' do
+ expect(page.find('[data-testid="jobs-all-tab"]').text).to include('All')
+ expect(page.find('[data-testid="jobs-all-tab"] .badge').text).to include('0')
+ end
+
+ it 'shows a tab for Pending jobs and count' do
+ expect(page.find('[data-testid="jobs-pending-tab"]').text).to include('Pending')
+ expect(page.find('[data-testid="jobs-pending-tab"] .badge').text).to include('0')
+ end
+
+ it 'shows a tab for Running jobs and count' do
+ expect(page.find('[data-testid="jobs-running-tab"]').text).to include('Running')
+ expect(page.find('[data-testid="jobs-running-tab"] .badge').text).to include('0')
+ end
+
+ it 'shows a tab for Finished jobs and count' do
+ expect(page.find('[data-testid="jobs-finished-tab"]').text).to include('Finished')
+ expect(page.find('[data-testid="jobs-finished-tab"] .badge').text).to include('0')
+ end
+
+ it 'updates the content when tab is clicked' do
+ page.find('[data-testid="jobs-finished-tab"]').click
+ wait_for_requests
+
+ expect(page).to have_content('No jobs to show')
+ end
+ end
+
+ describe 'Empty state' do
+ before do
+ visit_jobs_page
+ end
+
+ it 'renders an empty state' do
+ expect(page).to have_content 'Use jobs to automate your tasks'
+ expect(page).to have_content 'Create CI/CD configuration file'
+ end
+ end
+
+ describe 'Job actions' do
+ let!(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: 'master') }
+
+ context 'when a job can be canceled' do
+ let!(:job) do
+ create(:ci_build, pipeline: pipeline,
+ stage: 'test')
+ end
+
+ before do
+ job.run
+
+ visit_jobs_page
+ end
+
+ it 'cancels a job successfully' do
+ page.find('[data-testid="cancel-button"]').click
+
+ wait_for_requests
+
+ expect(page).to have_selector('.ci-canceled')
+ end
+ end
+
+ context 'when a job can be retried' do
+ let!(:job) do
+ create(:ci_build, pipeline: pipeline,
+ stage: 'test')
+ end
+
+ before do
+ job.drop
+
+ visit_jobs_page
+ end
+
+ it 'retries a job successfully' do
+ page.find('[data-testid="retry"]').click
+
+ wait_for_requests
+
+ expect(page).to have_selector('.ci-pending')
+ end
+ end
+
+ context 'with a scheduled job' do
+ let!(:scheduled_job) { create(:ci_build, :scheduled, pipeline: pipeline, name: 'build') }
+
+ before do
+ visit_jobs_page
+ end
+
+ it 'plays a job successfully' do
+ page.find('[data-testid="play-scheduled"]').click
+
+ page.within '#play-job-modal' do
+ page.find_button('OK').click
+ end
+
+ wait_for_requests
+
+ expect(page).to have_selector('.ci-pending')
+ end
+
+ it 'unschedules a job successfully' do
+ page.find('[data-testid="unschedule"]').click
+
+ wait_for_requests
+
+ expect(page).to have_selector('.ci-manual')
+ end
+ end
+
+ context 'with downloadable artifacts' do
+ let!(:with_artifacts) do
+ build = create(:ci_build, :success,
+ pipeline: pipeline,
+ name: 'rspec tests',
+ stage: 'test')
+
+ create(:ci_job_artifact, :codequality, job: build)
+ end
+
+ before do
+ visit_jobs_page
+ end
+
+ it 'shows the download artifacts button' do
+ expect(page).to have_selector('[data-testid="download-artifacts"]')
+ end
+ end
+
+ context 'with artifacts expired' do
+ let!(:with_artifacts_expired) do
+ create(:ci_build, :expired, :success,
+ pipeline: pipeline,
+ name: 'rspec',
+ stage: 'test')
+ end
+
+ before do
+ visit_jobs_page
+ end
+
+ it 'does not show the download artifacts button' do
+ expect(page).not_to have_selector('[data-testid="download-artifacts"]')
+ end
+ end
+ end
+
+ describe 'Jobs table' do
+ let!(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: 'master') }
+
+ context 'column links' do
+ let!(:job) do
+ create(:ci_build, pipeline: pipeline,
+ stage: 'test')
+ end
+
+ before do
+ job.run
+
+ visit_jobs_page
+ end
+
+ it 'contains a link to the pipeline' do
+ expect(page.find('[data-testid="pipeline-id"]')).to have_content "##{pipeline.id}"
+ end
+
+ it 'contains a link to the job sha' do
+ expect(page.find('[data-testid="job-sha"]')).to have_content "#{job.sha[0..7]}"
+ end
+
+ it 'contains a link to the job id' do
+ expect(page.find('[data-testid="job-id-link"]')).to have_content "#{job.id}"
+ end
+
+ it 'contains a link to the job ref' do
+ expect(page.find('[data-testid="job-ref"]')).to have_content "#{job.ref}"
+ end
+ end
+ end
+
+ describe 'when user is not logged in' do
+ before do
+ sign_out(user)
+ end
+
+ context 'when project is public' do
+ let(:public_project) { create(:project, :public, :repository) }
+
+ context 'without jobs' do
+ it 'shows an empty state' do
+ visit project_jobs_path(public_project)
+ wait_for_requests
+
+ expect(page).to have_content 'Use jobs to automate your tasks'
+ end
+ end
+ end
+
+ context 'when project is private' do
+ let(:private_project) { create(:project, :private, :repository) }
+
+ it 'redirects the user to sign_in and displays the flash alert' do
+ visit project_jobs_path(private_project)
+ wait_for_requests
- it 'displays a tooltip with the failure reason' do
- page.within('.ci-table') do
- failed_job_link = page.find('.ci-failed')
- expect(failed_job_link[:title]).to eq('Failed - (unknown failure)')
+ expect(page).to have_content 'You need to sign in'
+ expect(page.current_path).to eq("/users/sign_in")
+ end
end
end
end
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index a1416f3f563..7ccd5c51493 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
end
it "shows Pending tab jobs" do
- expect(page).to have_selector('.nav-links li.active', text: 'Pending')
+ expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Pending')
expect(page).to have_content job.short_sha
expect(page).to have_content job.ref
expect(page).to have_content job.name
@@ -60,7 +60,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
end
it "shows Running tab jobs" do
- expect(page).to have_selector('.nav-links li.active', text: 'Running')
+ expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Running')
expect(page).to have_content job.short_sha
expect(page).to have_content job.ref
expect(page).to have_content job.name
@@ -74,7 +74,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
end
it "shows Finished tab jobs" do
- expect(page).to have_selector('.nav-links li.active', text: 'Finished')
+ expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Finished')
expect(page).to have_content('Use jobs to automate your tasks')
end
end
@@ -86,7 +86,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
end
it "shows All tab jobs" do
- expect(page).to have_selector('.nav-links li.active', text: 'All')
+ expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'All')
expect(page).to have_content job.short_sha
expect(page).to have_content job.ref
expect(page).to have_content job.name
diff --git a/spec/features/projects/members/groups_with_access_list_spec.rb b/spec/features/projects/members/groups_with_access_list_spec.rb
index 84a972b3027..eb32570448b 100644
--- a/spec/features/projects/members/groups_with_access_list_spec.rb
+++ b/spec/features/projects/members/groups_with_access_list_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Projects > Members > Groups with access list', :js do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public) }
+ let_it_be(:expiration_date) { 5.days.from_now.to_date }
let(:additional_link_attrs) { {} }
let!(:group_link) { create(:project_group_link, project: project, group: group, **additional_link_attrs) }
@@ -37,27 +38,27 @@ RSpec.describe 'Projects > Members > Groups with access list', :js do
it 'updates expiry date' do
page.within find_group_row(group) do
- fill_in 'Expiration date', with: 5.days.from_now.to_date
+ fill_in 'Expiration date', with: expiration_date
find_field('Expiration date').native.send_keys :enter
wait_for_requests
- expect(page).to have_content(/in \d days/)
+ expect(page).to have_field('Expiration date', with: expiration_date)
end
end
context 'when link has expiry date set' do
- let(:additional_link_attrs) { { expires_at: 5.days.from_now.to_date } }
+ let(:additional_link_attrs) { { expires_at: expiration_date } }
it 'clears expiry date' do
page.within find_group_row(group) do
- expect(page).to have_content(/in \d days/)
+ expect(page).to have_field('Expiration date', with: expiration_date)
find('[data-testid="clear-button"]').click
wait_for_requests
- expect(page).to have_content('No expiration set')
+ expect(page).to have_field('Expiration date', with: '')
end
end
end
diff --git a/spec/features/projects/members/invite_group_spec.rb b/spec/features/projects/members/invite_group_spec.rb
index 8c3646125a5..b674cad0312 100644
--- a/spec/features/projects/members/invite_group_spec.rb
+++ b/spec/features/projects/members/invite_group_spec.rb
@@ -165,6 +165,8 @@ RSpec.describe 'Project > Members > Invite group', :js do
let(:project) { create(:project) }
let!(:group) { create(:group) }
+ let_it_be(:expiration_date) { 5.days.from_now.to_date }
+
around do |example|
freeze_time { example.run }
end
@@ -176,15 +178,14 @@ RSpec.describe 'Project > Members > Invite group', :js do
visit project_project_members_path(project)
- invite_group(group.name, role: 'Guest', expires_at: 5.days.from_now)
+ invite_group(group.name, role: 'Guest', expires_at: expiration_date)
end
it 'the group link shows the expiration time with a warning class' do
setup
click_link 'Groups'
- expect(find_group_row(group)).to have_content(/in \d days/)
- expect(find_group_row(group)).to have_selector('.gl-text-orange-500')
+ expect(page).to have_field('Expiration date', with: expiration_date)
end
end
diff --git a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
index c1b14cf60e7..830ada29a2e 100644
--- a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
+++ b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe 'Projects > Members > Maintainer adds member with expiration date
let_it_be(:maintainer) { create(:user) }
let_it_be(:project) { create(:project) }
+ let_it_be(:three_days_from_now) { 3.days.from_now.to_date }
+ let_it_be(:five_days_from_now) { 5.days.from_now.to_date }
let(:new_member) { create(:user) }
@@ -22,39 +24,39 @@ RSpec.describe 'Projects > Members > Maintainer adds member with expiration date
it 'expiration date is displayed in the members list' do
visit project_project_members_path(project)
- invite_member(new_member.name, role: 'Guest', expires_at: 5.days.from_now.to_date)
+ invite_member(new_member.name, role: 'Guest', expires_at: five_days_from_now)
page.within find_member_row(new_member) do
- expect(page).to have_content(/in \d days/)
+ expect(page).to have_field('Expiration date', with: five_days_from_now)
end
end
it 'changes expiration date' do
- project.team.add_users([new_member.id], :developer, expires_at: 3.days.from_now.to_date)
+ project.team.add_users([new_member.id], :developer, expires_at: three_days_from_now)
visit project_project_members_path(project)
page.within find_member_row(new_member) do
- fill_in 'Expiration date', with: 5.days.from_now.to_date
+ fill_in 'Expiration date', with: five_days_from_now
find_field('Expiration date').native.send_keys :enter
wait_for_requests
- expect(page).to have_content(/in \d days/)
+ expect(page).to have_field('Expiration date', with: five_days_from_now)
end
end
it 'clears expiration date' do
- project.team.add_users([new_member.id], :developer, expires_at: 5.days.from_now.to_date)
+ project.team.add_users([new_member.id], :developer, expires_at: five_days_from_now)
visit project_project_members_path(project)
page.within find_member_row(new_member) do
- expect(page).to have_content(/in \d days/)
+ expect(page).to have_field('Expiration date', with: five_days_from_now)
find('[data-testid="clear-button"]').click
wait_for_requests
- expect(page).to have_content('No expiration set')
+ expect(page).to have_field('Expiration date', with: '')
end
end
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index 876bc82d16c..f61eaccf5b9 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe 'Project navbar' do
stub_config(registry: { enabled: false })
insert_package_nav(_('Infrastructure'))
insert_infrastructure_registry_nav
+ insert_infrastructure_google_cloud_nav
end
it_behaves_like 'verified navigation bar' do
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index 39f9d3b331b..dacbaa826a0 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -296,12 +296,16 @@ RSpec.describe 'New project', :js do
expect(git_import_instructions).to have_content 'Git repository URL'
end
- it 'reports error if repo URL does not end with .git' do
+ it 'reports error if repo URL is not a valid Git repository' do
+ stub_request(:get, "http://foo/bar/info/refs?service=git-upload-pack").to_return(status: 200, body: "not-a-git-repo")
+
fill_in 'project_import_url', with: 'http://foo/bar'
# simulate blur event
find('body').click
- expect(page).to have_text('A repository URL usually ends in a .git suffix')
+ wait_for_requests
+
+ expect(page).to have_text('There is not a valid Git repository at this URL')
end
it 'keeps "Import project" tab open after form validation error' do
diff --git a/spec/features/projects/packages_spec.rb b/spec/features/projects/packages_spec.rb
index 7fcc8200b1c..9b1e87192f5 100644
--- a/spec/features/projects/packages_spec.rb
+++ b/spec/features/projects/packages_spec.rb
@@ -27,6 +27,10 @@ RSpec.describe 'Packages' do
context 'when feature is available', :js do
before do
+    # we are simply setting the feature flag to false because the new UI has nothing to test yet
+ # when the refactor is complete or almost complete we will turn on the feature tests
+ # see https://gitlab.com/gitlab-org/gitlab/-/issues/330846 for status of this work
+ stub_feature_flags(package_list_apollo: false)
visit_project_packages
end
diff --git a/spec/features/projects/settings/monitor_settings_spec.rb b/spec/features/projects/settings/monitor_settings_spec.rb
index e3d75c30e5e..3f6c4646f00 100644
--- a/spec/features/projects/settings/monitor_settings_spec.rb
+++ b/spec/features/projects/settings/monitor_settings_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'Projects > Settings > For a forked project', :js do
let_it_be(:project) { create(:project, :repository, create_templates: :issue) }
- let(:user) { project.owner}
+ let(:user) { project.owner }
before do
sign_in(user)
@@ -16,7 +16,8 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
visit project_path(project)
wait_for_requests
- expect(page).to have_selector('.sidebar-sub-level-items a[aria-label="Monitor"]', text: 'Monitor', visible: false)
+ expect(page).to have_selector('.sidebar-sub-level-items a[aria-label="Monitor"]',
+ text: 'Monitor', visible: :hidden)
end
end
@@ -42,7 +43,7 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
expect(find_field(send_email)).to be_checked
end
- it 'updates form values', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/333665' do
+ it 'updates form values' do
check(create_issue)
uncheck(send_email)
click_on('No template selected')
@@ -52,10 +53,8 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
click_settings_tab
expect(find_field(create_issue)).to be_checked
- expect(page).to have_selector(:id, 'alert-integration-settings-issue-template', text: 'bug')
-
- click_settings_tab
expect(find_field(send_email)).not_to be_checked
+ expect(page).to have_selector(:id, 'alert-integration-settings-issue-template', text: 'bug')
end
def click_settings_tab
@@ -68,13 +67,15 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
page.within '[data-testid="alert-integration-settings"]' do
click_button 'Save changes'
end
+
+ wait_for_all_requests
end
end
- context 'error tracking settings form' do
+ describe 'error tracking settings form' do
let(:sentry_list_projects_url) { 'http://sentry.example.com/api/0/projects/' }
- context 'success path' do
+ context 'when project dropdown is loaded' do
let(:projects_sample_response) do
Gitlab::Utils.deep_indifferent_access(
Gitlab::Json.parse(fixture_file('sentry/list_projects_sample_response.json'))
@@ -97,7 +98,9 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
within '.js-error-tracking-settings' do
click_button('Expand')
+ choose('cloud-hosted Sentry')
end
+
expect(page).to have_content('Sentry API URL')
expect(page.body).to include('Error Tracking')
expect(page).to have_button('Connect')
@@ -121,7 +124,7 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
end
end
- context 'project dropdown fails to load' do
+ context 'when project dropdown fails to load' do
before do
WebMock.stub_request(:get, sentry_list_projects_url)
.to_return(
@@ -140,8 +143,10 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
within '.js-error-tracking-settings' do
click_button('Expand')
+ choose('cloud-hosted Sentry')
+ check('Active')
end
- check('Active')
+
fill_in('error-tracking-api-host', with: 'http://sentry.example.com')
fill_in('error-tracking-token', with: 'token')
@@ -151,7 +156,7 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
end
end
- context 'integrated error tracking backend' do
+ context 'with integrated error tracking backend' do
it 'successfully fills and submits the form' do
visit project_settings_operations_path(project)
@@ -175,11 +180,17 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
wait_for_requests
assert_text('Your changes have been saved')
+
+ within '.js-error-tracking-settings' do
+ click_button('Expand')
+ end
+
+ expect(page).to have_content('Paste this DSN into your Sentry SDK')
end
end
end
- context 'grafana integration settings form' do
+ describe 'grafana integration settings form' do
it 'successfully fills and completes the form' do
visit project_settings_operations_path(project)
diff --git a/spec/features/projects/settings/webhooks_settings_spec.rb b/spec/features/projects/settings/webhooks_settings_spec.rb
index 528fd58cbe6..8d73ffecd46 100644
--- a/spec/features/projects/settings/webhooks_settings_spec.rb
+++ b/spec/features/projects/settings/webhooks_settings_spec.rb
@@ -115,8 +115,8 @@ RSpec.describe 'Projects > Settings > Webhook Settings' do
hook_log
visit edit_project_hook_path(project, hook)
- expect(page).to have_content('Recent Deliveries')
- expect(page).to have_content(hook_log.url)
+ expect(page).to have_content('Recent events')
+ expect(page).to have_link('View details', href: hook_log.present.details_path)
end
it 'show hook log details' do
diff --git a/spec/features/projects/show/user_uploads_files_spec.rb b/spec/features/projects/show/user_uploads_files_spec.rb
index eb230082bfa..51e41397439 100644
--- a/spec/features/projects/show/user_uploads_files_spec.rb
+++ b/spec/features/projects/show/user_uploads_files_spec.rb
@@ -21,13 +21,15 @@ RSpec.describe 'Projects > Show > User uploads files' do
wait_for_requests
end
- include_examples 'it uploads and commits a new text file'
+ [true, false].each do |value|
+ include_examples 'it uploads and commits a new text file', drop: value
- include_examples 'it uploads and commits a new image file'
+ include_examples 'it uploads and commits a new image file', drop: value
- include_examples 'it uploads and commits a new pdf file'
+ include_examples 'it uploads and commits a new pdf file', drop: value
- include_examples 'it uploads a file to a sub-directory'
+ include_examples 'it uploads a file to a sub-directory', drop: value
+ end
end
context 'when a user does not have write access' do
@@ -37,7 +39,9 @@ RSpec.describe 'Projects > Show > User uploads files' do
visit(project_path(project2))
end
- include_examples 'it uploads and commits a new file to a forked project'
+ [true, false].each do |value|
+ include_examples 'it uploads and commits a new file to a forked project', drop: value
+ end
end
context 'when in the empty_repo_upload experiment' do
@@ -50,13 +54,17 @@ RSpec.describe 'Projects > Show > User uploads files' do
context 'with an empty repo' do
let(:project) { create(:project, :empty_repo, creator: user) }
- include_examples 'uploads and commits a new text file via "upload file" button'
+ [true, false].each do |value|
+ include_examples 'uploads and commits a new text file via "upload file" button', drop: value
+ end
end
context 'with a nonempty repo' do
let(:project) { create(:project, :repository, creator: user) }
- include_examples 'uploads and commits a new text file via "upload file" button'
+ [true, false].each do |value|
+ include_examples 'uploads and commits a new text file via "upload file" button', drop: value
+ end
end
end
end
diff --git a/spec/features/projects/user_creates_project_spec.rb b/spec/features/projects/user_creates_project_spec.rb
index 9f08759603e..5d482f9fbd0 100644
--- a/spec/features/projects/user_creates_project_spec.rb
+++ b/spec/features/projects/user_creates_project_spec.rb
@@ -33,6 +33,29 @@ RSpec.describe 'User creates a project', :js do
expect(page).to have_content(project.url_to_repo)
end
+ it 'creates a new project that is not blank' do
+ stub_experiments(new_project_sast_enabled: 'candidate')
+
+ visit(new_project_path)
+
+ find('[data-qa-panel-name="blank_project"]').click # rubocop:disable QA/SelectorUsage
+ fill_in(:project_name, with: 'With initial commits')
+
+ expect(page).to have_checked_field 'Initialize repository with a README'
+ expect(page).to have_checked_field 'Enable Static Application Security Testing (SAST)'
+
+ page.within('#content-body') do
+ click_button('Create project')
+ end
+
+ project = Project.last
+
+ expect(current_path).to eq(project_path(project))
+ expect(page).to have_content('With initial commits')
+ expect(page).to have_content('Configure SAST in `.gitlab-ci.yml`, creating this file if it does not already exist')
+ expect(page).to have_content('README.md Initial commit')
+ end
+
context 'in a subgroup they do not own' do
let(:parent) { create(:group) }
let!(:subgroup) { create(:group, parent: parent) }
diff --git a/spec/features/security/project/internal_access_spec.rb b/spec/features/security/project/internal_access_spec.rb
index 9dcef13757a..4012a302196 100644
--- a/spec/features/security/project/internal_access_spec.rb
+++ b/spec/features/security/project/internal_access_spec.rb
@@ -553,6 +553,7 @@ RSpec.describe "Internal Project Access" do
before do
stub_container_registry_tags(repository: :any, tags: ['latest'])
stub_container_registry_config(enabled: true)
+ stub_container_registry_info
project.container_repositories << container_repository
end
diff --git a/spec/features/security/project/private_access_spec.rb b/spec/features/security/project/private_access_spec.rb
index 5a200bea80a..aa34ccce2c1 100644
--- a/spec/features/security/project/private_access_spec.rb
+++ b/spec/features/security/project/private_access_spec.rb
@@ -570,6 +570,7 @@ RSpec.describe "Private Project Access" do
before do
stub_container_registry_tags(repository: :any, tags: ['latest'])
stub_container_registry_config(enabled: true)
+ stub_container_registry_info
project.container_repositories << container_repository
end
diff --git a/spec/features/security/project/public_access_spec.rb b/spec/features/security/project/public_access_spec.rb
index 8ceb6920e77..abe128c6f78 100644
--- a/spec/features/security/project/public_access_spec.rb
+++ b/spec/features/security/project/public_access_spec.rb
@@ -552,6 +552,7 @@ RSpec.describe "Public Project Access" do
before do
stub_container_registry_tags(repository: :any, tags: ['latest'])
stub_container_registry_config(enabled: true)
+ stub_container_registry_info
project.container_repositories << container_repository
end
diff --git a/spec/features/snippets/notes_on_personal_snippets_spec.rb b/spec/features/snippets/notes_on_personal_snippets_spec.rb
index e03f71c5352..fc88cd9205c 100644
--- a/spec/features/snippets/notes_on_personal_snippets_spec.rb
+++ b/spec/features/snippets/notes_on_personal_snippets_spec.rb
@@ -70,8 +70,8 @@ RSpec.describe 'Comments on personal snippets', :js do
context 'when submitting a note' do
it 'shows a valid form' do
is_expected.to have_css('.js-main-target-form', visible: true, count: 1)
- expect(find('.js-main-target-form .js-comment-button').value)
- .to eq('Comment')
+ expect(find('.js-main-target-form .js-comment-button button', match: :first))
+ .to have_content('Comment')
page.within('.js-main-target-form') do
expect(page).not_to have_link('Cancel')
diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb
index 79c4057a8b9..10c1c2cb26e 100644
--- a/spec/features/users/login_spec.rb
+++ b/spec/features/users/login_spec.rb
@@ -171,6 +171,18 @@ RSpec.describe 'Login', :clean_gitlab_redis_shared_state do
end
end
+ describe 'with OneTrust authentication' do
+ before do
+ stub_config(extra: { one_trust_id: SecureRandom.uuid })
+ end
+
+ it 'has proper Content-Security-Policy headers' do
+ visit root_path
+
+ expect(response_headers['Content-Security-Policy']).to include('https://cdn.cookielaw.org https://*.onetrust.com')
+ end
+ end
+
describe 'with two-factor authentication', :js do
def enter_code(code)
fill_in 'user_otp_attempt', with: code
@@ -866,8 +878,8 @@ RSpec.describe 'Login', :clean_gitlab_redis_shared_state do
expect(current_path).to eq(new_profile_password_path)
- fill_in 'user_current_password', with: '12345678'
- fill_in 'user_password', with: 'new password'
+ fill_in 'user_password', with: '12345678'
+ fill_in 'user_new_password', with: 'new password'
fill_in 'user_password_confirmation', with: 'new password'
click_button 'Set new password'
@@ -875,7 +887,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_shared_state do
end
end
- context 'when the user does not have an email configured' do
+ context 'when the user does not have an email configured', :js do
let(:user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'saml', email: 'temp-email-for-oauth-user@gitlab.localhost') }
before do
diff --git a/spec/features/users/show_spec.rb b/spec/features/users/show_spec.rb
index e629d329033..61672662fbe 100644
--- a/spec/features/users/show_spec.rb
+++ b/spec/features/users/show_spec.rb
@@ -81,6 +81,7 @@ RSpec.describe 'User page' do
context 'timezone' do
let_it_be(:timezone) { 'America/Los_Angeles' }
+ let_it_be(:local_time_selector) { '[data-testid="user-local-time"]' }
before do
travel_to Time.find_zone(timezone).local(2021, 7, 20, 15, 30, 45)
@@ -92,7 +93,19 @@ RSpec.describe 'User page' do
it 'shows local time' do
subject
- expect(page).to have_content('3:30 PM')
+ within local_time_selector do
+ expect(page).to have_content('3:30 PM')
+ end
+ end
+ end
+
+ context 'when timezone is not set' do
+ let_it_be(:user) { create(:user, timezone: nil) }
+
+ it 'does not show local time' do
+ subject
+
+ expect(page).not_to have_selector(local_time_selector)
end
end
@@ -102,7 +115,9 @@ RSpec.describe 'User page' do
it 'shows local time using the configured default timezone (UTC in this case)' do
subject
- expect(page).to have_content('10:30 PM')
+ within local_time_selector do
+ expect(page).to have_content('10:30 PM')
+ end
end
end
end
diff --git a/spec/finders/ci/commit_statuses_finder_spec.rb b/spec/finders/ci/commit_statuses_finder_spec.rb
index 2e26e38f4b4..9f66b53dd1f 100644
--- a/spec/finders/ci/commit_statuses_finder_spec.rb
+++ b/spec/finders/ci/commit_statuses_finder_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Ci::CommitStatusesFinder, '#execute' do
let_it_be(:user) { create(:user) }
context 'tag refs' do
- let_it_be(:tags) { TagsFinder.new(project.repository, {}).execute }
+ let_it_be(:tags) { project.repository.tags }
let(:subject) { described_class.new(project, project.repository, user, tags).execute }
@@ -131,7 +131,7 @@ RSpec.describe Ci::CommitStatusesFinder, '#execute' do
end
context 'CI pipelines visible to' do
- let_it_be(:tags) { TagsFinder.new(project.repository, {}).execute }
+ let_it_be(:tags) { project.repository.tags }
let(:subject) { described_class.new(project, project.repository, user, tags).execute }
@@ -161,7 +161,7 @@ RSpec.describe Ci::CommitStatusesFinder, '#execute' do
context 'when not a member of a private project' do
let(:private_project) { create(:project, :private, :repository) }
- let(:private_tags) { TagsFinder.new(private_tags.repository, {}).execute }
+ let(:private_tags) { private_project.repository.tags }
let(:private_subject) { described_class.new(private_project, private_project.repository, user, tags).execute }
before do
diff --git a/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb b/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
index 65f6dc0ba74..8a802e9660b 100644
--- a/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
+++ b/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do
let(:actor) { developer_in_both }
it 'returns all pipelines' do
- is_expected.to eq([pipeline_in_fork, pipeline_in_parent])
+ is_expected.to match_array([pipeline_in_fork, pipeline_in_parent])
end
end
@@ -52,7 +52,7 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do
let(:actor) { reporter_in_parent_and_developer_in_fork }
it 'returns all pipelines' do
- is_expected.to eq([pipeline_in_fork, pipeline_in_parent])
+ is_expected.to match_array([pipeline_in_fork, pipeline_in_parent])
end
end
@@ -60,7 +60,7 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do
let(:actor) { developer_in_parent }
it 'returns pipelines in parent' do
- is_expected.to eq([pipeline_in_parent])
+ is_expected.to match_array([pipeline_in_parent])
end
end
@@ -68,7 +68,7 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do
let(:actor) { developer_in_fork }
it 'returns pipelines in fork' do
- is_expected.to eq([pipeline_in_fork])
+ is_expected.to match_array([pipeline_in_fork])
end
end
@@ -97,7 +97,7 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do
shared_examples 'returning pipelines with proper ordering' do
let!(:all_pipelines) do
- merge_request.all_commit_shas.map do |sha|
+ merge_request.recent_diff_head_shas.map do |sha|
create(:ci_empty_pipeline,
project: project, sha: sha, ref: merge_request.source_branch)
end
@@ -135,12 +135,92 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do
end
context 'when pipelines exist for the branch and merge request' do
+ shared_examples 'returns all pipelines for merge request' do
+ it 'returns merge request pipeline first' do
+ expect(subject.all).to eq([detached_merge_request_pipeline, branch_pipeline])
+ end
+
+ context 'when there are a branch pipeline and a merge request pipeline' do
+ let!(:branch_pipeline_2) do
+ create(:ci_pipeline, source: :push, project: project,
+ ref: source_ref, sha: shas.first)
+ end
+
+ let!(:detached_merge_request_pipeline_2) do
+ create(:ci_pipeline, source: :merge_request_event, project: project,
+ ref: source_ref, sha: shas.first, merge_request: merge_request)
+ end
+
+ it 'returns merge request pipelines first' do
+ expect(subject.all)
+ .to eq([detached_merge_request_pipeline_2,
+ detached_merge_request_pipeline,
+ branch_pipeline_2,
+ branch_pipeline])
+ end
+ end
+
+ context 'when there are multiple merge request pipelines from the same branch' do
+ let!(:branch_pipeline_2) do
+ create(:ci_pipeline, source: :push, project: project,
+ ref: source_ref, sha: shas.first)
+ end
+
+ let!(:branch_pipeline_with_sha_not_belonging_to_merge_request) do
+ create(:ci_pipeline, source: :push, project: project, ref: source_ref)
+ end
+
+ let!(:detached_merge_request_pipeline_2) do
+ create(:ci_pipeline, source: :merge_request_event, project: project,
+ ref: source_ref, sha: shas.first, merge_request: merge_request_2)
+ end
+
+ let(:merge_request_2) do
+ create(:merge_request, source_project: project, source_branch: source_ref,
+ target_project: project, target_branch: 'stable')
+ end
+
+ before do
+ shas.each.with_index do |sha, index|
+ create(:merge_request_diff_commit,
+ merge_request_diff: merge_request_2.merge_request_diff,
+ sha: sha, relative_order: index)
+ end
+ end
+
+ it 'returns only related merge request pipelines' do
+ expect(subject.all)
+ .to eq([detached_merge_request_pipeline,
+ branch_pipeline_2,
+ branch_pipeline])
+
+ expect(described_class.new(merge_request_2, nil).all)
+ .to match_array([detached_merge_request_pipeline_2, branch_pipeline_2, branch_pipeline])
+ end
+ end
+
+ context 'when detached merge request pipeline is run on head ref of the merge request' do
+ let!(:detached_merge_request_pipeline) do
+ create(:ci_pipeline, source: :merge_request_event, project: project,
+ ref: merge_request.ref_path, sha: shas.second, merge_request: merge_request)
+ end
+
+ it 'sets the head ref of the merge request to the pipeline ref' do
+ expect(detached_merge_request_pipeline.ref).to match(%r{refs/merge-requests/\d+/head})
+ end
+
+ it 'includes the detached merge request pipeline even though the ref is custom path' do
+ expect(merge_request.all_pipelines).to include(detached_merge_request_pipeline)
+ end
+ end
+ end
+
let(:source_ref) { 'feature' }
let(:target_ref) { 'master' }
let!(:branch_pipeline) do
create(:ci_pipeline, source: :push, project: project,
- ref: source_ref, sha: shas.second)
+ ref: source_ref, sha: merge_request.merge_request_diff.head_commit_sha)
end
let!(:tag_pipeline) do
@@ -149,97 +229,31 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do
let!(:detached_merge_request_pipeline) do
create(:ci_pipeline, source: :merge_request_event, project: project,
- ref: source_ref, sha: shas.second, merge_request: merge_request)
+ ref: source_ref, sha: shas.second, merge_request: merge_request)
end
let(:merge_request) do
create(:merge_request, source_project: project, source_branch: source_ref,
- target_project: project, target_branch: target_ref)
+ target_project: project, target_branch: target_ref)
end
let(:project) { create(:project, :repository) }
let(:shas) { project.repository.commits(source_ref, limit: 2).map(&:id) }
- before do
- create(:merge_request_diff_commit,
- merge_request_diff: merge_request.merge_request_diff,
- sha: shas.second, relative_order: 1)
- end
-
- it 'returns merge request pipeline first' do
- expect(subject.all).to eq([detached_merge_request_pipeline, branch_pipeline])
- end
-
- context 'when there are a branch pipeline and a merge request pipeline' do
- let!(:branch_pipeline_2) do
- create(:ci_pipeline, source: :push, project: project,
- ref: source_ref, sha: shas.first)
- end
-
- let!(:detached_merge_request_pipeline_2) do
- create(:ci_pipeline, source: :merge_request_event, project: project,
- ref: source_ref, sha: shas.first, merge_request: merge_request)
- end
-
- it 'returns merge request pipelines first' do
- expect(subject.all)
- .to eq([detached_merge_request_pipeline_2,
- detached_merge_request_pipeline,
- branch_pipeline_2,
- branch_pipeline])
- end
- end
-
- context 'when there are multiple merge request pipelines from the same branch' do
- let!(:branch_pipeline_2) do
- create(:ci_pipeline, source: :push, project: project,
- ref: source_ref, sha: shas.first)
- end
-
- let!(:detached_merge_request_pipeline_2) do
- create(:ci_pipeline, source: :merge_request_event, project: project,
- ref: source_ref, sha: shas.first, merge_request: merge_request_2)
- end
-
- let(:merge_request_2) do
- create(:merge_request, source_project: project, source_branch: source_ref,
- target_project: project, target_branch: 'stable')
- end
-
+ context 'when `decomposed_ci_query_in_pipelines_for_merge_request_finder` feature flag enabled' do
before do
- shas.each.with_index do |sha, index|
- create(:merge_request_diff_commit,
- merge_request_diff: merge_request_2.merge_request_diff,
- sha: sha, relative_order: index)
- end
+ stub_feature_flags(decomposed_ci_query_in_pipelines_for_merge_request_finder: merge_request.target_project)
end
- it 'returns only related merge request pipelines' do
- expect(subject.all)
- .to eq([detached_merge_request_pipeline,
- branch_pipeline_2,
- branch_pipeline])
-
- expect(described_class.new(merge_request_2, nil).all)
- .to eq([detached_merge_request_pipeline_2,
- branch_pipeline_2,
- branch_pipeline])
- end
+ it_behaves_like 'returns all pipelines for merge request'
end
- context 'when detached merge request pipeline is run on head ref of the merge request' do
- let!(:detached_merge_request_pipeline) do
- create(:ci_pipeline, source: :merge_request_event, project: project,
- ref: merge_request.ref_path, sha: shas.second, merge_request: merge_request)
- end
-
- it 'sets the head ref of the merge request to the pipeline ref' do
- expect(detached_merge_request_pipeline.ref).to match(%r{refs/merge-requests/\d+/head})
+ context 'when `decomposed_ci_query_in_pipelines_for_merge_request_finder` feature flag disabled' do
+ before do
+ stub_feature_flags(decomposed_ci_query_in_pipelines_for_merge_request_finder: false)
end
- it 'includes the detached merge request pipeline even though the ref is custom path' do
- expect(merge_request.all_pipelines).to include(detached_merge_request_pipeline)
- end
+ it_behaves_like 'returns all pipelines for merge request'
end
end
end
diff --git a/spec/finders/clusters/agents_finder_spec.rb b/spec/finders/clusters/agents_finder_spec.rb
new file mode 100644
index 00000000000..0996d76b723
--- /dev/null
+++ b/spec/finders/clusters/agents_finder_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::AgentsFinder do
+ describe '#execute' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user, maintainer_projects: [project]) }
+
+ let!(:matching_agent) { create(:cluster_agent, project: project) }
+ let!(:wrong_project) { create(:cluster_agent) }
+
+ subject { described_class.new(project, user).execute }
+
+ it { is_expected.to contain_exactly(matching_agent) }
+
+ context 'user does not have permission' do
+ let(:user) { create(:user, developer_projects: [project]) }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'filtering by name' do
+ let(:params) { Hash(name: name_param) }
+
+ subject { described_class.new(project, user, params: params).execute }
+
+ context 'name does not match' do
+ let(:name_param) { 'other-name' }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'name does match' do
+ let(:name_param) { matching_agent.name }
+
+ it { is_expected.to contain_exactly(matching_agent) }
+ end
+ end
+ end
+end
diff --git a/spec/finders/error_tracking/errors_finder_spec.rb b/spec/finders/error_tracking/errors_finder_spec.rb
index 29053054f9d..66eb7769a4c 100644
--- a/spec/finders/error_tracking/errors_finder_spec.rb
+++ b/spec/finders/error_tracking/errors_finder_spec.rb
@@ -29,14 +29,25 @@ RSpec.describe ErrorTracking::ErrorsFinder do
context 'with sort parameter' do
let(:params) { { status: 'unresolved', sort: 'first_seen' } }
- it { is_expected.to eq([error, error_yesterday]) }
+ it { expect(subject.to_a).to eq([error, error_yesterday]) }
end
- context 'with limit parameter' do
+ context 'pagination' do
let(:params) { { limit: '1', sort: 'first_seen' } }
# Sort by first_seen is DESC by default, so the most recent error is `error`
it { is_expected.to contain_exactly(error) }
+
+ it { expect(subject.has_next_page?).to be_truthy }
+
+ it 'returns next page by cursor' do
+ params_with_cursor = params.merge(cursor: subject.cursor_for_next_page)
+ errors = described_class.new(user, project, params_with_cursor).execute
+
+ expect(errors).to contain_exactly(error_resolved)
+ expect(errors.has_next_page?).to be_truthy
+ expect(errors.has_previous_page?).to be_truthy
+ end
end
end
end
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index ed35d75720c..9f12308013e 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -1199,6 +1199,14 @@ RSpec.describe IssuesFinder do
end
end
+ context 'when a non-simple sort is given' do
+ let(:params) { { search: 'foo', attempt_project_search_optimizations: true, sort: 'popularity' } }
+
+ it 'returns false' do
+ expect(finder.use_cte_for_search?).to be_falsey
+ end
+ end
+
context 'when all conditions are met' do
context "uses group search optimization" do
let(:params) { { search: 'foo', attempt_group_search_optimizations: true } }
@@ -1217,6 +1225,24 @@ RSpec.describe IssuesFinder do
expect(finder.execute.to_sql).to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
end
end
+
+ context 'with simple sort' do
+ let(:params) { { search: 'foo', attempt_project_search_optimizations: true, sort: 'updated_desc' } }
+
+ it 'returns true' do
+ expect(finder.use_cte_for_search?).to be_truthy
+ expect(finder.execute.to_sql).to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
+ end
+ end
+
+ context 'with simple sort as a symbol' do
+ let(:params) { { search: 'foo', attempt_project_search_optimizations: true, sort: :updated_desc } }
+
+ it 'returns true' do
+ expect(finder.use_cte_for_search?).to be_truthy
+ expect(finder.execute.to_sql).to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
+ end
+ end
end
end
diff --git a/spec/finders/members_finder_spec.rb b/spec/finders/members_finder_spec.rb
index d25e1b9ca4b..749e319f9c7 100644
--- a/spec/finders/members_finder_spec.rb
+++ b/spec/finders/members_finder_spec.rb
@@ -161,42 +161,54 @@ RSpec.describe MembersFinder, '#execute' do
end
context 'when :invited_groups is passed' do
- subject { described_class.new(project, user2).execute(include_relations: [:inherited, :direct, :invited_groups]) }
+ shared_examples 'with invited_groups param' do
+ subject { described_class.new(project, user2).execute(include_relations: [:inherited, :direct, :invited_groups]) }
- let_it_be(:linked_group) { create(:group, :public) }
- let_it_be(:nested_linked_group) { create(:group, parent: linked_group) }
- let_it_be(:linked_group_member) { linked_group.add_guest(user1) }
- let_it_be(:nested_linked_group_member) { nested_linked_group.add_guest(user2) }
+ let_it_be(:linked_group) { create(:group, :public) }
+ let_it_be(:nested_linked_group) { create(:group, parent: linked_group) }
+ let_it_be(:linked_group_member) { linked_group.add_guest(user1) }
+ let_it_be(:nested_linked_group_member) { nested_linked_group.add_guest(user2) }
- it 'includes all the invited_groups members including members inherited from ancestor groups' do
- create(:project_group_link, project: project, group: nested_linked_group)
+ it 'includes all the invited_groups members including members inherited from ancestor groups' do
+ create(:project_group_link, project: project, group: nested_linked_group)
- expect(subject).to contain_exactly(linked_group_member, nested_linked_group_member)
- end
+ expect(subject).to contain_exactly(linked_group_member, nested_linked_group_member)
+ end
- it 'includes all the invited_groups members' do
- create(:project_group_link, project: project, group: linked_group)
+ it 'includes all the invited_groups members' do
+ create(:project_group_link, project: project, group: linked_group)
- expect(subject).to contain_exactly(linked_group_member)
- end
+ expect(subject).to contain_exactly(linked_group_member)
+ end
- it 'excludes group_members not visible to the user' do
- create(:project_group_link, project: project, group: linked_group)
- private_linked_group = create(:group, :private)
- private_linked_group.add_developer(user3)
- create(:project_group_link, project: project, group: private_linked_group)
+ it 'excludes group_members not visible to the user' do
+ create(:project_group_link, project: project, group: linked_group)
+ private_linked_group = create(:group, :private)
+ private_linked_group.add_developer(user3)
+ create(:project_group_link, project: project, group: private_linked_group)
- expect(subject).to contain_exactly(linked_group_member)
+ expect(subject).to contain_exactly(linked_group_member)
+ end
+
+ context 'when the user is a member of invited group and ancestor groups' do
+ it 'returns the highest access_level for the user limited by project_group_link.group_access', :nested_groups do
+ create(:project_group_link, project: project, group: nested_linked_group, group_access: Gitlab::Access::REPORTER)
+ nested_linked_group.add_developer(user1)
+
+ expect(subject.map(&:user)).to contain_exactly(user1, user2)
+ expect(subject.max_by(&:access_level).access_level).to eq(Gitlab::Access::REPORTER)
+ end
+ end
end
- context 'when the user is a member of invited group and ancestor groups' do
- it 'returns the highest access_level for the user limited by project_group_link.group_access', :nested_groups do
- create(:project_group_link, project: project, group: nested_linked_group, group_access: Gitlab::Access::REPORTER)
- nested_linked_group.add_developer(user1)
+ it_behaves_like 'with invited_groups param'
- expect(subject.map(&:user)).to contain_exactly(user1, user2)
- expect(subject.max_by(&:access_level).access_level).to eq(Gitlab::Access::REPORTER)
+ context 'when feature flag :linear_members_finder_ancestor_scopes is disabled' do
+ before do
+ stub_feature_flags(linear_members_finder_ancestor_scopes: false)
end
+
+ it_behaves_like 'with invited_groups param'
end
end
end
diff --git a/spec/finders/packages/group_packages_finder_spec.rb b/spec/finders/packages/group_packages_finder_spec.rb
index d7f62bdfbb4..3254c436674 100644
--- a/spec/finders/packages/group_packages_finder_spec.rb
+++ b/spec/finders/packages/group_packages_finder_spec.rb
@@ -147,6 +147,19 @@ RSpec.describe Packages::GroupPackagesFinder do
end
end
+ context 'with exact package_name' do
+ let_it_be(:named_package) { create(:maven_package, project: project, name: 'maven') }
+ let_it_be(:other_package) { create(:maven_package, project: project, name: 'maventoo') }
+
+ let(:params) { { exact_name: true, package_name: package_name } }
+
+ context 'as complete name' do
+ let(:package_name) { 'maven' }
+
+ it { is_expected.to match_array([named_package]) }
+ end
+ end
+
it_behaves_like 'concerning versionless param'
it_behaves_like 'concerning package statuses'
end
diff --git a/spec/finders/projects/members/effective_access_level_finder_spec.rb b/spec/finders/projects/members/effective_access_level_finder_spec.rb
index 1112dbd0d6e..33fbb5aca30 100644
--- a/spec/finders/projects/members/effective_access_level_finder_spec.rb
+++ b/spec/finders/projects/members/effective_access_level_finder_spec.rb
@@ -194,6 +194,7 @@ RSpec.describe Projects::Members::EffectiveAccessLevelFinder, '#execute' do
context 'for a project that is shared with other group(s)' do
let_it_be(:shared_with_group) { create(:group) }
let_it_be(:user_from_shared_with_group) { create(:user) }
+ let_it_be(:project) { create(:project, group: create(:group)) }
before do
create(:project_group_link, :developer, project: project, group: shared_with_group)
@@ -211,9 +212,24 @@ RSpec.describe Projects::Members::EffectiveAccessLevelFinder, '#execute' do
)
end
- context 'when the group containing the project has forbidden group shares for any of its projects' do
- let_it_be(:project) { create(:project, group: create(:group)) }
+ context 'even when the `lock_memberships_to_ldap` setting has been turned ON' do
+ before do
+ stub_application_setting(lock_memberships_to_ldap: true)
+ end
+ it 'includes the least among the specified access levels' do
+ expect(subject).to(
+ include(
+ hash_including(
+ 'user_id' => user_from_shared_with_group.id,
+ 'access_level' => Gitlab::Access::DEVELOPER
+ )
+ )
+ )
+ end
+ end
+
+ context 'when the group containing the project has forbidden group shares for any of its projects' do
before do
project.namespace.update!(share_with_group_lock: true)
end
diff --git a/spec/finders/projects/topics_finder_spec.rb b/spec/finders/projects/topics_finder_spec.rb
new file mode 100644
index 00000000000..28802c5d49e
--- /dev/null
+++ b/spec/finders/projects/topics_finder_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::TopicsFinder do
+ let_it_be(:user) { create(:user) }
+
+ let!(:topic1) { create(:topic, name: 'topicB') }
+ let!(:topic2) { create(:topic, name: 'topicC') }
+ let!(:topic3) { create(:topic, name: 'topicA') }
+
+ let!(:project1) { create(:project, namespace: user.namespace, topic_list: 'topicC, topicA, topicB') }
+ let!(:project2) { create(:project, namespace: user.namespace, topic_list: 'topicC, topicA') }
+ let!(:project3) { create(:project, namespace: user.namespace, topic_list: 'topicC') }
+
+ describe '#execute' do
+ it 'returns topics' do
+ topics = described_class.new.execute
+
+ expect(topics).to eq([topic2, topic3, topic1])
+ end
+
+ context 'filter by name' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:search, :result) do
+ 'topic' | %w[topicC topicA topicB]
+ 'pic' | %w[topicC topicA topicB]
+ 'B' | %w[]
+ 'cB' | %w[]
+ 'icB' | %w[topicB]
+ 'topicA' | %w[topicA]
+ 'topica' | %w[topicA]
+ end
+
+ with_them do
+ it 'returns filtered topics' do
+ topics = described_class.new(params: { search: search }).execute
+
+ expect(topics.map(&:name)).to eq(result)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/finders/tags_finder_spec.rb b/spec/finders/tags_finder_spec.rb
index 08978a32e50..fe015d53ac9 100644
--- a/spec/finders/tags_finder_spec.rb
+++ b/spec/finders/tags_finder_spec.rb
@@ -3,93 +3,76 @@
require 'spec_helper'
RSpec.describe TagsFinder do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
- let(:repository) { project.repository }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository }
+
+ def load_tags(params)
+ tags_finder = described_class.new(repository, params)
+ tags, error = tags_finder.execute
+
+ expect(error).to eq(nil)
+
+ tags
+ end
describe '#execute' do
context 'sort only' do
it 'sorts by name' do
- tags_finder = described_class.new(repository, {})
-
- result = tags_finder.execute
-
- expect(result.first.name).to eq("v1.0.0")
+ expect(load_tags({}).first.name).to eq("v1.0.0")
end
it 'sorts by recently_updated' do
- tags_finder = described_class.new(repository, { sort: 'updated_desc' })
-
- result = tags_finder.execute
recently_updated_tag = repository.tags.max do |a, b|
repository.commit(a.dereferenced_target).committed_date <=> repository.commit(b.dereferenced_target).committed_date
end
- expect(result.first.name).to eq(recently_updated_tag.name)
+ params = { sort: 'updated_desc' }
+
+ expect(load_tags(params).first.name).to eq(recently_updated_tag.name)
end
it 'sorts by last_updated' do
- tags_finder = described_class.new(repository, { sort: 'updated_asc' })
-
- result = tags_finder.execute
+ params = { sort: 'updated_asc' }
- expect(result.first.name).to eq('v1.0.0')
+ expect(load_tags(params).first.name).to eq('v1.0.0')
end
end
context 'filter only' do
it 'filters tags by name' do
- tags_finder = described_class.new(repository, { search: '1.0.0' })
-
- result = tags_finder.execute
+ result = load_tags({ search: '1.0.0' })
expect(result.first.name).to eq('v1.0.0')
expect(result.count).to eq(1)
end
it 'does not find any tags with that name' do
- tags_finder = described_class.new(repository, { search: 'hey' })
-
- result = tags_finder.execute
-
- expect(result.count).to eq(0)
+ expect(load_tags({ search: 'hey' }).count).to eq(0)
end
it 'filters tags by name that begins with' do
- params = { search: '^v1.0' }
- tags_finder = described_class.new(repository, params)
-
- result = tags_finder.execute
+ result = load_tags({ search: '^v1.0' })
expect(result.first.name).to eq('v1.0.0')
expect(result.count).to eq(1)
end
it 'filters tags by name that ends with' do
- params = { search: '0.0$' }
- tags_finder = described_class.new(repository, params)
-
- result = tags_finder.execute
+ result = load_tags({ search: '0.0$' })
expect(result.first.name).to eq('v1.0.0')
expect(result.count).to eq(1)
end
it 'filters tags by nonexistent name that begins with' do
- params = { search: '^nope' }
- tags_finder = described_class.new(repository, params)
-
- result = tags_finder.execute
+ result = load_tags({ search: '^nope' })
expect(result.count).to eq(0)
end
it 'filters tags by nonexistent name that ends with' do
- params = { search: 'nope$' }
- tags_finder = described_class.new(repository, params)
-
- result = tags_finder.execute
-
+ result = load_tags({ search: 'nope$' })
expect(result.count).to eq(0)
end
end
@@ -97,7 +80,7 @@ RSpec.describe TagsFinder do
context 'filter and sort' do
let(:tags_to_compare) { %w[v1.0.0 v1.1.0] }
- subject { described_class.new(repository, params).execute.select { |tag| tags_to_compare.include?(tag.name) } }
+ subject { load_tags(params).select { |tag| tags_to_compare.include?(tag.name) } }
context 'when sort by updated_desc' do
let(:params) { { sort: 'updated_desc', search: 'v1' } }
@@ -117,5 +100,17 @@ RSpec.describe TagsFinder do
end
end
end
+
+ context 'when Gitaly is unavailable' do
+ it 'returns empty list of tags' do
+ expect(Gitlab::GitalyClient).to receive(:call).and_raise(GRPC::Unavailable)
+
+ tags_finder = described_class.new(repository, {})
+ tags, error = tags_finder.execute
+
+ expect(error).to be_a(Gitlab::Git::CommandError)
+ expect(tags).to eq([])
+ end
+ end
end
end
diff --git a/spec/fixtures/api/schemas/entities/member.json b/spec/fixtures/api/schemas/entities/member.json
index f06687f9809..dec98123e85 100644
--- a/spec/fixtures/api/schemas/entities/member.json
+++ b/spec/fixtures/api/schemas/entities/member.json
@@ -56,13 +56,15 @@
{ "$ref": "member_user.json" }
]
},
+ "state": { "type": "integer" },
"invite": {
"type": "object",
- "required": ["email", "avatar_url", "can_resend"],
+ "required": ["email", "avatar_url", "can_resend", "user_state"],
"properties": {
"email": { "type": "string" },
"avatar_url": { "type": "string" },
- "can_resend": { "type": "boolean" }
+ "can_resend": { "type": "boolean" },
+ "user_state": { "type": "string" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/external_validation.json b/spec/fixtures/api/schemas/external_validation.json
index 280b77b221a..e95909a2922 100644
--- a/spec/fixtures/api/schemas/external_validation.json
+++ b/spec/fixtures/api/schemas/external_validation.json
@@ -34,7 +34,8 @@
"email": { "type": "string" },
"created_at": { "type": ["string", "null"], "format": "date-time" },
"current_sign_in_ip": { "type": ["string", "null"] },
- "last_sign_in_ip": { "type": ["string", "null"] }
+ "last_sign_in_ip": { "type": ["string", "null"] },
+ "sign_in_count": { "type": "integer" }
}
},
"pipeline": {
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_details.json b/spec/fixtures/api/schemas/graphql/packages/package_details.json
index 9e8bf7c52d0..2824ca64325 100644
--- a/spec/fixtures/api/schemas/graphql/packages/package_details.json
+++ b/spec/fixtures/api/schemas/graphql/packages/package_details.json
@@ -13,7 +13,8 @@
"pipelines",
"versions",
"metadata",
- "status"
+ "status",
+ "canDestroy"
],
"properties": {
"id": {
@@ -31,6 +32,9 @@
"version": {
"type": ["string", "null"]
},
+ "canDestroy": {
+ "type": ["boolean"]
+ },
"packageType": {
"type": ["string"],
"enum": [
@@ -50,6 +54,7 @@
"type": "object",
"additionalProperties": false,
"properties": {
+ "count": { "type": "integer" },
"pageInfo": { "type": "object" },
"edges": { "type": "array" },
"nodes": { "type": "array" }
@@ -72,6 +77,7 @@
"type": "object",
"additionalProperties": false,
"properties": {
+ "count": { "type": "integer" },
"pageInfo": { "type": "object" },
"edges": { "type": "array" },
"nodes": { "type": "array" }
@@ -91,6 +97,7 @@
"type": "object",
"additionalProperties": false,
"properties": {
+ "count": { "type": "integer" },
"pageInfo": { "type": "object" },
"edges": { "type": "array" },
"nodes": { "type": "array" }
@@ -106,6 +113,7 @@
"properties": {
"pageInfo": { "type": "object" },
"edges": { "type": "array" },
+ "count": { "type": "integer" },
"nodes": {
"type": "array",
"items": {
diff --git a/spec/fixtures/api/schemas/public_api/v4/environment.json b/spec/fixtures/api/schemas/public_api/v4/environment.json
index b90bfe8de55..30104adaf5c 100644
--- a/spec/fixtures/api/schemas/public_api/v4/environment.json
+++ b/spec/fixtures/api/schemas/public_api/v4/environment.json
@@ -5,7 +5,9 @@
"name",
"slug",
"external_url",
- "last_deployment"
+ "state",
+ "created_at",
+ "updated_at"
],
"properties": {
"id": { "type": "integer" },
@@ -19,6 +21,9 @@
]
},
"state": { "type": "string" },
+ "created_at": { "type": "string", "format": "date-time" },
+ "updated_at": { "type": "string", "format": "date-time" },
+ "project": { "$ref": "project.json" },
"enable_advanced_logs_querying": { "type": "boolean" },
"logs_api_path": { "type": "string" },
"gitlab_managed_apps_logs_path": { "type": "string" }
diff --git a/spec/fixtures/api/schemas/public_api/v4/service.json b/spec/fixtures/api/schemas/public_api/v4/integration.json
index b6f13d1cfe7..b6f13d1cfe7 100644
--- a/spec/fixtures/api/schemas/public_api/v4/service.json
+++ b/spec/fixtures/api/schemas/public_api/v4/integration.json
diff --git a/spec/fixtures/api/schemas/public_api/v4/integrations.json b/spec/fixtures/api/schemas/public_api/v4/integrations.json
new file mode 100644
index 00000000000..e7ebe7652c9
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/integrations.json
@@ -0,0 +1,4 @@
+{
+ "type": "array",
+ "items": { "$ref": "integration.json" }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/services.json b/spec/fixtures/api/schemas/public_api/v4/services.json
deleted file mode 100644
index 78c59ecfa10..00000000000
--- a/spec/fixtures/api/schemas/public_api/v4/services.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "type": "array",
- "items": { "$ref": "service.json" }
-}
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/project.json b/spec/fixtures/lib/gitlab/import_export/complex/project.json
index 1072e63b20b..fd4c2d55124 100644
--- a/spec/fixtures/lib/gitlab/import_export/complex/project.json
+++ b/spec/fixtures/lib/gitlab/import_export/complex/project.json
@@ -7759,7 +7759,29 @@
"created_at": "2019-06-06T14:01:06.204Z",
"updated_at": "2019-06-06T14:22:37.045Z",
"name": "TestBoardABC",
- "milestone_id": null,
+ "milestone": {
+ "id": 1,
+ "title": "test milestone",
+ "project_id": 8,
+ "description": "test milestone",
+ "due_date": null,
+ "created_at": "2016-06-14T15:02:04.415Z",
+ "updated_at": "2016-06-14T15:02:04.415Z",
+ "state": "active",
+ "iid": 1,
+ "events": [
+ {
+ "id": 487,
+ "target_type": "Milestone",
+ "target_id": 1,
+ "project_id": 46,
+ "created_at": "2016-06-14T15:02:04.418Z",
+ "updated_at": "2016-06-14T15:02:04.418Z",
+ "action": 1,
+ "author_id": 18
+ }
+ ]
+ },
"group_id": null,
"weight": null,
"lists": [
@@ -7772,7 +7794,29 @@
"created_at": "2019-06-06T14:01:06.214Z",
"updated_at": "2019-06-06T14:01:06.214Z",
"user_id": null,
- "milestone_id": null
+ "milestone": {
+ "id": 1,
+ "title": "test milestone",
+ "project_id": 8,
+ "description": "test milestone",
+ "due_date": null,
+ "created_at": "2016-06-14T15:02:04.415Z",
+ "updated_at": "2016-06-14T15:02:04.415Z",
+ "state": "active",
+ "iid": 1,
+ "events": [
+ {
+ "id": 487,
+ "target_type": "Milestone",
+ "target_id": 1,
+ "project_id": 46,
+ "created_at": "2016-06-14T15:02:04.418Z",
+ "updated_at": "2016-06-14T15:02:04.418Z",
+ "action": 1,
+ "author_id": 18
+ }
+ ]
+ }
},
{
"id": 61,
@@ -7783,7 +7827,29 @@
"created_at": "2019-06-06T14:01:43.197Z",
"updated_at": "2019-06-06T14:01:43.197Z",
"user_id": null,
- "milestone_id": null,
+ "milestone": {
+ "id": 1,
+ "title": "test milestone",
+ "project_id": 8,
+ "description": "test milestone",
+ "due_date": null,
+ "created_at": "2016-06-14T15:02:04.415Z",
+ "updated_at": "2016-06-14T15:02:04.415Z",
+ "state": "active",
+ "iid": 1,
+ "events": [
+ {
+ "id": 487,
+ "target_type": "Milestone",
+ "target_id": 1,
+ "project_id": 46,
+ "created_at": "2016-06-14T15:02:04.418Z",
+ "updated_at": "2016-06-14T15:02:04.418Z",
+ "action": 1,
+ "author_id": 18
+ }
+ ]
+ },
"label": {
"id": 20,
"title": "testlabel",
@@ -7807,7 +7873,29 @@
"created_at": "2019-06-06T14:01:06.221Z",
"updated_at": "2019-06-06T14:01:06.221Z",
"user_id": null,
- "milestone_id": null
+ "milestone": {
+ "id": 1,
+ "title": "test milestone",
+ "project_id": 8,
+ "description": "test milestone",
+ "due_date": null,
+ "created_at": "2016-06-14T15:02:04.415Z",
+ "updated_at": "2016-06-14T15:02:04.415Z",
+ "state": "active",
+ "iid": 1,
+ "events": [
+ {
+ "id": 487,
+ "target_type": "Milestone",
+ "target_id": 1,
+ "project_id": 46,
+ "created_at": "2016-06-14T15:02:04.418Z",
+ "updated_at": "2016-06-14T15:02:04.418Z",
+ "action": 1,
+ "author_id": 18
+ }
+ ]
+ }
}
]
}
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/tree/project.json b/spec/fixtures/lib/gitlab/import_export/complex/tree/project.json
index 203b0264f9e..2c5045ce806 100644
--- a/spec/fixtures/lib/gitlab/import_export/complex/tree/project.json
+++ b/spec/fixtures/lib/gitlab/import_export/complex/tree/project.json
@@ -1 +1,10 @@
-{"description":"Nisi et repellendus ut enim quo accusamus vel magnam.","import_type":"gitlab_project","creator_id":123,"visibility_level":10,"archived":false,"deploy_keys":[],"hooks":[]}
+{
+ "description": "Nisi et repellendus ut enim quo accusamus vel magnam.",
+ "import_type": "gitlab_project",
+ "creator_id": 123,
+ "visibility_level": 10,
+ "archived": false,
+ "deploy_keys": [],
+ "hooks": [],
+ "shared_runners_enabled": true
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/tree/project/boards.ndjson b/spec/fixtures/lib/gitlab/import_export/complex/tree/project/boards.ndjson
index ef18af69c9b..a63b583f087 100644
--- a/spec/fixtures/lib/gitlab/import_export/complex/tree/project/boards.ndjson
+++ b/spec/fixtures/lib/gitlab/import_export/complex/tree/project/boards.ndjson
@@ -1 +1 @@
-{"id":29,"project_id":49,"created_at":"2019-06-06T14:01:06.204Z","updated_at":"2019-06-06T14:22:37.045Z","name":"TestBoardABC","milestone_id":null,"group_id":null,"weight":null,"lists":[{"id":59,"board_id":29,"label_id":null,"list_type":"backlog","position":null,"created_at":"2019-06-06T14:01:06.214Z","updated_at":"2019-06-06T14:01:06.214Z","user_id":null,"milestone_id":null},{"id":61,"board_id":29,"label_id":20,"list_type":"label","position":0,"created_at":"2019-06-06T14:01:43.197Z","updated_at":"2019-06-06T14:01:43.197Z","user_id":null,"milestone_id":null,"label":{"id":20,"title":"testlabel","color":"#0033CC","project_id":49,"created_at":"2019-06-06T14:01:19.698Z","updated_at":"2019-06-06T14:01:19.698Z","template":false,"description":null,"group_id":null,"type":"ProjectLabel","priorities":[]}},{"id":60,"board_id":29,"label_id":null,"list_type":"closed","position":null,"created_at":"2019-06-06T14:01:06.221Z","updated_at":"2019-06-06T14:01:06.221Z","user_id":null,"milestone_id":null}]}
+{"id":29,"project_id":49,"created_at":"2019-06-06T14:01:06.204Z","updated_at":"2019-06-06T14:22:37.045Z","name":"TestBoardABC","group_id":null,"weight":null,"milestone":{"id":1,"title":"test milestone","project_id":8,"description":"test milestone","due_date":null,"created_at":"2016-06-14T15:02:04.415Z","updated_at":"2016-06-14T15:02:04.415Z","state":"active","iid":1,"events":[{"id":487,"target_type":"Milestone","target_id":1,"project_id":46,"created_at":"2016-06-14T15:02:04.418Z","updated_at":"2016-06-14T15:02:04.418Z","action":1,"author_id":18}]},"lists":[{"id":59,"board_id":29,"label_id":null,"list_type":"backlog","position":null,"created_at":"2019-06-06T14:01:06.214Z","updated_at":"2019-06-06T14:01:06.214Z","user_id":null,"milestone":{"id":1,"title":"test milestone","project_id":8,"description":"test milestone","due_date":null,"created_at":"2016-06-14T15:02:04.415Z","updated_at":"2016-06-14T15:02:04.415Z","state":"active","iid":1,"events":[{"id":487,"target_type":"Milestone","target_id":1,"project_id":46,"created_at":"2016-06-14T15:02:04.418Z","updated_at":"2016-06-14T15:02:04.418Z","action":1,"author_id":18}]}},{"id":61,"board_id":29,"label_id":20,"list_type":"label","position":0,"created_at":"2019-06-06T14:01:43.197Z","updated_at":"2019-06-06T14:01:43.197Z","user_id":null,"milestone":{"id":1,"title":"test milestone","project_id":8,"description":"test 
milestone","due_date":null,"created_at":"2016-06-14T15:02:04.415Z","updated_at":"2016-06-14T15:02:04.415Z","state":"active","iid":1,"events":[{"id":487,"target_type":"Milestone","target_id":1,"project_id":46,"created_at":"2016-06-14T15:02:04.418Z","updated_at":"2016-06-14T15:02:04.418Z","action":1,"author_id":18}]},"label":{"id":20,"title":"testlabel","color":"#0033CC","project_id":49,"created_at":"2019-06-06T14:01:19.698Z","updated_at":"2019-06-06T14:01:19.698Z","template":false,"description":null,"group_id":null,"type":"ProjectLabel","priorities":[]}},{"id":60,"board_id":29,"label_id":null,"list_type":"closed","position":null,"created_at":"2019-06-06T14:01:06.221Z","updated_at":"2019-06-06T14:01:06.221Z","user_id":null,"milestone":{"id":1,"title":"test milestone","project_id":8,"description":"test milestone","due_date":null,"created_at":"2016-06-14T15:02:04.415Z","updated_at":"2016-06-14T15:02:04.415Z","state":"active","iid":1,"events":[{"id":487,"target_type":"Milestone","target_id":1,"project_id":46,"created_at":"2016-06-14T15:02:04.418Z","updated_at":"2016-06-14T15:02:04.418Z","action":1,"author_id":18}]}}]}
diff --git a/spec/fixtures/lib/gitlab/performance_bar/peek_data.json b/spec/fixtures/lib/gitlab/performance_bar/peek_data.json
index c60e787ddb1..69512c52cbd 100644
--- a/spec/fixtures/lib/gitlab/performance_bar/peek_data.json
+++ b/spec/fixtures/lib/gitlab/performance_bar/peek_data.json
@@ -64,9 +64,90 @@
"warnings": []
},
"gitaly": {
- "duration": "0ms",
- "calls": 0,
- "details": [],
+ "duration": "30ms",
+ "calls": 2,
+ "details": [
+ {
+ "start": 6301.575665897,
+ "feature": "commit_service#get_tree_entries",
+ "duration": 23.709,
+ "request": "{:repository=>\n {:storage_name=>\"nfs-file-cny01\",\n :relative_path=>\n \"@hashed/a6/80/a68072e80f075e89bc74a300101a9e71e8363bdb542182580162553462480a52.git\",\n :git_object_directory=>\"\",\n :git_alternate_object_directories=>[],\n :gl_repository=>\"project-278964\",\n :gl_project_path=>\"gitlab-org/gitlab\"},\n :revision=>\"master\",\n :path=>\".\",\n :sort=>:TREES_FIRST,\n :pagination_params=>{:page_token=>\"\", :limit=>100}}\n",
+ "rpc": "get_tree_entries",
+ "backtrace": [
+ "lib/gitlab/gitaly_client/call.rb:48:in `block in instrument_stream'",
+ "lib/gitlab/gitaly_client/commit_service.rb:128:in `each'",
+ "lib/gitlab/gitaly_client/commit_service.rb:128:in `each'",
+ "lib/gitlab/gitaly_client/commit_service.rb:128:in `flat_map'",
+ "lib/gitlab/gitaly_client/commit_service.rb:128:in `tree_entries'",
+ "lib/gitlab/git/tree.rb:26:in `block in tree_entries'",
+ "lib/gitlab/git/wraps_gitaly_errors.rb:7:in `wrapped_gitaly_errors'",
+ "lib/gitlab/git/tree.rb:25:in `tree_entries'",
+ "lib/gitlab/git/rugged_impl/tree.rb:29:in `tree_entries'",
+ "lib/gitlab/git/tree.rb:21:in `where'",
+ "app/models/tree.rb:17:in `initialize'",
+ "app/models/repository.rb:681:in `new'",
+ "app/models/repository.rb:681:in `tree'",
+ "app/graphql/resolvers/paginated_tree_resolver.rb:35:in `resolve'",
+ "lib/gitlab/graphql/present/field_extension.rb:18:in `resolve'",
+ "lib/gitlab/graphql/extensions/externally_paginated_array_extension.rb:7:in `resolve'",
+ "lib/gitlab/graphql/generic_tracing.rb:40:in `with_labkit_tracing'",
+ "lib/gitlab/graphql/generic_tracing.rb:30:in `platform_trace'",
+ "lib/gitlab/graphql/generic_tracing.rb:40:in `with_labkit_tracing'",
+ "lib/gitlab/graphql/generic_tracing.rb:30:in `platform_trace'",
+ "lib/gitlab/graphql/generic_tracing.rb:40:in `with_labkit_tracing'",
+ "lib/gitlab/graphql/generic_tracing.rb:30:in `platform_trace'",
+ "app/graphql/gitlab_schema.rb:40:in `multiplex'",
+ "app/controllers/graphql_controller.rb:110:in `execute_multiplex'",
+ "app/controllers/graphql_controller.rb:41:in `execute'",
+ "ee/lib/gitlab/ip_address_state.rb:10:in `with'",
+ "ee/app/controllers/ee/application_controller.rb:44:in `set_current_ip_address'",
+ "app/controllers/application_controller.rb:497:in `set_current_admin'",
+ "lib/gitlab/session.rb:11:in `with_session'",
+ "app/controllers/application_controller.rb:488:in `set_session_storage'",
+ "app/controllers/application_controller.rb:482:in `set_locale'",
+ "app/controllers/application_controller.rb:476:in `set_current_context'",
+ "ee/lib/omni_auth/strategies/group_saml.rb:41:in `other_phase'",
+ "lib/gitlab/jira/middleware.rb:19:in `call'"
+ ],
+ "warnings": []
+ }, {
+ "start": 9081.502219885,
+ "feature": "commit_service#find_commit",
+ "duration": 6.678,
+ "request": "{:repository=>\n {:storage_name=>\"nfs-file-cny01\",\n :relative_path=>\n \"@hashed/a6/80/a68072e80f075e89bc74a300101a9e71e8363bdb542182580162553462480a52.git\",\n :git_object_directory=>\"\",\n :git_alternate_object_directories=>[],\n :gl_repository=>\"project-278964\",\n :gl_project_path=>\"gitlab-org/gitlab\"},\n :revision=>\"master\"}\n",
+ "rpc": "find_commit",
+ "backtrace": [
+ "lib/gitlab/gitaly_client/call.rb:30:in `call'",
+ "lib/gitlab/gitaly_client.rb:167:in `call'",
+ "lib/gitlab/gitaly_client/commit_service.rb:520:in `call_find_commit'",
+ "lib/gitlab/gitaly_client/commit_service.rb:354:in `find_commit'",
+ "lib/gitlab/git/commit.rb:74:in `block in find_commit'",
+ "lib/gitlab/git/wraps_gitaly_errors.rb:7:in `wrapped_gitaly_errors'",
+ "lib/gitlab/git/commit.rb:73:in `find_commit'",
+ "lib/gitlab/git/rugged_impl/commit.rb:41:in `find_commit'",
+ "lib/gitlab/git/commit.rb:65:in `find'",
+ "lib/gitlab/git/repository.rb:789:in `commit'",
+ "app/services/branches/diverging_commit_counts_service.rb:21:in `diverging_commit_counts'",
+ "app/services/branches/diverging_commit_counts_service.rb:11:in `call'",
+ "app/controllers/projects/branches_controller.rb:57:in `block (4 levels) in diverging_commit_counts'",
+ "app/controllers/projects/branches_controller.rb:57:in `to_h'",
+ "app/controllers/projects/branches_controller.rb:57:in `block (3 levels) in diverging_commit_counts'",
+ "lib/gitlab/gitaly_client.rb:325:in `allow_n_plus_1_calls'",
+ "app/controllers/projects/branches_controller.rb:56:in `block (2 levels) in diverging_commit_counts'",
+ "app/controllers/projects/branches_controller.rb:51:in `diverging_commit_counts'",
+ "ee/lib/gitlab/ip_address_state.rb:10:in `with'",
+ "ee/app/controllers/ee/application_controller.rb:44:in `set_current_ip_address'",
+ "app/controllers/application_controller.rb:497:in `set_current_admin'",
+ "lib/gitlab/session.rb:11:in `with_session'",
+ "app/controllers/application_controller.rb:488:in `set_session_storage'",
+ "app/controllers/application_controller.rb:482:in `set_locale'",
+ "app/controllers/application_controller.rb:476:in `set_current_context'",
+ "ee/lib/omni_auth/strategies/group_saml.rb:41:in `other_phase'",
+ "lib/gitlab/jira/middleware.rb:19:in `call'"
+ ],
+ "warnings": []
+ }
+ ],
"warnings": []
},
"redis": {
diff --git a/spec/fixtures/markdown.md.erb b/spec/fixtures/markdown.md.erb
index 100d17cc16e..2da16408fbc 100644
--- a/spec/fixtures/markdown.md.erb
+++ b/spec/fixtures/markdown.md.erb
@@ -52,6 +52,15 @@ Redcarpet supports this superscript syntax ( x^2 ).
This (C<sub>6</sub>H<sub>12</sub>O<sub>6</sub>) is an example of subscripts in Markdown.
+### Footnotes
+
+This is footnote 1.[^f1]
+
+A footnote with a `w` was failing.[^f2-w]
+
+[^f1]: Footnote 1
+[^f2-w]: Footnote with w
+
### Next step
After the Markdown has been turned into HTML, it gets passed through...
diff --git a/spec/fixtures/ssl/letsencrypt_expired_x3.pem b/spec/fixtures/ssl/letsencrypt_expired_x3.pem
new file mode 100644
index 00000000000..462df721ed7
--- /dev/null
+++ b/spec/fixtures/ssl/letsencrypt_expired_x3.pem
@@ -0,0 +1,98 @@
+-----BEGIN CERTIFICATE-----
+MIIGJDCCBQygAwIBAgISBOSAE/WwQGsTbDJI1vDL9+eKMA0GCSqGSIb3DQEBCwUA
+MDIxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1MZXQncyBFbmNyeXB0MQswCQYDVQQD
+EwJSMzAeFw0yMTEwMDEyMjIxMTlaFw0yMTEyMzAyMjIxMThaMBkxFzAVBgNVBAMT
+DndlYmRpb3hpZGUuY29tMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA
+wf/TpE5AjzoLXMFQ+WHle7Dn5rlEe0bPee2JU386cZmMYnGFS5DR251FerSX28U4
+pqk2yS8oefHGi2PS6h8/MWxr+Zy/6hk3WkgwdIK3uPiUcfCdPV/btXDd4YqikEDm
+BoOE4fQlqKQwtLOnhEZu9y8FQoxxoQ+7DndHrDixDoMbpUloxpqUZwziQnH4QHXE
+32rQhq25+NUK/lVFGKOFnmZ2s/yUildKafqulHrLHOhumKMOEivzlFDZbtqP+RKt
+nsrJ3i9O+nSQz6j5dv3Du6eaResrtK7tT1MFDNhcg2cgjNW64VLXQdFXYXE1OYsw
+yAuXUnHNzWFhinyf80qeh2046YR21dlG8voIDQH4fGG5GmWLyu7glsWYVwQQ36VA
+TTxPmAoaqUTl8A7cnlJpAo+BJ00mS/9DwJ7pkgGC7dYOhJzWlI7lPqzEfmJ+o8pj
+CJlLIuqsn0vcCZQlmqCFMxK4asn+puLLnMjRLHIYEJKDNyPGHQEr2e5t4GUYZKaN
+MEpXMwJd97tUamUKWeBPNIND/kOuqexe+okbOTRp34VAsK5oCpawEJckoNkK+sv0
+OrSWFOdfLBHv66p9qsrz8LQXxmN5JUBUe51SBSUo1Ul4/vGYdhuKd/8KcLw9/Al+
+HJN2hAeo3v+2fVey4hgGna7XNe8e3+E+OEQb4zpQDLkCAwEAAaOCAkswggJHMA4G
+A1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYD
+VR0TAQH/BAIwADAdBgNVHQ4EFgQU4PbvqCKatjx6GZMXy7v9GwykZq4wHwYDVR0j
+BBgwFoAUFC6zF7dYVsuuUAlA5h+vnYsUwsYwVQYIKwYBBQUHAQEESTBHMCEGCCsG
+AQUFBzABhhVodHRwOi8vcjMuby5sZW5jci5vcmcwIgYIKwYBBQUHMAKGFmh0dHA6
+Ly9yMy5pLmxlbmNyLm9yZy8wGQYDVR0RBBIwEIIOd2ViZGlveGlkZS5jb20wTAYD
+VR0gBEUwQzAIBgZngQwBAgEwNwYLKwYBBAGC3xMBAQEwKDAmBggrBgEFBQcCARYa
+aHR0cDovL2Nwcy5sZXRzZW5jcnlwdC5vcmcwggEGBgorBgEEAdZ5AgQCBIH3BIH0
+APIAdwBc3EOS/uarRUSxXprUVuYQN/vV+kfcoXOUsl7m9scOygAAAXw+KYGHAAAE
+AwBIMEYCIQCqD6jMtHrGlE02Qh1FzFd4+qYzJTrChHmHBFIncPGQKAIhALeYk0Vf
+/Lw2tX2beVlKN4/h1o8srNJv+06xkr1N6XmiAHcAfT7y+I//iFVoJMLAyp5SiXkr
+xQ54CX8uapdomX4i8NcAAAF8PimBogAABAMASDBGAiEA0h883FFj1dSYKGym9+Wa
+XgJRj526X7YlkhkZ5J1TjioCIQDyjMPrbo5liVi/e5b8gfDw5Fd9WNiTu1W1LKKu
+UpE/qTANBgkqhkiG9w0BAQsFAAOCAQEAcx10nqp1kh2awwoqwf7Jo8Gycqx2bA2O
+E2rveQ/BK9UhwvrNeEpE9SG6liMsYJKxGar0vbbBHvxzuMU00bhGjXFtUT5XuQ8q
+FcU0OdycyZj8fjZmUNsJr82l8HvfJ50jfxFORTgj8Ln5MWVUFlbl0nD+06l28sDc
+V+r/B4394fkoMsKXtiTA4/ZeOD1tHNsdxQ7sNQtEfqCG0wFCYHK3rs7XTZ1K0F3c
+M051JShko1UKP/k5blrendOwVRwLtq+9pavGnJBeqNIVgugTER/IHlp4427WyhdY
+KYjKoytW+XQyWqxU/Mh/O4rxkD8cZaE+FdZpP67VZ185AuZMbn+LcQ==
+-----END CERTIFICATE-----
+
+-----BEGIN CERTIFICATE-----
+MIIFFjCCAv6gAwIBAgIRAJErCErPDBinU/bWLiWnX1owDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMjAwOTA0MDAwMDAw
+WhcNMjUwOTE1MTYwMDAwWjAyMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNTGV0J3Mg
+RW5jcnlwdDELMAkGA1UEAxMCUjMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC7AhUozPaglNMPEuyNVZLD+ILxmaZ6QoinXSaqtSu5xUyxr45r+XXIo9cP
+R5QUVTVXjJ6oojkZ9YI8QqlObvU7wy7bjcCwXPNZOOftz2nwWgsbvsCUJCWH+jdx
+sxPnHKzhm+/b5DtFUkWWqcFTzjTIUu61ru2P3mBw4qVUq7ZtDpelQDRrK9O8Zutm
+NHz6a4uPVymZ+DAXXbpyb/uBxa3Shlg9F8fnCbvxK/eG3MHacV3URuPMrSXBiLxg
+Z3Vms/EY96Jc5lP/Ooi2R6X/ExjqmAl3P51T+c8B5fWmcBcUr2Ok/5mzk53cU6cG
+/kiFHaFpriV1uxPMUgP17VGhi9sVAgMBAAGjggEIMIIBBDAOBgNVHQ8BAf8EBAMC
+AYYwHQYDVR0lBBYwFAYIKwYBBQUHAwIGCCsGAQUFBwMBMBIGA1UdEwEB/wQIMAYB
+Af8CAQAwHQYDVR0OBBYEFBQusxe3WFbLrlAJQOYfr52LFMLGMB8GA1UdIwQYMBaA
+FHm0WeZ7tuXkAXOACIjIGlj26ZtuMDIGCCsGAQUFBwEBBCYwJDAiBggrBgEFBQcw
+AoYWaHR0cDovL3gxLmkubGVuY3Iub3JnLzAnBgNVHR8EIDAeMBygGqAYhhZodHRw
+Oi8veDEuYy5sZW5jci5vcmcvMCIGA1UdIAQbMBkwCAYGZ4EMAQIBMA0GCysGAQQB
+gt8TAQEBMA0GCSqGSIb3DQEBCwUAA4ICAQCFyk5HPqP3hUSFvNVneLKYY611TR6W
+PTNlclQtgaDqw+34IL9fzLdwALduO/ZelN7kIJ+m74uyA+eitRY8kc607TkC53wl
+ikfmZW4/RvTZ8M6UK+5UzhK8jCdLuMGYL6KvzXGRSgi3yLgjewQtCPkIVz6D2QQz
+CkcheAmCJ8MqyJu5zlzyZMjAvnnAT45tRAxekrsu94sQ4egdRCnbWSDtY7kh+BIm
+lJNXoB1lBMEKIq4QDUOXoRgffuDghje1WrG9ML+Hbisq/yFOGwXD9RiX8F6sw6W4
+avAuvDszue5L3sz85K+EC4Y/wFVDNvZo4TYXao6Z0f+lQKc0t8DQYzk1OXVu8rp2
+yJMC6alLbBfODALZvYH7n7do1AZls4I9d1P4jnkDrQoxB3UqQ9hVl3LEKQ73xF1O
+yK5GhDDX8oVfGKF5u+decIsH4YaTw7mP3GFxJSqv3+0lUFJoi5Lc5da149p90Ids
+hCExroL1+7mryIkXPeFM5TgO9r0rvZaBFOvV2z0gp35Z0+L4WPlbuEjN/lxPFin+
+HlUjr8gRsI3qfJOQFy/9rKIJR0Y/8Omwt/8oTWgy1mdeHmmjk7j1nYsvC9JSQ6Zv
+MldlTTKB3zhThV1+XWYp6rjd5JW1zbVWEkLNxE7GJThEUG3szgBVGP7pSWTUTsqX
+nLRbwHOoq7hHwg==
+-----END CERTIFICATE-----
+
+-----BEGIN CERTIFICATE-----
+MIIFYDCCBEigAwIBAgIQQAF3ITfU6UK47naqPGQKtzANBgkqhkiG9w0BAQsFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTIxMDEyMDE5MTQwM1oXDTI0MDkzMDE4MTQwM1ow
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCt6CRz9BQ385ueK1coHIe+3LffOJCMbjzmV6B493XC
+ov71am72AE8o295ohmxEk7axY/0UEmu/H9LqMZshftEzPLpI9d1537O4/xLxIZpL
+wYqGcWlKZmZsj348cL+tKSIG8+TA5oCu4kuPt5l+lAOf00eXfJlII1PoOK5PCm+D
+LtFJV4yAdLbaL9A4jXsDcCEbdfIwPPqPrt3aY6vrFk/CjhFLfs8L6P+1dy70sntK
+4EwSJQxwjQMpoOFTJOwT2e4ZvxCzSow/iaNhUd6shweU9GNx7C7ib1uYgeGJXDR5
+bHbvO5BieebbpJovJsXQEOEO3tkQjhb7t/eo98flAgeYjzYIlefiN5YNNnWe+w5y
+sR2bvAP5SQXYgd0FtCrWQemsAXaVCg/Y39W9Eh81LygXbNKYwagJZHduRze6zqxZ
+Xmidf3LWicUGQSk+WT7dJvUkyRGnWqNMQB9GoZm1pzpRboY7nn1ypxIFeFntPlF4
+FQsDj43QLwWyPntKHEtzBRL8xurgUBN8Q5N0s8p0544fAQjQMNRbcTa0B7rBMDBc
+SLeCO5imfWCKoqMpgsy6vYMEG6KDA0Gh1gXxG8K28Kh8hjtGqEgqiNx2mna/H2ql
+PRmP6zjzZN7IKw0KKP/32+IVQtQi0Cdd4Xn+GOdwiK1O5tmLOsbdJ1Fu/7xk9TND
+TwIDAQABo4IBRjCCAUIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
+SwYIKwYBBQUHAQEEPzA9MDsGCCsGAQUFBzAChi9odHRwOi8vYXBwcy5pZGVudHJ1
+c3QuY29tL3Jvb3RzL2RzdHJvb3RjYXgzLnA3YzAfBgNVHSMEGDAWgBTEp7Gkeyxx
++tvhS5B1/8QVYIWJEDBUBgNVHSAETTBLMAgGBmeBDAECATA/BgsrBgEEAYLfEwEB
+ATAwMC4GCCsGAQUFBwIBFiJodHRwOi8vY3BzLnJvb3QteDEubGV0c2VuY3J5cHQu
+b3JnMDwGA1UdHwQ1MDMwMaAvoC2GK2h0dHA6Ly9jcmwuaWRlbnRydXN0LmNvbS9E
+U1RST09UQ0FYM0NSTC5jcmwwHQYDVR0OBBYEFHm0WeZ7tuXkAXOACIjIGlj26Ztu
+MA0GCSqGSIb3DQEBCwUAA4IBAQAKcwBslm7/DlLQrt2M51oGrS+o44+/yQoDFVDC
+5WxCu2+b9LRPwkSICHXM6webFGJueN7sJ7o5XPWioW5WlHAQU7G75K/QosMrAdSW
+9MUgNTP52GE24HGNtLi1qoJFlcDyqSMo59ahy2cI2qBDLKobkx/J3vWraV0T9VuG
+WCLKTVXkcGdtwlfFRjlBz4pYg1htmf5X6DYO8A4jqv2Il9DjXA6USbW1FzXSLr9O
+he8Y4IWS6wY7bCkjCWDcRQJMEhg76fsO3txE+FiYruq9RUWhiF1myv4Q6W+CyBFC
+Dfvp7OOGAN6dEOM4+qR9sdjoSYKEBpsr6GtPAQw4dy753ec5
+-----END CERTIFICATE-----
diff --git a/spec/frontend/.eslintrc.yml b/spec/frontend/.eslintrc.yml
index 145e6c8961a..e12c4e5e820 100644
--- a/spec/frontend/.eslintrc.yml
+++ b/spec/frontend/.eslintrc.yml
@@ -12,7 +12,6 @@ settings:
jest:
jestConfigFile: 'jest.config.js'
globals:
- getJSONFixture: false
loadFixtures: false
setFixtures: false
rules:
@@ -26,4 +25,9 @@ rules:
- off
"@gitlab/no-global-event-off":
- off
-
+ import/no-unresolved:
+ - error
+ # The test fixtures and graphql schema are dynamically generated in CI
+ # during the `frontend-fixtures` and `graphql-schema-dump` jobs.
+ # They may not be present during linting.
+ - ignore: ['^test_fixtures\/', 'tmp/tests/graphql/gitlab_schema.graphql']
diff --git a/spec/frontend/__helpers__/fixtures.js b/spec/frontend/__helpers__/fixtures.js
index 4b86724df93..d8054d32fae 100644
--- a/spec/frontend/__helpers__/fixtures.js
+++ b/spec/frontend/__helpers__/fixtures.js
@@ -20,6 +20,11 @@ Did you run bin/rake frontend:fixtures?`,
return fs.readFileSync(absolutePath, 'utf8');
}
+/**
+ * @deprecated Use `import` to load a JSON fixture instead.
+ * See https://docs.gitlab.com/ee/development/testing_guide/frontend_testing.html#use-fixtures,
+ * https://gitlab.com/gitlab-org/gitlab/-/issues/339346.
+ */
export const getJSONFixture = (relativePath) => JSON.parse(getFixture(relativePath));
export const resetHTMLFixture = () => {
diff --git a/spec/frontend/__helpers__/flush_promises.js b/spec/frontend/__helpers__/flush_promises.js
new file mode 100644
index 00000000000..5287a060753
--- /dev/null
+++ b/spec/frontend/__helpers__/flush_promises.js
@@ -0,0 +1,3 @@
+export default function flushPromises() {
+ return new Promise(setImmediate);
+}
diff --git a/spec/frontend/access_tokens/components/projects_token_selector_spec.js b/spec/frontend/access_tokens/components/projects_token_selector_spec.js
index 09f52fe9a5f..40aaf16d41f 100644
--- a/spec/frontend/access_tokens/components/projects_token_selector_spec.js
+++ b/spec/frontend/access_tokens/components/projects_token_selector_spec.js
@@ -11,7 +11,7 @@ import produce from 'immer';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
-import { getJSONFixture } from 'helpers/fixtures';
+import getProjectsQueryResponse from 'test_fixtures/graphql/projects/access_tokens/get_projects.query.graphql.json';
import createMockApollo from 'helpers/mock_apollo_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
@@ -20,9 +20,6 @@ import getProjectsQuery from '~/access_tokens/graphql/queries/get_projects.query
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
describe('ProjectsTokenSelector', () => {
- const getProjectsQueryResponse = getJSONFixture(
- 'graphql/projects/access_tokens/get_projects.query.graphql.json',
- );
const getProjectsQueryResponsePage2 = produce(
getProjectsQueryResponse,
(getProjectsQueryResponseDraft) => {
diff --git a/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js b/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
index 2832de98769..e7a20ae114c 100644
--- a/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
+++ b/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
@@ -1,12 +1,12 @@
import { GlModal, GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
+import getDiffWithCommit from 'test_fixtures/merge_request_diffs/with_commit.json';
import AddReviewItemsModal from '~/add_context_commits_modal/components/add_context_commits_modal_wrapper.vue';
import * as actions from '~/add_context_commits_modal/store/actions';
import mutations from '~/add_context_commits_modal/store/mutations';
import defaultState from '~/add_context_commits_modal/store/state';
-import getDiffWithCommit from '../../diffs/mock_data/diff_with_commit';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -18,7 +18,7 @@ describe('AddContextCommitsModal', () => {
const removeContextCommits = jest.fn();
const resetModalState = jest.fn();
const searchCommits = jest.fn();
- const { commit } = getDiffWithCommit();
+ const { commit } = getDiffWithCommit;
const createWrapper = (props = {}) => {
store = new Vuex.Store({
diff --git a/spec/frontend/add_context_commits_modal/components/review_tab_container_spec.js b/spec/frontend/add_context_commits_modal/components/review_tab_container_spec.js
index 75f1cc41e23..85ecb4313c2 100644
--- a/spec/frontend/add_context_commits_modal/components/review_tab_container_spec.js
+++ b/spec/frontend/add_context_commits_modal/components/review_tab_container_spec.js
@@ -1,12 +1,12 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import getDiffWithCommit from 'test_fixtures/merge_request_diffs/with_commit.json';
import ReviewTabContainer from '~/add_context_commits_modal/components/review_tab_container.vue';
import CommitItem from '~/diffs/components/commit_item.vue';
-import getDiffWithCommit from '../../diffs/mock_data/diff_with_commit';
describe('ReviewTabContainer', () => {
let wrapper;
- const { commit } = getDiffWithCommit();
+ const { commit } = getDiffWithCommit;
const createWrapper = (props = {}) => {
wrapper = shallowMount(ReviewTabContainer, {
diff --git a/spec/frontend/add_context_commits_modal/store/mutations_spec.js b/spec/frontend/add_context_commits_modal/store/mutations_spec.js
index 2331a4af1bc..7517c1c391e 100644
--- a/spec/frontend/add_context_commits_modal/store/mutations_spec.js
+++ b/spec/frontend/add_context_commits_modal/store/mutations_spec.js
@@ -1,10 +1,10 @@
+import getDiffWithCommit from 'test_fixtures/merge_request_diffs/with_commit.json';
import { TEST_HOST } from 'helpers/test_constants';
import * as types from '~/add_context_commits_modal/store/mutation_types';
import mutations from '~/add_context_commits_modal/store/mutations';
-import getDiffWithCommit from '../../diffs/mock_data/diff_with_commit';
describe('AddContextCommitsModalStoreMutations', () => {
- const { commit } = getDiffWithCommit();
+ const { commit } = getDiffWithCommit;
describe('SET_BASE_CONFIG', () => {
it('should set contextCommitsPath, mergeRequestIid and projectId', () => {
const state = {};
diff --git a/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js b/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js
index 4bb22feb913..5b4f954b672 100644
--- a/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js
+++ b/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js
@@ -35,9 +35,6 @@ describe('Signup Form', () => {
const findDenyListRawInputGroup = () => wrapper.findByTestId('domain-denylist-raw-input-group');
const findDenyListFileInputGroup = () => wrapper.findByTestId('domain-denylist-file-input-group');
-
- const findRequireAdminApprovalCheckbox = () =>
- wrapper.findByTestId('require-admin-approval-checkbox');
const findUserCapInput = () => wrapper.findByTestId('user-cap-input');
const findModal = () => wrapper.find(GlModal);
@@ -191,125 +188,6 @@ describe('Signup Form', () => {
});
describe('form submit button confirmation modal for side-effect of adding possibly unwanted new users', () => {
- it.each`
- requireAdminApprovalAction | userCapAction | pendingUserCount | buttonEffect
- ${'unchanged from true'} | ${'unchanged'} | ${0} | ${'submits form'}
- ${'unchanged from false'} | ${'unchanged'} | ${0} | ${'submits form'}
- ${'toggled off'} | ${'unchanged'} | ${1} | ${'shows confirmation modal'}
- ${'toggled off'} | ${'unchanged'} | ${0} | ${'submits form'}
- ${'toggled on'} | ${'unchanged'} | ${0} | ${'submits form'}
- ${'unchanged from false'} | ${'increased'} | ${1} | ${'shows confirmation modal'}
- ${'unchanged from true'} | ${'increased'} | ${0} | ${'submits form'}
- ${'toggled off'} | ${'increased'} | ${1} | ${'shows confirmation modal'}
- ${'toggled off'} | ${'increased'} | ${0} | ${'submits form'}
- ${'toggled on'} | ${'increased'} | ${1} | ${'shows confirmation modal'}
- ${'toggled on'} | ${'increased'} | ${0} | ${'submits form'}
- ${'toggled on'} | ${'decreased'} | ${0} | ${'submits form'}
- ${'toggled on'} | ${'decreased'} | ${1} | ${'submits form'}
- ${'unchanged from false'} | ${'changed from limited to unlimited'} | ${1} | ${'shows confirmation modal'}
- ${'unchanged from false'} | ${'changed from limited to unlimited'} | ${0} | ${'submits form'}
- ${'unchanged from false'} | ${'changed from unlimited to limited'} | ${0} | ${'submits form'}
- ${'unchanged from false'} | ${'unchanged from unlimited'} | ${0} | ${'submits form'}
- `(
- '$buttonEffect if require admin approval for new sign-ups is $requireAdminApprovalAction and the user cap is $userCapAction and pending user count is $pendingUserCount',
- async ({ requireAdminApprovalAction, userCapAction, pendingUserCount, buttonEffect }) => {
- let isModalDisplayed;
-
- switch (buttonEffect) {
- case 'shows confirmation modal':
- isModalDisplayed = true;
- break;
- case 'submits form':
- isModalDisplayed = false;
- break;
- default:
- isModalDisplayed = false;
- break;
- }
-
- const isFormSubmittedWhenClickingFormSubmitButton = !isModalDisplayed;
-
- const injectedProps = {
- pendingUserCount,
- };
-
- const USER_CAP_DEFAULT = 5;
-
- switch (userCapAction) {
- case 'changed from unlimited to limited':
- injectedProps.newUserSignupsCap = '';
- break;
- case 'unchanged from unlimited':
- injectedProps.newUserSignupsCap = '';
- break;
- default:
- injectedProps.newUserSignupsCap = USER_CAP_DEFAULT;
- break;
- }
-
- switch (requireAdminApprovalAction) {
- case 'unchanged from true':
- injectedProps.requireAdminApprovalAfterUserSignup = true;
- break;
- case 'unchanged from false':
- injectedProps.requireAdminApprovalAfterUserSignup = false;
- break;
- case 'toggled off':
- injectedProps.requireAdminApprovalAfterUserSignup = true;
- break;
- case 'toggled on':
- injectedProps.requireAdminApprovalAfterUserSignup = false;
- break;
- default:
- injectedProps.requireAdminApprovalAfterUserSignup = false;
- break;
- }
-
- formSubmitSpy = jest.spyOn(HTMLFormElement.prototype, 'submit').mockImplementation();
-
- await mountComponent({
- injectedProps,
- stubs: { GlButton, GlModal: stubComponent(GlModal) },
- });
-
- findModal().vm.show = jest.fn();
-
- if (
- requireAdminApprovalAction === 'toggled off' ||
- requireAdminApprovalAction === 'toggled on'
- ) {
- await findRequireAdminApprovalCheckbox().vm.$emit('input', false);
- }
-
- switch (userCapAction) {
- case 'increased':
- await findUserCapInput().vm.$emit('input', USER_CAP_DEFAULT + 1);
- break;
- case 'decreased':
- await findUserCapInput().vm.$emit('input', USER_CAP_DEFAULT - 1);
- break;
- case 'changed from limited to unlimited':
- await findUserCapInput().vm.$emit('input', '');
- break;
- case 'changed from unlimited to limited':
- await findUserCapInput().vm.$emit('input', USER_CAP_DEFAULT);
- break;
- default:
- break;
- }
-
- await findFormSubmitButton().trigger('click');
-
- if (isFormSubmittedWhenClickingFormSubmitButton) {
- expect(formSubmitSpy).toHaveBeenCalled();
- expect(findModal().vm.show).not.toHaveBeenCalled();
- } else {
- expect(formSubmitSpy).not.toHaveBeenCalled();
- expect(findModal().vm.show).toHaveBeenCalled();
- }
- },
- );
-
describe('modal actions', () => {
beforeEach(async () => {
const INITIAL_USER_CAP = 5;
diff --git a/spec/frontend/admin/users/components/actions/actions_spec.js b/spec/frontend/admin/users/components/actions/actions_spec.js
index fd05b08a3fb..67dcf5c6149 100644
--- a/spec/frontend/admin/users/components/actions/actions_spec.js
+++ b/spec/frontend/admin/users/components/actions/actions_spec.js
@@ -5,6 +5,7 @@ import { nextTick } from 'vue';
import Actions from '~/admin/users/components/actions';
import SharedDeleteAction from '~/admin/users/components/actions/shared/shared_delete_action.vue';
import { capitalizeFirstCharacter } from '~/lib/utils/text_utility';
+import { OBSTACLE_TYPES } from '~/vue_shared/components/user_deletion_obstacles/constants';
import { CONFIRMATION_ACTIONS, DELETE_ACTIONS } from '../../constants';
import { paths } from '../../mock_data';
@@ -46,7 +47,10 @@ describe('Action components', () => {
});
describe('DELETE_ACTION_COMPONENTS', () => {
- const oncallSchedules = [{ name: 'schedule1' }, { name: 'schedule2' }];
+ const userDeletionObstacles = [
+ { name: 'schedule1', type: OBSTACLE_TYPES.oncallSchedules },
+ { name: 'policy1', type: OBSTACLE_TYPES.escalationPolicies },
+ ];
it.each(DELETE_ACTIONS.map((action) => [action, paths[action]]))(
'renders a dropdown item for "%s"',
@@ -56,7 +60,7 @@ describe('Action components', () => {
props: {
username: 'John Doe',
paths,
- oncallSchedules,
+ userDeletionObstacles,
},
stubs: { SharedDeleteAction },
});
@@ -69,8 +73,8 @@ describe('Action components', () => {
expect(sharedAction.attributes('data-delete-user-url')).toBe(expectedPath);
expect(sharedAction.attributes('data-gl-modal-action')).toBe(kebabCase(action));
expect(sharedAction.attributes('data-username')).toBe('John Doe');
- expect(sharedAction.attributes('data-oncall-schedules')).toBe(
- JSON.stringify(oncallSchedules),
+ expect(sharedAction.attributes('data-user-deletion-obstacles')).toBe(
+ JSON.stringify(userDeletionObstacles),
);
expect(findDropdownItem().exists()).toBe(true);
},
diff --git a/spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap b/spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap
index 5e367891337..472158a9b10 100644
--- a/spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap
+++ b/spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap
@@ -8,8 +8,8 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
/>
</p>
- <oncall-schedules-list-stub
- schedules="schedule1,schedule2"
+ <user-deletion-obstacles-list-stub
+ obstacles="schedule1,policy1"
username="username"
/>
diff --git a/spec/frontend/admin/users/components/modals/delete_user_modal_spec.js b/spec/frontend/admin/users/components/modals/delete_user_modal_spec.js
index fee74764645..82307c9e3b3 100644
--- a/spec/frontend/admin/users/components/modals/delete_user_modal_spec.js
+++ b/spec/frontend/admin/users/components/modals/delete_user_modal_spec.js
@@ -1,7 +1,7 @@
import { GlButton, GlFormInput } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import DeleteUserModal from '~/admin/users/components/modals/delete_user_modal.vue';
-import OncallSchedulesList from '~/vue_shared/components/oncall_schedules_list.vue';
+import UserDeletionObstaclesList from '~/vue_shared/components/user_deletion_obstacles/user_deletion_obstacles_list.vue';
import ModalStub from './stubs/modal_stub';
const TEST_DELETE_USER_URL = 'delete-url';
@@ -25,7 +25,7 @@ describe('User Operation confirmation modal', () => {
const getUsername = () => findUsernameInput().attributes('value');
const getMethodParam = () => new FormData(findForm().element).get('_method');
const getFormAction = () => findForm().attributes('action');
- const findOnCallSchedulesList = () => wrapper.findComponent(OncallSchedulesList);
+ const findUserDeletionObstaclesList = () => wrapper.findComponent(UserDeletionObstaclesList);
const setUsername = (username) => {
findUsernameInput().vm.$emit('input', username);
@@ -33,7 +33,7 @@ describe('User Operation confirmation modal', () => {
const username = 'username';
const badUsername = 'bad_username';
- const oncallSchedules = '["schedule1", "schedule2"]';
+ const userDeletionObstacles = '["schedule1", "policy1"]';
const createComponent = (props = {}) => {
wrapper = shallowMount(DeleteUserModal, {
@@ -46,7 +46,7 @@ describe('User Operation confirmation modal', () => {
deleteUserUrl: TEST_DELETE_USER_URL,
blockUserUrl: TEST_BLOCK_USER_URL,
csrfToken: TEST_CSRF,
- oncallSchedules,
+ userDeletionObstacles,
...props,
},
stubs: {
@@ -150,18 +150,18 @@ describe('User Operation confirmation modal', () => {
});
});
- describe('Related oncall-schedules list', () => {
- it('does NOT render the list when user has no related schedules', () => {
- createComponent({ oncallSchedules: '[]' });
- expect(findOnCallSchedulesList().exists()).toBe(false);
+ describe('Related user-deletion-obstacles list', () => {
+ it('does NOT render the list when user has no related obstacles', () => {
+ createComponent({ userDeletionObstacles: '[]' });
+ expect(findUserDeletionObstaclesList().exists()).toBe(false);
});
- it('renders the list when user has related schedules', () => {
+ it('renders the list when user has related obstalces', () => {
createComponent();
- const schedules = findOnCallSchedulesList();
- expect(schedules.exists()).toBe(true);
- expect(schedules.props('schedules')).toEqual(JSON.parse(oncallSchedules));
+ const obstacles = findUserDeletionObstaclesList();
+ expect(obstacles.exists()).toBe(true);
+ expect(obstacles.props('obstacles')).toEqual(JSON.parse(userDeletionObstacles));
});
});
});
diff --git a/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap b/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap
index ddb188edb10..f4d3fd97fd8 100644
--- a/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap
+++ b/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap
@@ -52,13 +52,13 @@ exports[`Alert integration settings form default state should match the default
block="true"
category="primary"
clearalltext="Clear all"
+ clearalltextclass="gl-px-5"
data-qa-selector="incident_templates_dropdown"
headertext=""
hideheaderborder="true"
highlighteditemstitle="Selected"
highlighteditemstitleclass="gl-px-5"
id="alert-integration-settings-issue-template"
- showhighlighteditemstitle="true"
size="medium"
text="selecte_tmpl"
variant="default"
diff --git a/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js b/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js
index 2537b8fb816..5d681c7da4f 100644
--- a/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js
+++ b/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js
@@ -1,6 +1,8 @@
import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { stubComponent } from 'helpers/stub_component';
import { TEST_HOST } from 'helpers/test_constants';
+import waitForPromises from 'helpers/wait_for_promises';
import ProjectsDropdownFilter from '~/analytics/shared/components/projects_dropdown_filter.vue';
import getProjects from '~/analytics/shared/graphql/projects.query.graphql';
@@ -25,6 +27,17 @@ const projects = [
},
];
+const MockGlDropdown = stubComponent(GlDropdown, {
+ template: `
+ <div>
+ <div data-testid="vsa-highlighted-items">
+ <slot name="highlighted-items"></slot>
+ </div>
+ <div data-testid="vsa-default-items"><slot></slot></div>
+ </div>
+ `,
+});
+
const defaultMocks = {
$apollo: {
query: jest.fn().mockResolvedValue({
@@ -38,22 +51,33 @@ let spyQuery;
describe('ProjectsDropdownFilter component', () => {
let wrapper;
- const createComponent = (props = {}) => {
+ const createComponent = (props = {}, stubs = {}) => {
spyQuery = defaultMocks.$apollo.query;
- wrapper = mount(ProjectsDropdownFilter, {
+ wrapper = mountExtended(ProjectsDropdownFilter, {
mocks: { ...defaultMocks },
propsData: {
groupId: 1,
groupNamespace: 'gitlab-org',
...props,
},
+ stubs,
});
};
+ const createWithMockDropdown = (props) => {
+ createComponent(props, { GlDropdown: MockGlDropdown });
+ return waitForPromises();
+ };
+
afterEach(() => {
wrapper.destroy();
});
+ const findHighlightedItems = () => wrapper.findByTestId('vsa-highlighted-items');
+ const findUnhighlightedItems = () => wrapper.findByTestId('vsa-default-items');
+ const findHighlightedItemsTitle = () => wrapper.findByText('Selected');
+ const findClearAllButton = () => wrapper.findByText('Clear all');
+
const findDropdown = () => wrapper.find(GlDropdown);
const findDropdownItems = () =>
@@ -75,8 +99,19 @@ describe('ProjectsDropdownFilter component', () => {
const findDropdownFullPathAtIndex = (index) =>
findDropdownAtIndex(index).find('[data-testid="project-full-path"]');
- const selectDropdownItemAtIndex = (index) =>
+ const selectDropdownItemAtIndex = (index) => {
findDropdownAtIndex(index).find('button').trigger('click');
+ return wrapper.vm.$nextTick();
+ };
+
+ // NOTE: Selected items are now visually separated from unselected items
+ const findSelectedDropdownItems = () => findHighlightedItems().findAll(GlDropdownItem);
+
+ const findSelectedDropdownAtIndex = (index) => findSelectedDropdownItems().at(index);
+ const findSelectedButtonIdentIconAtIndex = (index) =>
+ findSelectedDropdownAtIndex(index).find('div.gl-avatar-identicon');
+ const findSelectedButtonAvatarItemAtIndex = (index) =>
+ findSelectedDropdownAtIndex(index).find('img.gl-avatar');
const selectedIds = () => wrapper.vm.selectedProjects.map(({ id }) => id);
@@ -109,7 +144,80 @@ describe('ProjectsDropdownFilter component', () => {
});
});
- describe('when passed a an array of defaultProject as prop', () => {
+ describe('highlighted items', () => {
+ const blockDefaultProps = { multiSelect: true };
+ beforeEach(() => {
+ createComponent(blockDefaultProps);
+ });
+
+ describe('with no project selected', () => {
+ it('does not render the highlighted items', async () => {
+ await createWithMockDropdown(blockDefaultProps);
+ expect(findSelectedDropdownItems().length).toBe(0);
+ });
+
+ it('does not render the highlighted items title', () => {
+ expect(findHighlightedItemsTitle().exists()).toBe(false);
+ });
+
+ it('does not render the clear all button', () => {
+ expect(findClearAllButton().exists()).toBe(false);
+ });
+ });
+
+ describe('with a selected project', () => {
+ beforeEach(async () => {
+ await selectDropdownItemAtIndex(0);
+ });
+
+ it('renders the highlighted items', async () => {
+ await createWithMockDropdown(blockDefaultProps);
+ await selectDropdownItemAtIndex(0);
+
+ expect(findSelectedDropdownItems().length).toBe(1);
+ });
+
+ it('renders the highlighted items title', () => {
+ expect(findHighlightedItemsTitle().exists()).toBe(true);
+ });
+
+ it('renders the clear all button', () => {
+ expect(findClearAllButton().exists()).toBe(true);
+ });
+
+ it('clears all selected items when the clear all button is clicked', async () => {
+ await selectDropdownItemAtIndex(1);
+
+ expect(wrapper.text()).toContain('2 projects selected');
+
+ findClearAllButton().trigger('click');
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.text()).not.toContain('2 projects selected');
+ expect(wrapper.text()).toContain('Select projects');
+ });
+ });
+ });
+
+ describe('with a selected project and search term', () => {
+ beforeEach(async () => {
+ await createWithMockDropdown({ multiSelect: true });
+
+ selectDropdownItemAtIndex(0);
+ wrapper.setData({ searchTerm: 'this is a very long search string' });
+ });
+
+ it('renders the highlighted items', async () => {
+ expect(findUnhighlightedItems().findAll('li').length).toBe(1);
+ });
+
+ it('hides the unhighlighted items that do not match the string', async () => {
+ expect(findUnhighlightedItems().findAll('li').length).toBe(1);
+ expect(findUnhighlightedItems().text()).toContain('No matching results');
+ });
+ });
+
+ describe('when passed an array of defaultProject as prop', () => {
beforeEach(() => {
createComponent({
defaultProjects: [projects[0]],
@@ -130,8 +238,9 @@ describe('ProjectsDropdownFilter component', () => {
});
describe('when multiSelect is false', () => {
+ const blockDefaultProps = { multiSelect: false };
beforeEach(() => {
- createComponent({ multiSelect: false });
+ createComponent(blockDefaultProps);
});
describe('displays the correct information', () => {
@@ -183,21 +292,19 @@ describe('ProjectsDropdownFilter component', () => {
});
it('renders an avatar in the dropdown button when the project has an avatarUrl', async () => {
- selectDropdownItemAtIndex(0);
+ await createWithMockDropdown(blockDefaultProps);
+ await selectDropdownItemAtIndex(0);
- await wrapper.vm.$nextTick().then(() => {
- expect(findDropdownButtonAvatarAtIndex(0).exists()).toBe(true);
- expect(findDropdownButtonIdentIconAtIndex(0).exists()).toBe(false);
- });
+ expect(findSelectedButtonAvatarItemAtIndex(0).exists()).toBe(true);
+ expect(findSelectedButtonIdentIconAtIndex(0).exists()).toBe(false);
});
it("renders an identicon in the dropdown button when the project doesn't have an avatarUrl", async () => {
- selectDropdownItemAtIndex(1);
+ await createWithMockDropdown(blockDefaultProps);
+ await selectDropdownItemAtIndex(1);
- await wrapper.vm.$nextTick().then(() => {
- expect(findDropdownButtonAvatarAtIndex(1).exists()).toBe(false);
- expect(findDropdownButtonIdentIconAtIndex(1).exists()).toBe(true);
- });
+ expect(findSelectedButtonAvatarItemAtIndex(0).exists()).toBe(false);
+ expect(findSelectedButtonIdentIconAtIndex(0).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/analytics/shared/utils_spec.js b/spec/frontend/analytics/shared/utils_spec.js
index e3293f2d8bd..0513ccb2890 100644
--- a/spec/frontend/analytics/shared/utils_spec.js
+++ b/spec/frontend/analytics/shared/utils_spec.js
@@ -1,4 +1,10 @@
-import { filterBySearchTerm } from '~/analytics/shared/utils';
+import {
+ filterBySearchTerm,
+ extractFilterQueryParameters,
+ extractPaginationQueryParameters,
+ getDataZoomOption,
+} from '~/analytics/shared/utils';
+import { objectToQuery } from '~/lib/utils/url_utility';
describe('filterBySearchTerm', () => {
const data = [
@@ -22,3 +28,151 @@ describe('filterBySearchTerm', () => {
expect(filterBySearchTerm(data, 'ne', 'title')).toEqual([data[0]]);
});
});
+
+describe('extractFilterQueryParameters', () => {
+ const selectedAuthor = 'Author 1';
+ const selectedMilestone = 'Milestone 1.0';
+ const selectedSourceBranch = 'main';
+ const selectedTargetBranch = 'feature-1';
+ const selectedAssigneeList = ['Alice', 'Bob'];
+ const selectedLabelList = ['Label 1', 'Label 2'];
+
+ const queryParamsString = objectToQuery({
+ source_branch_name: selectedSourceBranch,
+ target_branch_name: selectedTargetBranch,
+ author_username: selectedAuthor,
+ milestone_title: selectedMilestone,
+ assignee_username: selectedAssigneeList,
+ label_name: selectedLabelList,
+ });
+
+ it('extracts the correct filter parameters from a url', () => {
+ const result = extractFilterQueryParameters(queryParamsString);
+ const operator = '=';
+ const expectedFilters = {
+ selectedAssigneeList: { operator, value: selectedAssigneeList.join(',') },
+ selectedLabelList: { operator, value: selectedLabelList.join(',') },
+ selectedAuthor: { operator, value: selectedAuthor },
+ selectedMilestone: { operator, value: selectedMilestone },
+ selectedSourceBranch: { operator, value: selectedSourceBranch },
+ selectedTargetBranch: { operator, value: selectedTargetBranch },
+ };
+ expect(result).toMatchObject(expectedFilters);
+ });
+
+ it('returns null for missing parameters', () => {
+ const result = extractFilterQueryParameters('');
+ const expectedFilters = {
+ selectedAuthor: null,
+ selectedMilestone: null,
+ selectedSourceBranch: null,
+ selectedTargetBranch: null,
+ };
+ expect(result).toMatchObject(expectedFilters);
+ });
+
+ it('only returns the parameters we expect', () => {
+ const result = extractFilterQueryParameters('foo="one"&bar="two"');
+ const resultKeys = Object.keys(result);
+ ['foo', 'bar'].forEach((key) => {
+ expect(resultKeys).not.toContain(key);
+ });
+
+ [
+ 'selectedAuthor',
+ 'selectedMilestone',
+ 'selectedSourceBranch',
+ 'selectedTargetBranch',
+ 'selectedAssigneeList',
+ 'selectedLabelList',
+ ].forEach((key) => {
+ expect(resultKeys).toContain(key);
+ });
+ });
+
+ it('returns an empty array for missing list parameters', () => {
+ const result = extractFilterQueryParameters('');
+ const expectedFilters = { selectedAssigneeList: [], selectedLabelList: [] };
+ expect(result).toMatchObject(expectedFilters);
+ });
+});
+
+describe('extractPaginationQueryParameters', () => {
+ const sort = 'title';
+ const direction = 'asc';
+ const page = '1';
+ const queryParamsString = objectToQuery({ sort, direction, page });
+
+ it('extracts the correct filter parameters from a url', () => {
+ const result = extractPaginationQueryParameters(queryParamsString);
+ const expectedFilters = { sort, page, direction };
+ expect(result).toMatchObject(expectedFilters);
+ });
+
+ it('returns null for missing parameters', () => {
+ const result = extractPaginationQueryParameters('');
+ const expectedFilters = { sort: null, direction: null, page: null };
+ expect(result).toMatchObject(expectedFilters);
+ });
+
+ it('only returns the parameters we expect', () => {
+ const result = extractPaginationQueryParameters('foo="one"&bar="two"&qux="three"');
+ const resultKeys = Object.keys(result);
+ ['foo', 'bar', 'qux'].forEach((key) => {
+ expect(resultKeys).not.toContain(key);
+ });
+
+ ['sort', 'page', 'direction'].forEach((key) => {
+ expect(resultKeys).toContain(key);
+ });
+ });
+});
+
+describe('getDataZoomOption', () => {
+ it('returns an empty object when totalItems <= maxItemsPerPage', () => {
+ const totalItems = 10;
+ const maxItemsPerPage = 20;
+
+ expect(getDataZoomOption({ totalItems, maxItemsPerPage })).toEqual({});
+ });
+
+ describe('when totalItems > maxItemsPerPage', () => {
+ const totalItems = 30;
+ const maxItemsPerPage = 20;
+
+ it('properly computes the end interval for the default datazoom config', () => {
+ const expected = [
+ {
+ type: 'slider',
+ bottom: 10,
+ start: 0,
+ end: 67,
+ },
+ ];
+
+ expect(getDataZoomOption({ totalItems, maxItemsPerPage })).toEqual(expected);
+ });
+
+ it('properly computes the end interval for a custom datazoom config', () => {
+ const dataZoom = [
+ { type: 'slider', bottom: 0, start: 0 },
+ { type: 'inside', start: 0 },
+ ];
+ const expected = [
+ {
+ type: 'slider',
+ bottom: 0,
+ start: 0,
+ end: 67,
+ },
+ {
+ type: 'inside',
+ start: 0,
+ end: 67,
+ },
+ ];
+
+ expect(getDataZoomOption({ totalItems, maxItemsPerPage, dataZoom })).toEqual(expected);
+ });
+ });
+});
diff --git a/spec/frontend/authentication/two_factor_auth/components/manage_two_factor_form_spec.js b/spec/frontend/authentication/two_factor_auth/components/manage_two_factor_form_spec.js
index 61c6a1dd167..870375318e3 100644
--- a/spec/frontend/authentication/two_factor_auth/components/manage_two_factor_form_spec.js
+++ b/spec/frontend/authentication/two_factor_auth/components/manage_two_factor_form_spec.js
@@ -1,5 +1,5 @@
-import { GlForm } from '@gitlab/ui';
import { within } from '@testing-library/dom';
+import { GlForm } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import ManageTwoFactorForm, {
diff --git a/spec/frontend/blob/file_template_mediator_spec.js b/spec/frontend/blob/file_template_mediator_spec.js
new file mode 100644
index 00000000000..44e12deb564
--- /dev/null
+++ b/spec/frontend/blob/file_template_mediator_spec.js
@@ -0,0 +1,53 @@
+import TemplateSelectorMediator from '~/blob/file_template_mediator';
+
+describe('Template Selector Mediator', () => {
+ let mediator;
+
+ describe('setFilename', () => {
+ let input;
+ const newFileName = 'foo';
+ const editor = jest.fn().mockImplementationOnce(() => ({
+ getValue: jest.fn().mockImplementation(() => {}),
+ }))();
+
+ beforeEach(() => {
+ setFixtures('<div class="file-editor"><input class="js-file-path-name-input" /></div>');
+ input = document.querySelector('.js-file-path-name-input');
+ mediator = new TemplateSelectorMediator({
+ editor,
+ currentAction: jest.fn(),
+ projectId: jest.fn(),
+ });
+ });
+
+ it('fills out the input field', () => {
+ expect(input.value).toBe('');
+ mediator.setFilename(newFileName);
+ expect(input.value).toBe(newFileName);
+ });
+
+ it.each`
+ name | newName | shouldDispatch
+ ${newFileName} | ${newFileName} | ${false}
+ ${newFileName} | ${''} | ${true}
+ ${newFileName} | ${undefined} | ${false}
+ ${''} | ${''} | ${false}
+ ${''} | ${newFileName} | ${true}
+ ${''} | ${undefined} | ${false}
+ `(
+ 'correctly reacts to the name change when current name is $name and newName is $newName',
+ ({ name, newName, shouldDispatch }) => {
+ input.value = name;
+ const eventHandler = jest.fn();
+ input.addEventListener('change', eventHandler);
+
+ mediator.setFilename(newName);
+ if (shouldDispatch) {
+ expect(eventHandler).toHaveBeenCalledTimes(1);
+ } else {
+ expect(eventHandler).not.toHaveBeenCalled();
+ }
+ },
+ );
+ });
+});
diff --git a/spec/frontend/boards/components/board_add_new_column_trigger_spec.js b/spec/frontend/boards/components/board_add_new_column_trigger_spec.js
new file mode 100644
index 00000000000..c35f2463f69
--- /dev/null
+++ b/spec/frontend/boards/components/board_add_new_column_trigger_spec.js
@@ -0,0 +1,59 @@
+import { GlButton } from '@gitlab/ui';
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import BoardAddNewColumnTrigger from '~/boards/components/board_add_new_column_trigger.vue';
+import { createStore } from '~/boards/stores';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+
+Vue.use(Vuex);
+
+describe('BoardAddNewColumnTrigger', () => {
+ let wrapper;
+
+ const findBoardsCreateList = () => wrapper.findByTestId('boards-create-list');
+ const findTooltipText = () => getBinding(findBoardsCreateList().element, 'gl-tooltip');
+
+ const mountComponent = () => {
+ wrapper = mountExtended(BoardAddNewColumnTrigger, {
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ store: createStore(),
+ });
+ };
+
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when button is active', () => {
+ it('does not show the tooltip', () => {
+ const tooltip = findTooltipText();
+
+ expect(tooltip.value).toBe('');
+ });
+
+ it('renders an enabled button', () => {
+ const button = wrapper.find(GlButton);
+
+ expect(button.props('disabled')).toBe(false);
+ });
+ });
+
+ describe('when button is disabled', () => {
+ it('shows the tooltip', async () => {
+ wrapper.find(GlButton).vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
+ const tooltip = findTooltipText();
+
+ expect(tooltip.value).toBe('The list creation wizard is already open');
+ });
+ });
+});
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index 62e0fa7a68a..0b90912a584 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -21,9 +21,10 @@ import {
getMoveData,
updateListPosition,
} from '~/boards/boards_util';
+import { gqlClient } from '~/boards/graphql';
import destroyBoardListMutation from '~/boards/graphql/board_list_destroy.mutation.graphql';
import issueCreateMutation from '~/boards/graphql/issue_create.mutation.graphql';
-import actions, { gqlClient } from '~/boards/stores/actions';
+import actions from '~/boards/stores/actions';
import * as types from '~/boards/stores/mutation_types';
import mutations from '~/boards/stores/mutations';
@@ -1331,20 +1332,54 @@ describe('addListItem', () => {
list: mockLists[0],
item: mockIssue,
position: 0,
+ inProgress: true,
};
- testAction(actions.addListItem, payload, {}, [
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: {
- listId: mockLists[0].id,
- itemId: mockIssue.id,
- atIndex: 0,
- inProgress: false,
+ testAction(
+ actions.addListItem,
+ payload,
+ {},
+ [
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: {
+ listId: mockLists[0].id,
+ itemId: mockIssue.id,
+ atIndex: 0,
+ inProgress: true,
+ },
},
- },
- { type: types.UPDATE_BOARD_ITEM, payload: mockIssue },
- ]);
+ { type: types.UPDATE_BOARD_ITEM, payload: mockIssue },
+ ],
+ [],
+ );
+ });
+
+ it('should commit ADD_BOARD_ITEM_TO_LIST and UPDATE_BOARD_ITEM mutations, dispatch setActiveId action when inProgress is false', () => {
+ const payload = {
+ list: mockLists[0],
+ item: mockIssue,
+ position: 0,
+ };
+
+ testAction(
+ actions.addListItem,
+ payload,
+ {},
+ [
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: {
+ listId: mockLists[0].id,
+ itemId: mockIssue.id,
+ atIndex: 0,
+ inProgress: false,
+ },
+ },
+ { type: types.UPDATE_BOARD_ITEM, payload: mockIssue },
+ ],
+ [{ type: 'setActiveId', payload: { id: mockIssue.id, sidebarType: ISSUABLE } }],
+ );
});
});
@@ -1542,7 +1577,7 @@ describe('setActiveIssueLabels', () => {
projectPath: 'h/b',
};
- it('should assign labels on success', (done) => {
+ it('should assign labels on success, and sets loading state for labels', (done) => {
jest
.spyOn(gqlClient, 'mutate')
.mockResolvedValue({ data: { updateIssue: { issue: { labels: { nodes: labels } } } } });
@@ -1559,6 +1594,14 @@ describe('setActiveIssueLabels', () => {
{ ...state, ...getters },
[
{
+ type: types.SET_LABELS_LOADING,
+ payload: true,
+ },
+ {
+ type: types.SET_LABELS_LOADING,
+ payload: false,
+ },
+ {
type: types.UPDATE_BOARD_ITEM_BY_ID,
payload,
},
diff --git a/spec/frontend/clusters/agents/components/show_spec.js b/spec/frontend/clusters/agents/components/show_spec.js
new file mode 100644
index 00000000000..fd04ff8b3e7
--- /dev/null
+++ b/spec/frontend/clusters/agents/components/show_spec.js
@@ -0,0 +1,195 @@
+import { GlAlert, GlKeysetPagination, GlLoadingIcon, GlSprintf, GlTab } from '@gitlab/ui';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import ClusterAgentShow from '~/clusters/agents/components/show.vue';
+import TokenTable from '~/clusters/agents/components/token_table.vue';
+import getAgentQuery from '~/clusters/agents/graphql/queries/get_cluster_agent.query.graphql';
+import { useFakeDate } from 'helpers/fake_date';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+describe('ClusterAgentShow', () => {
+ let wrapper;
+ useFakeDate([2021, 2, 15]);
+
+ const propsData = {
+ agentName: 'cluster-agent',
+ projectPath: 'path/to/project',
+ };
+
+ const defaultClusterAgent = {
+ id: '1',
+ createdAt: '2021-02-13T00:00:00Z',
+ createdByUser: {
+ name: 'user-1',
+ },
+ name: 'token-1',
+ tokens: {
+ count: 1,
+ nodes: [],
+ pageInfo: null,
+ },
+ };
+
+ const createWrapper = ({ clusterAgent, queryResponse = null }) => {
+ const agentQueryResponse =
+ queryResponse || jest.fn().mockResolvedValue({ data: { project: { clusterAgent } } });
+ const apolloProvider = createMockApollo([[getAgentQuery, agentQueryResponse]]);
+
+ wrapper = shallowMount(ClusterAgentShow, {
+ localVue,
+ apolloProvider,
+ propsData,
+ stubs: { GlSprintf, TimeAgoTooltip, GlTab },
+ });
+ };
+
+ const createWrapperWithoutApollo = ({ clusterAgent, loading = false }) => {
+ const $apollo = { queries: { clusterAgent: { loading } } };
+
+ wrapper = shallowMount(ClusterAgentShow, {
+ propsData,
+ mocks: { $apollo, clusterAgent },
+ stubs: { GlTab },
+ });
+ };
+
+ const findCreatedText = () => wrapper.find('[data-testid="cluster-agent-create-info"]').text();
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findPaginationButtons = () => wrapper.find(GlKeysetPagination);
+ const findTokenCount = () => wrapper.find('[data-testid="cluster-agent-token-count"]').text();
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('default behaviour', () => {
+ beforeEach(() => {
+ return createWrapper({ clusterAgent: defaultClusterAgent });
+ });
+
+ it('displays the agent name', () => {
+ expect(wrapper.text()).toContain(propsData.agentName);
+ });
+
+ it('displays agent create information', () => {
+ expect(findCreatedText()).toMatchInterpolatedText('Created by user-1 2 days ago');
+ });
+
+ it('displays token count', () => {
+ expect(findTokenCount()).toMatchInterpolatedText(
+ `${ClusterAgentShow.i18n.tokens} ${defaultClusterAgent.tokens.count}`,
+ );
+ });
+
+ it('renders token table', () => {
+ expect(wrapper.find(TokenTable).exists()).toBe(true);
+ });
+
+ it('should not render pagination buttons when there are no additional pages', () => {
+ expect(findPaginationButtons().exists()).toBe(false);
+ });
+ });
+
+ describe('when create user is unknown', () => {
+ const missingUser = {
+ ...defaultClusterAgent,
+ createdByUser: null,
+ };
+
+ beforeEach(() => {
+ return createWrapper({ clusterAgent: missingUser });
+ });
+
+ it('displays agent create information with unknown user', () => {
+ expect(findCreatedText()).toMatchInterpolatedText('Created by Unknown user 2 days ago');
+ });
+ });
+
+ describe('when token count is missing', () => {
+ const missingTokens = {
+ ...defaultClusterAgent,
+ tokens: null,
+ };
+
+ beforeEach(() => {
+ return createWrapper({ clusterAgent: missingTokens });
+ });
+
+ it('displays token header with no count', () => {
+ expect(findTokenCount()).toMatchInterpolatedText(`${ClusterAgentShow.i18n.tokens}`);
+ });
+ });
+
+ describe('when the token list has additional pages', () => {
+ const pageInfo = {
+ hasNextPage: true,
+ hasPreviousPage: false,
+ startCursor: 'prev',
+ endCursor: 'next',
+ };
+
+ const tokenPagination = {
+ ...defaultClusterAgent,
+ tokens: {
+ ...defaultClusterAgent.tokens,
+ pageInfo,
+ },
+ };
+
+ beforeEach(() => {
+ return createWrapper({ clusterAgent: tokenPagination });
+ });
+
+ it('should render pagination buttons', () => {
+ expect(findPaginationButtons().exists()).toBe(true);
+ });
+
+ it('should pass pageInfo to the pagination component', () => {
+ expect(findPaginationButtons().props()).toMatchObject(pageInfo);
+ });
+ });
+
+ describe('when the agent query is loading', () => {
+ describe('when the clusterAgent is missing', () => {
+ beforeEach(() => {
+ return createWrapper({
+ clusterAgent: null,
+ queryResponse: jest.fn().mockReturnValue(new Promise(() => {})),
+ });
+ });
+
+ it('displays a loading icon and hides the token tab', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(wrapper.text()).not.toContain(ClusterAgentShow.i18n.tokens);
+ });
+ });
+
+ describe('when the clusterAgent is present', () => {
+ beforeEach(() => {
+ createWrapperWithoutApollo({ clusterAgent: defaultClusterAgent, loading: true });
+ });
+
+ it('displays a loading icon and token tab', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(wrapper.text()).toContain(ClusterAgentShow.i18n.tokens);
+ });
+ });
+ });
+
+ describe('when the agent query has errored', () => {
+ beforeEach(() => {
+ createWrapper({ clusterAgent: null, queryResponse: jest.fn().mockRejectedValue() });
+ return waitForPromises();
+ });
+
+ it('displays an alert message', () => {
+ expect(wrapper.find(GlAlert).exists()).toBe(true);
+ expect(wrapper.text()).toContain(ClusterAgentShow.i18n.loadingError);
+ });
+ });
+});
diff --git a/spec/frontend/clusters/agents/components/token_table_spec.js b/spec/frontend/clusters/agents/components/token_table_spec.js
new file mode 100644
index 00000000000..47ff944dd84
--- /dev/null
+++ b/spec/frontend/clusters/agents/components/token_table_spec.js
@@ -0,0 +1,135 @@
+import { GlEmptyState, GlLink, GlTooltip, GlTruncate } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import TokenTable from '~/clusters/agents/components/token_table.vue';
+import { useFakeDate } from 'helpers/fake_date';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+
+describe('ClusterAgentTokenTable', () => {
+ let wrapper;
+ useFakeDate([2021, 2, 15]);
+
+ const defaultTokens = [
+ {
+ id: '1',
+ createdAt: '2021-02-13T00:00:00Z',
+ description: 'Description of token 1',
+ createdByUser: {
+ name: 'user-1',
+ },
+ lastUsedAt: '2021-02-13T00:00:00Z',
+ name: 'token-1',
+ },
+ {
+ id: '2',
+ createdAt: '2021-02-10T00:00:00Z',
+ description: null,
+ createdByUser: null,
+ lastUsedAt: null,
+ name: 'token-2',
+ },
+ ];
+
+ const createComponent = (tokens) => {
+ wrapper = extendedWrapper(mount(TokenTable, { propsData: { tokens } }));
+ };
+
+ const findEmptyState = () => wrapper.find(GlEmptyState);
+ const findLink = () => wrapper.find(GlLink);
+
+ beforeEach(() => {
+ return createComponent(defaultTokens);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays a learn more link', () => {
+ const learnMoreLink = findLink();
+
+ expect(learnMoreLink.exists()).toBe(true);
+ expect(learnMoreLink.text()).toBe(TokenTable.i18n.learnMore);
+ });
+
+ it.each`
+ name | lineNumber
+ ${'token-1'} | ${0}
+ ${'token-2'} | ${1}
+ `('displays token name "$name" for line "$lineNumber"', ({ name, lineNumber }) => {
+ const tokens = wrapper.findAll('[data-testid="agent-token-name"]');
+ const token = tokens.at(lineNumber);
+
+ expect(token.text()).toBe(name);
+ });
+
+ it.each`
+ lastContactText | lineNumber
+ ${'2 days ago'} | ${0}
+ ${'Never'} | ${1}
+ `(
+ 'displays last contact information "$lastContactText" for line "$lineNumber"',
+ ({ lastContactText, lineNumber }) => {
+ const tokens = wrapper.findAllByTestId('agent-token-used');
+ const token = tokens.at(lineNumber);
+
+ expect(token.text()).toBe(lastContactText);
+ },
+ );
+
+ it.each`
+ createdText | lineNumber
+ ${'2 days ago'} | ${0}
+ ${'5 days ago'} | ${1}
+ `(
+ 'displays created information "$createdText" for line "$lineNumber"',
+ ({ createdText, lineNumber }) => {
+ const tokens = wrapper.findAll('[data-testid="agent-token-created-time"]');
+ const token = tokens.at(lineNumber);
+
+ expect(token.text()).toBe(createdText);
+ },
+ );
+
+ it.each`
+ createdBy | lineNumber
+ ${'user-1'} | ${0}
+ ${'Unknown user'} | ${1}
+ `(
+ 'displays creator information "$createdBy" for line "$lineNumber"',
+ ({ createdBy, lineNumber }) => {
+ const tokens = wrapper.findAll('[data-testid="agent-token-created-user"]');
+ const token = tokens.at(lineNumber);
+
+ expect(token.text()).toBe(createdBy);
+ },
+ );
+
+ it.each`
+ description | truncatesText | hasTooltip | lineNumber
+ ${'Description of token 1'} | ${true} | ${true} | ${0}
+ ${''} | ${false} | ${false} | ${1}
+ `(
+ 'displays description information "$description" for line "$lineNumber"',
+ ({ description, truncatesText, hasTooltip, lineNumber }) => {
+ const tokens = wrapper.findAll('[data-testid="agent-token-description"]');
+ const token = tokens.at(lineNumber);
+
+ expect(token.text()).toContain(description);
+ expect(token.find(GlTruncate).exists()).toBe(truncatesText);
+ expect(token.find(GlTooltip).exists()).toBe(hasTooltip);
+ },
+ );
+
+ describe('when there are no tokens', () => {
+ beforeEach(() => {
+ return createComponent([]);
+ });
+
+ it('displays an empty state', () => {
+ const emptyState = findEmptyState();
+
+ expect(emptyState.exists()).toBe(true);
+ expect(emptyState.text()).toContain(TokenTable.i18n.noTokens);
+ });
+ });
+});
diff --git a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
index b34265b7234..42d81900911 100644
--- a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
+++ b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
@@ -33,7 +33,7 @@ exports[`Remove cluster confirmation modal renders splitbutton with modal includ
<span
class="sr-only"
>
- Toggle Dropdown
+ Toggle dropdown
</span>
</button>
<ul
@@ -46,21 +46,7 @@ exports[`Remove cluster confirmation modal renders splitbutton with modal includ
>
<!---->
- <div
- class="gl-display-flex gl-flex-direction-row gl-justify-content-space-between gl-align-items-center gl-px-5"
- >
- <div
- class="gl-display-flex"
- >
- <!---->
- </div>
-
- <div
- class="gl-display-flex"
- >
- <!---->
- </div>
- </div>
+ <!---->
<div
class="gl-new-dropdown-contents"
diff --git a/spec/frontend/clusters_list/components/agent_empty_state_spec.js b/spec/frontend/clusters_list/components/agent_empty_state_spec.js
new file mode 100644
index 00000000000..a548721588e
--- /dev/null
+++ b/spec/frontend/clusters_list/components/agent_empty_state_spec.js
@@ -0,0 +1,77 @@
+import { GlAlert, GlEmptyState, GlSprintf } from '@gitlab/ui';
+import AgentEmptyState from '~/clusters_list/components/agent_empty_state.vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+const emptyStateImage = '/path/to/image';
+const projectPath = 'path/to/project';
+const agentDocsUrl = 'path/to/agentDocs';
+const installDocsUrl = 'path/to/installDocs';
+const getStartedDocsUrl = 'path/to/getStartedDocs';
+const integrationDocsUrl = 'path/to/integrationDocs';
+
+describe('AgentEmptyStateComponent', () => {
+ let wrapper;
+
+ const propsData = {
+ hasConfigurations: false,
+ };
+ const provideData = {
+ emptyStateImage,
+ projectPath,
+ agentDocsUrl,
+ installDocsUrl,
+ getStartedDocsUrl,
+ integrationDocsUrl,
+ };
+
+ const findConfigurationsAlert = () => wrapper.findComponent(GlAlert);
+ const findAgentDocsLink = () => wrapper.findByTestId('agent-docs-link');
+ const findInstallDocsLink = () => wrapper.findByTestId('install-docs-link');
+ const findIntegrationButton = () => wrapper.findByTestId('integration-primary-button');
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+
+ beforeEach(() => {
+ wrapper = shallowMountExtended(AgentEmptyState, {
+ propsData,
+ provide: provideData,
+ stubs: { GlEmptyState, GlSprintf },
+ });
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
+
+ it('renders correct href attributes for the links', () => {
+ expect(findAgentDocsLink().attributes('href')).toBe(agentDocsUrl);
+ expect(findInstallDocsLink().attributes('href')).toBe(installDocsUrl);
+ });
+
+ describe('when there are no agent configurations in repository', () => {
+ it('should render notification message box', () => {
+ expect(findConfigurationsAlert().exists()).toBe(true);
+ });
+
+ it('should disable integration button', () => {
+ expect(findIntegrationButton().attributes('disabled')).toBe('true');
+ });
+ });
+
+ describe('when there is a list of agent configurations', () => {
+ beforeEach(() => {
+ propsData.hasConfigurations = true;
+ wrapper = shallowMountExtended(AgentEmptyState, {
+ propsData,
+ provide: provideData,
+ });
+ });
+ it('should render content without notification message box', () => {
+ expect(findEmptyState().exists()).toBe(true);
+ expect(findConfigurationsAlert().exists()).toBe(false);
+ expect(findIntegrationButton().attributes('disabled')).toBeUndefined();
+ });
+ });
+});
diff --git a/spec/frontend/clusters_list/components/agent_table_spec.js b/spec/frontend/clusters_list/components/agent_table_spec.js
new file mode 100644
index 00000000000..e3b90584f29
--- /dev/null
+++ b/spec/frontend/clusters_list/components/agent_table_spec.js
@@ -0,0 +1,117 @@
+import { GlButton, GlLink, GlIcon } from '@gitlab/ui';
+import AgentTable from '~/clusters_list/components/agent_table.vue';
+import { ACTIVE_CONNECTION_TIME } from '~/clusters_list/constants';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import timeagoMixin from '~/vue_shared/mixins/timeago';
+
+const connectedTimeNow = new Date();
+const connectedTimeInactive = new Date(connectedTimeNow.getTime() - ACTIVE_CONNECTION_TIME);
+
+const propsData = {
+ agents: [
+ {
+ name: 'agent-1',
+ configFolder: {
+ webPath: '/agent/full/path',
+ },
+ webPath: '/agent-1',
+ status: 'unused',
+ lastContact: null,
+ tokens: null,
+ },
+ {
+ name: 'agent-2',
+ webPath: '/agent-2',
+ status: 'active',
+ lastContact: connectedTimeNow.getTime(),
+ tokens: {
+ nodes: [
+ {
+ lastUsedAt: connectedTimeNow,
+ },
+ ],
+ },
+ },
+ {
+ name: 'agent-3',
+ webPath: '/agent-3',
+ status: 'inactive',
+ lastContact: connectedTimeInactive.getTime(),
+ tokens: {
+ nodes: [
+ {
+ lastUsedAt: connectedTimeInactive,
+ },
+ ],
+ },
+ },
+ ],
+};
+const provideData = { integrationDocsUrl: 'path/to/integrationDocs' };
+
+describe('AgentTable', () => {
+ let wrapper;
+
+ const findAgentLink = (at) => wrapper.findAllByTestId('cluster-agent-name-link').at(at);
+ const findStatusIcon = (at) => wrapper.findAllComponents(GlIcon).at(at);
+ const findStatusText = (at) => wrapper.findAllByTestId('cluster-agent-connection-status').at(at);
+ const findLastContactText = (at) => wrapper.findAllByTestId('cluster-agent-last-contact').at(at);
+ const findConfiguration = (at) =>
+ wrapper.findAllByTestId('cluster-agent-configuration-link').at(at);
+
+ beforeEach(() => {
+ wrapper = mountExtended(AgentTable, { propsData, provide: provideData });
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
+
+ it('displays header button', () => {
+ expect(wrapper.find(GlButton).text()).toBe('Install a new GitLab Agent');
+ });
+
+ describe('agent table', () => {
+ it.each`
+ agentName | link | lineNumber
+ ${'agent-1'} | ${'/agent-1'} | ${0}
+ ${'agent-2'} | ${'/agent-2'} | ${1}
+ `('displays agent link', ({ agentName, link, lineNumber }) => {
+ expect(findAgentLink(lineNumber).text()).toBe(agentName);
+ expect(findAgentLink(lineNumber).attributes('href')).toBe(link);
+ });
+
+ it.each`
+ status | iconName | lineNumber
+ ${'Never connected'} | ${'status-neutral'} | ${0}
+ ${'Connected'} | ${'status-success'} | ${1}
+ ${'Not connected'} | ${'severity-critical'} | ${2}
+ `('displays agent connection status', ({ status, iconName, lineNumber }) => {
+ expect(findStatusText(lineNumber).text()).toBe(status);
+ expect(findStatusIcon(lineNumber).props('name')).toBe(iconName);
+ });
+
+ it.each`
+ lastContact | lineNumber
+ ${'Never'} | ${0}
+ ${timeagoMixin.methods.timeFormatted(connectedTimeNow)} | ${1}
+ ${timeagoMixin.methods.timeFormatted(connectedTimeInactive)} | ${2}
+ `('displays agent last contact time', ({ lastContact, lineNumber }) => {
+ expect(findLastContactText(lineNumber).text()).toBe(lastContact);
+ });
+
+ it.each`
+ agentPath | hasLink | lineNumber
+ ${'.gitlab/agents/agent-1'} | ${true} | ${0}
+ ${'.gitlab/agents/agent-2'} | ${false} | ${1}
+ `('displays config file path', ({ agentPath, hasLink, lineNumber }) => {
+ const findLink = findConfiguration(lineNumber).find(GlLink);
+
+ expect(findLink.exists()).toBe(hasLink);
+ expect(findConfiguration(lineNumber).text()).toBe(agentPath);
+ });
+ });
+});
diff --git a/spec/frontend/clusters_list/components/agents_spec.js b/spec/frontend/clusters_list/components/agents_spec.js
new file mode 100644
index 00000000000..54d5ae94172
--- /dev/null
+++ b/spec/frontend/clusters_list/components/agents_spec.js
@@ -0,0 +1,246 @@
+import { GlAlert, GlKeysetPagination, GlLoadingIcon } from '@gitlab/ui';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import AgentEmptyState from '~/clusters_list/components/agent_empty_state.vue';
+import AgentTable from '~/clusters_list/components/agent_table.vue';
+import Agents from '~/clusters_list/components/agents.vue';
+import { ACTIVE_CONNECTION_TIME } from '~/clusters_list/constants';
+import getAgentsQuery from '~/clusters_list/graphql/queries/get_agents.query.graphql';
+import createMockApollo from 'helpers/mock_apollo_helper';
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+describe('Agents', () => {
+ let wrapper;
+
+ const propsData = {
+ defaultBranchName: 'default',
+ };
+ const provideData = {
+ projectPath: 'path/to/project',
+ kasAddress: 'kas.example.com',
+ };
+
+ const createWrapper = ({ agents = [], pageInfo = null, trees = [] }) => {
+ const provide = provideData;
+ const apolloQueryResponse = {
+ data: {
+ project: {
+ clusterAgents: { nodes: agents, pageInfo, tokens: { nodes: [] } },
+ repository: { tree: { trees: { nodes: trees, pageInfo } } },
+ },
+ },
+ };
+
+ const apolloProvider = createMockApollo([
+ [getAgentsQuery, jest.fn().mockResolvedValue(apolloQueryResponse, provide)],
+ ]);
+
+ wrapper = shallowMount(Agents, {
+ localVue,
+ apolloProvider,
+ propsData,
+ provide: provideData,
+ });
+
+ return wrapper.vm.$nextTick();
+ };
+
+ const findAgentTable = () => wrapper.find(AgentTable);
+ const findEmptyState = () => wrapper.find(AgentEmptyState);
+ const findPaginationButtons = () => wrapper.find(GlKeysetPagination);
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
+
+ describe('when there is a list of agents', () => {
+ let testDate = new Date();
+ const agents = [
+ {
+ id: '1',
+ name: 'agent-1',
+ webPath: '/agent-1',
+ tokens: null,
+ },
+ {
+ id: '2',
+ name: 'agent-2',
+ webPath: '/agent-2',
+ tokens: {
+ nodes: [
+ {
+ lastUsedAt: testDate,
+ },
+ ],
+ },
+ },
+ ];
+
+ const trees = [
+ {
+ name: 'agent-2',
+ path: '.gitlab/agents/agent-2',
+ webPath: '/project/path/.gitlab/agents/agent-2',
+ },
+ ];
+
+ const expectedAgentsList = [
+ {
+ id: '1',
+ name: 'agent-1',
+ webPath: '/agent-1',
+ configFolder: undefined,
+ status: 'unused',
+ lastContact: null,
+ tokens: null,
+ },
+ {
+ id: '2',
+ name: 'agent-2',
+ configFolder: {
+ name: 'agent-2',
+ path: '.gitlab/agents/agent-2',
+ webPath: '/project/path/.gitlab/agents/agent-2',
+ },
+ webPath: '/agent-2',
+ status: 'active',
+ lastContact: new Date(testDate).getTime(),
+ tokens: {
+ nodes: [
+ {
+ lastUsedAt: testDate,
+ },
+ ],
+ },
+ },
+ ];
+
+ beforeEach(() => {
+ return createWrapper({ agents, trees });
+ });
+
+ it('should render agent table', () => {
+ expect(findAgentTable().exists()).toBe(true);
+ expect(findEmptyState().exists()).toBe(false);
+ });
+
+ it('should pass agent and folder info to table component', () => {
+ expect(findAgentTable().props('agents')).toMatchObject(expectedAgentsList);
+ });
+
+ describe('when the agent has recently connected tokens', () => {
+ it('should set agent status to active', () => {
+ expect(findAgentTable().props('agents')).toMatchObject(expectedAgentsList);
+ });
+ });
+
+ describe('when the agent has tokens connected more then 8 minutes ago', () => {
+ const now = new Date();
+ testDate = new Date(now.getTime() - ACTIVE_CONNECTION_TIME);
+ it('should set agent status to inactive', () => {
+ expect(findAgentTable().props('agents')).toMatchObject(expectedAgentsList);
+ });
+ });
+
+ describe('when the agent has no connected tokens', () => {
+ testDate = null;
+ it('should set agent status to unused', () => {
+ expect(findAgentTable().props('agents')).toMatchObject(expectedAgentsList);
+ });
+ });
+
+ it('should not render pagination buttons when there are no additional pages', () => {
+ expect(findPaginationButtons().exists()).toBe(false);
+ });
+
+ describe('when the list has additional pages', () => {
+ const pageInfo = {
+ hasNextPage: true,
+ hasPreviousPage: false,
+ startCursor: 'prev',
+ endCursor: 'next',
+ };
+
+ beforeEach(() => {
+ return createWrapper({
+ agents,
+ pageInfo,
+ });
+ });
+
+ it('should render pagination buttons', () => {
+ expect(findPaginationButtons().exists()).toBe(true);
+ });
+
+ it('should pass pageInfo to the pagination component', () => {
+ expect(findPaginationButtons().props()).toMatchObject(pageInfo);
+ });
+ });
+ });
+
+ describe('when the agent list is empty', () => {
+ beforeEach(() => {
+ return createWrapper({ agents: [] });
+ });
+
+ it('should render empty state', () => {
+ expect(findAgentTable().exists()).toBe(false);
+ expect(findEmptyState().exists()).toBe(true);
+ });
+ });
+
+ describe('when the agent configurations are present', () => {
+ const trees = [
+ {
+ name: 'agent-1',
+ path: '.gitlab/agents/agent-1',
+ webPath: '/project/path/.gitlab/agents/agent-1',
+ },
+ ];
+
+ beforeEach(() => {
+ return createWrapper({ agents: [], trees });
+ });
+
+ it('should pass the correct hasConfigurations boolean value to empty state component', () => {
+ expect(findEmptyState().props('hasConfigurations')).toEqual(true);
+ });
+ });
+
+ describe('when agents query has errored', () => {
+ beforeEach(() => {
+ return createWrapper({ agents: null });
+ });
+
+ it('displays an alert message', () => {
+ expect(wrapper.find(GlAlert).exists()).toBe(true);
+ });
+ });
+
+ describe('when agents query is loading', () => {
+ const mocks = {
+ $apollo: {
+ queries: {
+ agents: {
+ loading: true,
+ },
+ },
+ },
+ };
+
+ beforeEach(() => {
+ wrapper = shallowMount(Agents, { mocks, propsData, provide: provideData });
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('displays a loading icon', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js b/spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js
new file mode 100644
index 00000000000..40c2c59e187
--- /dev/null
+++ b/spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js
@@ -0,0 +1,129 @@
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { createLocalVue, mount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import AvailableAgentsDropdown from '~/clusters_list/components/available_agents_dropdown.vue';
+import { I18N_AVAILABLE_AGENTS_DROPDOWN } from '~/clusters_list/constants';
+import agentConfigurationsQuery from '~/clusters_list/graphql/queries/agent_configurations.query.graphql';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { agentConfigurationsResponse } from './mock_data';
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+describe('AvailableAgentsDropdown', () => {
+ let wrapper;
+
+ const i18n = I18N_AVAILABLE_AGENTS_DROPDOWN;
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findConfiguredAgentItem = () => findDropdownItems().at(0);
+
+ const createWrapper = ({ propsData = {}, isLoading = false }) => {
+ const provide = {
+ projectPath: 'path/to/project',
+ };
+
+ wrapper = (() => {
+ if (isLoading) {
+ const mocks = {
+ $apollo: {
+ queries: {
+ agents: {
+ loading: true,
+ },
+ },
+ },
+ };
+
+ return mount(AvailableAgentsDropdown, { mocks, provide, propsData });
+ }
+
+ const apolloProvider = createMockApollo([
+ [agentConfigurationsQuery, jest.fn().mockResolvedValue(agentConfigurationsResponse)],
+ ]);
+
+ return mount(AvailableAgentsDropdown, {
+ localVue,
+ apolloProvider,
+ provide,
+ propsData,
+ });
+ })();
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('there are agents available', () => {
+ const propsData = {
+ isRegistering: false,
+ };
+
+ beforeEach(() => {
+ createWrapper({ propsData });
+ });
+
+ it('prompts to select an agent', () => {
+ expect(findDropdown().props('text')).toBe(i18n.selectAgent);
+ });
+
+ it('shows only agents that are not yet installed', () => {
+ expect(findDropdownItems()).toHaveLength(1);
+ expect(findConfiguredAgentItem().text()).toBe('configured-agent');
+ expect(findConfiguredAgentItem().props('isChecked')).toBe(false);
+ });
+
+ describe('click events', () => {
+ beforeEach(() => {
+ findConfiguredAgentItem().vm.$emit('click');
+ });
+
+ it('emits agentSelected with the name of the clicked agent', () => {
+ expect(wrapper.emitted('agentSelected')).toEqual([['configured-agent']]);
+ });
+
+ it('marks the clicked item as selected', () => {
+ expect(findDropdown().props('text')).toBe('configured-agent');
+ expect(findConfiguredAgentItem().props('isChecked')).toBe(true);
+ });
+ });
+ });
+
+ describe('registration in progress', () => {
+ const propsData = {
+ isRegistering: true,
+ };
+
+ beforeEach(() => {
+ createWrapper({ propsData });
+ });
+
+ it('updates the text in the dropdown', () => {
+ expect(findDropdown().props('text')).toBe(i18n.registeringAgent);
+ });
+
+ it('displays a loading icon', () => {
+ expect(findDropdown().props('loading')).toBe(true);
+ });
+ });
+
+ describe('agents query is loading', () => {
+ const propsData = {
+ isRegistering: false,
+ };
+
+ beforeEach(() => {
+ createWrapper({ propsData, isLoading: true });
+ });
+
+ it('updates the text in the dropdown', () => {
+ expect(findDropdown().text()).toBe(i18n.selectAgent);
+ });
+
+ it('displays a loading icon', () => {
+ expect(findDropdown().props('loading')).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/clusters_list/components/install_agent_modal_spec.js b/spec/frontend/clusters_list/components/install_agent_modal_spec.js
new file mode 100644
index 00000000000..98ca5e05b3f
--- /dev/null
+++ b/spec/frontend/clusters_list/components/install_agent_modal_spec.js
@@ -0,0 +1,190 @@
+import { GlAlert, GlButton, GlFormInputGroup } from '@gitlab/ui';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import AvailableAgentsDropdown from '~/clusters_list/components/available_agents_dropdown.vue';
+import InstallAgentModal from '~/clusters_list/components/install_agent_modal.vue';
+import { I18N_INSTALL_AGENT_MODAL } from '~/clusters_list/constants';
+import createAgentMutation from '~/clusters_list/graphql/mutations/create_agent.mutation.graphql';
+import createAgentTokenMutation from '~/clusters_list/graphql/mutations/create_agent_token.mutation.graphql';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import CodeBlock from '~/vue_shared/components/code_block.vue';
+import {
+ createAgentResponse,
+ createAgentErrorResponse,
+ createAgentTokenResponse,
+ createAgentTokenErrorResponse,
+} from '../mocks/apollo';
+import ModalStub from '../stubs';
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+describe('InstallAgentModal', () => {
+ let wrapper;
+ let apolloProvider;
+
+ const i18n = I18N_INSTALL_AGENT_MODAL;
+ const findModal = () => wrapper.findComponent(ModalStub);
+ const findAgentDropdown = () => findModal().findComponent(AvailableAgentsDropdown);
+ const findAlert = () => findModal().findComponent(GlAlert);
+ const findButtonByVariant = (variant) =>
+ findModal()
+ .findAll(GlButton)
+ .wrappers.find((button) => button.props('variant') === variant);
+ const findActionButton = () => findButtonByVariant('confirm');
+ const findCancelButton = () => findButtonByVariant('default');
+
+ const expectDisabledAttribute = (element, disabled) => {
+ if (disabled) {
+ expect(element.attributes('disabled')).toBe('true');
+ } else {
+ expect(element.attributes('disabled')).toBeUndefined();
+ }
+ };
+
+ const createWrapper = () => {
+ const provide = {
+ projectPath: 'path/to/project',
+ kasAddress: 'kas.example.com',
+ };
+
+ wrapper = shallowMount(InstallAgentModal, {
+ attachTo: document.body,
+ stubs: {
+ GlModal: ModalStub,
+ },
+ localVue,
+ apolloProvider,
+ provide,
+ });
+ };
+
+ const mockSelectedAgentResponse = () => {
+ createWrapper();
+
+ wrapper.vm.setAgentName('agent-name');
+ findActionButton().vm.$emit('click');
+
+ return waitForPromises();
+ };
+
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ apolloProvider = null;
+ });
+
+ describe('initial state', () => {
+ it('renders the dropdown for available agents', () => {
+ expect(findAgentDropdown().isVisible()).toBe(true);
+ expect(findModal().text()).not.toContain(i18n.basicInstallTitle);
+ expect(findModal().findComponent(GlFormInputGroup).exists()).toBe(false);
+ expect(findModal().findComponent(GlAlert).exists()).toBe(false);
+ expect(findModal().findComponent(CodeBlock).exists()).toBe(false);
+ });
+
+ it('renders a cancel button', () => {
+ expect(findCancelButton().isVisible()).toBe(true);
+ expectDisabledAttribute(findCancelButton(), false);
+ });
+
+ it('renders a disabled next button', () => {
+ expect(findActionButton().isVisible()).toBe(true);
+ expect(findActionButton().text()).toBe(i18n.next);
+ expectDisabledAttribute(findActionButton(), true);
+ });
+ });
+
+ describe('an agent is selected', () => {
+ beforeEach(() => {
+ findAgentDropdown().vm.$emit('agentSelected');
+ });
+
+ it('enables the next button', () => {
+ expect(findActionButton().isVisible()).toBe(true);
+ expectDisabledAttribute(findActionButton(), false);
+ });
+ });
+
+ describe('registering an agent', () => {
+ const createAgentHandler = jest.fn().mockResolvedValue(createAgentResponse);
+ const createAgentTokenHandler = jest.fn().mockResolvedValue(createAgentTokenResponse);
+
+ beforeEach(() => {
+ apolloProvider = createMockApollo([
+ [createAgentMutation, createAgentHandler],
+ [createAgentTokenMutation, createAgentTokenHandler],
+ ]);
+
+ return mockSelectedAgentResponse();
+ });
+
+ it('creates an agent and token', () => {
+ expect(createAgentHandler).toHaveBeenCalledWith({
+ input: { name: 'agent-name', projectPath: 'path/to/project' },
+ });
+
+ expect(createAgentTokenHandler).toHaveBeenCalledWith({
+ input: { clusterAgentId: 'agent-id', name: 'agent-name' },
+ });
+ });
+
+ it('renders a done button', () => {
+ expect(findActionButton().isVisible()).toBe(true);
+ expect(findActionButton().text()).toBe(i18n.done);
+ expectDisabledAttribute(findActionButton(), false);
+ });
+
+ it('shows agent instructions', () => {
+ const modalText = findModal().text();
+ expect(modalText).toContain(i18n.basicInstallTitle);
+ expect(modalText).toContain(i18n.basicInstallBody);
+
+ const token = findModal().findComponent(GlFormInputGroup);
+ expect(token.props('value')).toBe('mock-agent-token');
+
+ const alert = findModal().findComponent(GlAlert);
+ expect(alert.props('title')).toBe(i18n.tokenSingleUseWarningTitle);
+
+ const code = findModal().findComponent(CodeBlock).props('code');
+ expect(code).toContain('--agent-token=mock-agent-token');
+ expect(code).toContain('--kas-address=kas.example.com');
+ });
+
+ describe('error creating agent', () => {
+ beforeEach(() => {
+ apolloProvider = createMockApollo([
+ [createAgentMutation, jest.fn().mockResolvedValue(createAgentErrorResponse)],
+ ]);
+
+ return mockSelectedAgentResponse();
+ });
+
+ it('displays the error message', () => {
+ expect(findAlert().text()).toBe(createAgentErrorResponse.data.createClusterAgent.errors[0]);
+ });
+ });
+
+ describe('error creating token', () => {
+ beforeEach(() => {
+ apolloProvider = createMockApollo([
+ [createAgentMutation, jest.fn().mockResolvedValue(createAgentResponse)],
+ [createAgentTokenMutation, jest.fn().mockResolvedValue(createAgentTokenErrorResponse)],
+ ]);
+
+ return mockSelectedAgentResponse();
+ });
+
+ it('displays the error message', () => {
+ expect(findAlert().text()).toBe(
+ createAgentTokenErrorResponse.data.clusterAgentTokenCreate.errors[0],
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/clusters_list/components/mock_data.js b/spec/frontend/clusters_list/components/mock_data.js
new file mode 100644
index 00000000000..e388d791b89
--- /dev/null
+++ b/spec/frontend/clusters_list/components/mock_data.js
@@ -0,0 +1,12 @@
+export const agentConfigurationsResponse = {
+ data: {
+ project: {
+ agentConfigurations: {
+ nodes: [{ agentName: 'installed-agent' }, { agentName: 'configured-agent' }],
+ },
+ clusterAgents: {
+ nodes: [{ name: 'installed-agent' }],
+ },
+ },
+ },
+};
diff --git a/spec/frontend/clusters_list/mocks/apollo.js b/spec/frontend/clusters_list/mocks/apollo.js
new file mode 100644
index 00000000000..27b71a0d4b5
--- /dev/null
+++ b/spec/frontend/clusters_list/mocks/apollo.js
@@ -0,0 +1,45 @@
+export const createAgentResponse = {
+ data: {
+ createClusterAgent: {
+ clusterAgent: {
+ id: 'agent-id',
+ },
+ errors: [],
+ },
+ },
+};
+
+export const createAgentErrorResponse = {
+ data: {
+ createClusterAgent: {
+ clusterAgent: {
+ id: 'agent-id',
+ },
+ errors: ['could not create agent'],
+ },
+ },
+};
+
+export const createAgentTokenResponse = {
+ data: {
+ clusterAgentTokenCreate: {
+ token: {
+ id: 'token-id',
+ },
+ secret: 'mock-agent-token',
+ errors: [],
+ },
+ },
+};
+
+export const createAgentTokenErrorResponse = {
+ data: {
+ clusterAgentTokenCreate: {
+ token: {
+ id: 'token-id',
+ },
+ secret: 'mock-agent-token',
+ errors: ['could not create agent token'],
+ },
+ },
+};
diff --git a/spec/frontend/clusters_list/stubs.js b/spec/frontend/clusters_list/stubs.js
new file mode 100644
index 00000000000..5769d6190f6
--- /dev/null
+++ b/spec/frontend/clusters_list/stubs.js
@@ -0,0 +1,14 @@
+const ModalStub = {
+ name: 'glmodal-stub',
+ template: `
+ <div>
+ <slot></slot>
+ <slot name="modal-footer"></slot>
+ </div>
+ `,
+ methods: {
+ hide: jest.fn(),
+ },
+};
+
+export default ModalStub;
diff --git a/spec/frontend/comment_type_toggle_spec.js b/spec/frontend/comment_type_toggle_spec.js
deleted file mode 100644
index 06dbfac1803..00000000000
--- a/spec/frontend/comment_type_toggle_spec.js
+++ /dev/null
@@ -1,169 +0,0 @@
-import CommentTypeToggle from '~/comment_type_toggle';
-import DropLab from '~/droplab/drop_lab';
-import InputSetter from '~/droplab/plugins/input_setter';
-
-describe('CommentTypeToggle', () => {
- const testContext = {};
-
- describe('class constructor', () => {
- beforeEach(() => {
- testContext.dropdownTrigger = {};
- testContext.dropdownList = {};
- testContext.noteTypeInput = {};
- testContext.submitButton = {};
- testContext.closeButton = {};
-
- testContext.commentTypeToggle = new CommentTypeToggle({
- dropdownTrigger: testContext.dropdownTrigger,
- dropdownList: testContext.dropdownList,
- noteTypeInput: testContext.noteTypeInput,
- submitButton: testContext.submitButton,
- closeButton: testContext.closeButton,
- });
- });
-
- it('should set .dropdownTrigger', () => {
- expect(testContext.commentTypeToggle.dropdownTrigger).toBe(testContext.dropdownTrigger);
- });
-
- it('should set .dropdownList', () => {
- expect(testContext.commentTypeToggle.dropdownList).toBe(testContext.dropdownList);
- });
-
- it('should set .noteTypeInput', () => {
- expect(testContext.commentTypeToggle.noteTypeInput).toBe(testContext.noteTypeInput);
- });
-
- it('should set .submitButton', () => {
- expect(testContext.commentTypeToggle.submitButton).toBe(testContext.submitButton);
- });
-
- it('should set .closeButton', () => {
- expect(testContext.commentTypeToggle.closeButton).toBe(testContext.closeButton);
- });
-
- it('should set .reopenButton', () => {
- expect(testContext.commentTypeToggle.reopenButton).toBe(testContext.reopenButton);
- });
- });
-
- describe('initDroplab', () => {
- beforeEach(() => {
- testContext.commentTypeToggle = {
- dropdownTrigger: {},
- dropdownList: {},
- noteTypeInput: {},
- submitButton: {},
- closeButton: {},
- setConfig: () => {},
- };
- testContext.config = {};
-
- jest.spyOn(DropLab.prototype, 'init').mockImplementation();
- jest.spyOn(DropLab.prototype, 'constructor').mockImplementation();
-
- jest.spyOn(testContext.commentTypeToggle, 'setConfig').mockReturnValue(testContext.config);
-
- CommentTypeToggle.prototype.initDroplab.call(testContext.commentTypeToggle);
- });
-
- it('should instantiate a DropLab instance and set .droplab', () => {
- expect(testContext.commentTypeToggle.droplab instanceof DropLab).toBe(true);
- });
-
- it('should call .setConfig', () => {
- expect(testContext.commentTypeToggle.setConfig).toHaveBeenCalled();
- });
-
- it('should call DropLab.prototype.init', () => {
- expect(DropLab.prototype.init).toHaveBeenCalledWith(
- testContext.commentTypeToggle.dropdownTrigger,
- testContext.commentTypeToggle.dropdownList,
- [InputSetter],
- testContext.config,
- );
- });
- });
-
- describe('setConfig', () => {
- describe('if no .closeButton is provided', () => {
- beforeEach(() => {
- testContext.commentTypeToggle = {
- dropdownTrigger: {},
- dropdownList: {},
- noteTypeInput: {},
- submitButton: {},
- reopenButton: {},
- };
-
- testContext.setConfig = CommentTypeToggle.prototype.setConfig.call(
- testContext.commentTypeToggle,
- );
- });
-
- it('should not add .closeButton related InputSetter config', () => {
- expect(testContext.setConfig).toEqual({
- InputSetter: [
- {
- input: testContext.commentTypeToggle.noteTypeInput,
- valueAttribute: 'data-value',
- },
- {
- input: testContext.commentTypeToggle.submitButton,
- valueAttribute: 'data-submit-text',
- },
- {
- input: testContext.commentTypeToggle.reopenButton,
- valueAttribute: 'data-reopen-text',
- },
- {
- input: testContext.commentTypeToggle.reopenButton,
- valueAttribute: 'data-reopen-text',
- inputAttribute: 'data-alternative-text',
- },
- ],
- });
- });
- });
-
- describe('if no .reopenButton is provided', () => {
- beforeEach(() => {
- testContext.commentTypeToggle = {
- dropdownTrigger: {},
- dropdownList: {},
- noteTypeInput: {},
- submitButton: {},
- closeButton: {},
- };
-
- testContext.setConfig = CommentTypeToggle.prototype.setConfig.call(
- testContext.commentTypeToggle,
- );
- });
-
- it('should not add .reopenButton related InputSetter config', () => {
- expect(testContext.setConfig).toEqual({
- InputSetter: [
- {
- input: testContext.commentTypeToggle.noteTypeInput,
- valueAttribute: 'data-value',
- },
- {
- input: testContext.commentTypeToggle.submitButton,
- valueAttribute: 'data-submit-text',
- },
- {
- input: testContext.commentTypeToggle.closeButton,
- valueAttribute: 'data-close-text',
- },
- {
- input: testContext.commentTypeToggle.closeButton,
- valueAttribute: 'data-close-text',
- inputAttribute: 'data-alternative-text',
- },
- ],
- });
- });
- });
- });
-});
diff --git a/spec/frontend/commit/commit_pipeline_status_component_spec.js b/spec/frontend/commit/commit_pipeline_status_component_spec.js
index 8082b8524e7..3a549e66eb7 100644
--- a/spec/frontend/commit/commit_pipeline_status_component_spec.js
+++ b/spec/frontend/commit/commit_pipeline_status_component_spec.js
@@ -1,7 +1,7 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Visibility from 'visibilityjs';
-import { getJSONFixture } from 'helpers/fixtures';
+import fixture from 'test_fixtures/pipelines/pipelines.json';
import createFlash from '~/flash';
import Poll from '~/lib/utils/poll';
import CommitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue';
@@ -20,7 +20,7 @@ jest.mock('~/projects/tree/services/commit_pipeline_service', () =>
describe('Commit pipeline status component', () => {
let wrapper;
- const { pipelines } = getJSONFixture('pipelines/pipelines.json');
+ const { pipelines } = fixture;
const { status: mockCiStatus } = pipelines[0].details;
const defaultProps = {
diff --git a/spec/frontend/commit/pipelines/pipelines_table_spec.js b/spec/frontend/commit/pipelines/pipelines_table_spec.js
index 1defb3d586c..17f7be9d1d7 100644
--- a/spec/frontend/commit/pipelines/pipelines_table_spec.js
+++ b/spec/frontend/commit/pipelines/pipelines_table_spec.js
@@ -1,6 +1,7 @@
import { GlEmptyState, GlLoadingIcon, GlModal, GlTable } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
+import fixture from 'test_fixtures/pipelines/pipelines.json';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
@@ -8,7 +9,6 @@ import PipelinesTable from '~/commit/pipelines/pipelines_table.vue';
import axios from '~/lib/utils/axios_utils';
describe('Pipelines table in Commits and Merge requests', () => {
- const jsonFixtureName = 'pipelines/pipelines.json';
let wrapper;
let pipeline;
let mock;
@@ -37,7 +37,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- const { pipelines } = getJSONFixture(jsonFixtureName);
+ const { pipelines } = fixture;
pipeline = pipelines.find((p) => p.user !== null && p.commit !== null);
});
diff --git a/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap b/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap
index 8f5516545eb..178c7d749c8 100644
--- a/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap
+++ b/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap
@@ -11,14 +11,7 @@ exports[`content_editor/components/toolbar_link_button renders dropdown componen
<ul role=\\"menu\\" tabindex=\\"-1\\" class=\\"dropdown-menu\\">
<div class=\\"gl-new-dropdown-inner\\">
<!---->
- <div class=\\"gl-display-flex gl-flex-direction-row gl-justify-content-space-between gl-align-items-center gl-px-5\\">
- <div class=\\"gl-display-flex\\">
- <!---->
- </div>
- <div class=\\"gl-display-flex\\">
- <!---->
- </div>
- </div>
+ <!---->
<div class=\\"gl-new-dropdown-contents\\">
<!---->
<li role=\\"presentation\\" class=\\"gl-px-3!\\">
diff --git a/spec/frontend/content_editor/components/top_toolbar_spec.js b/spec/frontend/content_editor/components/top_toolbar_spec.js
index a5df3d73289..ec58877470c 100644
--- a/spec/frontend/content_editor/components/top_toolbar_spec.js
+++ b/spec/frontend/content_editor/components/top_toolbar_spec.js
@@ -31,6 +31,7 @@ describe('content_editor/components/top_toolbar', () => {
${'blockquote'} | ${{ contentType: 'blockquote', iconName: 'quote', label: 'Insert a quote', editorCommand: 'toggleBlockquote' }}
${'bullet-list'} | ${{ contentType: 'bulletList', iconName: 'list-bulleted', label: 'Add a bullet list', editorCommand: 'toggleBulletList' }}
${'ordered-list'} | ${{ contentType: 'orderedList', iconName: 'list-numbered', label: 'Add a numbered list', editorCommand: 'toggleOrderedList' }}
+ ${'details'} | ${{ contentType: 'details', iconName: 'details-block', label: 'Add a collapsible section', editorCommand: 'toggleDetails' }}
${'horizontal-rule'} | ${{ contentType: 'horizontalRule', iconName: 'dash', label: 'Add a horizontal rule', editorCommand: 'setHorizontalRule' }}
${'code-block'} | ${{ contentType: 'codeBlock', iconName: 'doc-code', label: 'Insert a code block', editorCommand: 'toggleCodeBlock' }}
${'text-styles'} | ${{}}
diff --git a/spec/frontend/content_editor/components/wrappers/details_spec.js b/spec/frontend/content_editor/components/wrappers/details_spec.js
new file mode 100644
index 00000000000..d746b9fa2f1
--- /dev/null
+++ b/spec/frontend/content_editor/components/wrappers/details_spec.js
@@ -0,0 +1,40 @@
+import { NodeViewContent } from '@tiptap/vue-2';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import DetailsWrapper from '~/content_editor/components/wrappers/details.vue';
+
+describe('content/components/wrappers/details', () => {
+ let wrapper;
+
+ const createWrapper = async () => {
+ wrapper = shallowMountExtended(DetailsWrapper, {
+ propsData: {
+ node: {},
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders a node-view-content as a ul element', () => {
+ createWrapper();
+
+ expect(wrapper.findComponent(NodeViewContent).props().as).toBe('ul');
+ });
+
+ it('is "open" by default', () => {
+ createWrapper();
+
+ expect(wrapper.findByTestId('details-toggle-icon').classes()).toContain('is-open');
+ expect(wrapper.findComponent(NodeViewContent).classes()).toContain('is-open');
+ });
+
+ it('closes the details block on clicking the details toggle icon', async () => {
+ createWrapper();
+
+ await wrapper.findByTestId('details-toggle-icon').trigger('click');
+ expect(wrapper.findByTestId('details-toggle-icon').classes()).not.toContain('is-open');
+ expect(wrapper.findComponent(NodeViewContent).classes()).not.toContain('is-open');
+ });
+});
diff --git a/spec/frontend/content_editor/components/wrappers/frontmatter_spec.js b/spec/frontend/content_editor/components/wrappers/frontmatter_spec.js
new file mode 100644
index 00000000000..de8f8efd260
--- /dev/null
+++ b/spec/frontend/content_editor/components/wrappers/frontmatter_spec.js
@@ -0,0 +1,43 @@
+import { NodeViewWrapper, NodeViewContent } from '@tiptap/vue-2';
+import { shallowMount } from '@vue/test-utils';
+import FrontmatterWrapper from '~/content_editor/components/wrappers/frontmatter.vue';
+
+describe('content/components/wrappers/frontmatter', () => {
+ let wrapper;
+
+ const createWrapper = async (nodeAttrs = { language: 'yaml' }) => {
+ wrapper = shallowMount(FrontmatterWrapper, {
+ propsData: {
+ node: {
+ attrs: nodeAttrs,
+ },
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders a node-view-wrapper as a pre element', () => {
+ createWrapper();
+
+ expect(wrapper.findComponent(NodeViewWrapper).props().as).toBe('pre');
+ expect(wrapper.findComponent(NodeViewWrapper).classes()).toContain('gl-relative');
+ });
+
+ it('renders a node-view-content as a code element', () => {
+ createWrapper();
+
+ expect(wrapper.findComponent(NodeViewContent).props().as).toBe('code');
+ });
+
+ it('renders label indicating that code block is frontmatter', () => {
+ createWrapper();
+
+ const label = wrapper.find('[data-testid="frontmatter-label"]');
+
+ expect(label.text()).toEqual('frontmatter:yaml');
+ expect(label.classes()).toEqual(['gl-absolute', 'gl-top-0', 'gl-right-3']);
+ });
+});
diff --git a/spec/frontend/content_editor/extensions/color_chip_spec.js b/spec/frontend/content_editor/extensions/color_chip_spec.js
new file mode 100644
index 00000000000..4bb6f344ab4
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/color_chip_spec.js
@@ -0,0 +1,33 @@
+import ColorChip, { colorDecoratorPlugin } from '~/content_editor/extensions/color_chip';
+import Code from '~/content_editor/extensions/code';
+import { createTestEditor } from '../test_utils';
+
+describe('content_editor/extensions/color_chip', () => {
+ let tiptapEditor;
+
+ beforeEach(() => {
+ tiptapEditor = createTestEditor({ extensions: [ColorChip, Code] });
+ });
+
+ describe.each`
+ colorExpression | decorated
+ ${'#F00'} | ${true}
+ ${'rgba(0,0,0,0)'} | ${true}
+ ${'hsl(540,70%,50%)'} | ${true}
+ ${'F00'} | ${false}
+ ${'#F0'} | ${false}
+ ${'gba(0,0,0,0)'} | ${false}
+ ${'hls(540,70%,50%)'} | ${false}
+ ${'red'} | ${false}
+ `(
+ 'when a code span with $colorExpression color expression is found',
+ ({ colorExpression, decorated }) => {
+ it(`${decorated ? 'adds' : 'does not add'} a color chip decorator`, () => {
+ tiptapEditor.commands.setContent(`<p><code>${colorExpression}</code></p>`);
+ const pluginState = colorDecoratorPlugin.getState(tiptapEditor.state);
+
+ expect(pluginState.children).toHaveLength(decorated ? 3 : 0);
+ });
+ },
+ );
+});
diff --git a/spec/frontend/content_editor/extensions/details_content_spec.js b/spec/frontend/content_editor/extensions/details_content_spec.js
new file mode 100644
index 00000000000..575f3bf65e4
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/details_content_spec.js
@@ -0,0 +1,76 @@
+import Details from '~/content_editor/extensions/details';
+import DetailsContent from '~/content_editor/extensions/details_content';
+import { createTestEditor, createDocBuilder } from '../test_utils';
+
+describe('content_editor/extensions/details_content', () => {
+ let tiptapEditor;
+ let doc;
+ let p;
+ let details;
+ let detailsContent;
+
+ beforeEach(() => {
+ tiptapEditor = createTestEditor({ extensions: [Details, DetailsContent] });
+
+ ({
+ builders: { doc, p, details, detailsContent },
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ details: { nodeType: Details.name },
+ detailsContent: { nodeType: DetailsContent.name },
+ },
+ }));
+ });
+
+ describe('shortcut: Enter', () => {
+ it('splits a details content into two items', () => {
+ const initialDoc = doc(
+ details(
+ detailsContent(p('Summary')),
+ detailsContent(p('Text content')),
+ detailsContent(p('Text content')),
+ ),
+ );
+ const expectedDoc = doc(
+ details(
+ detailsContent(p('Summary')),
+ detailsContent(p('')),
+ detailsContent(p('Text content')),
+ detailsContent(p('Text content')),
+ ),
+ );
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+
+ tiptapEditor.commands.setTextSelection(10);
+ tiptapEditor.commands.keyboardShortcut('Enter');
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
+ });
+
+ describe('shortcut: Shift-Tab', () => {
+ it('lifts a details content and creates two separate details items', () => {
+ const initialDoc = doc(
+ details(
+ detailsContent(p('Summary')),
+ detailsContent(p('Text content')),
+ detailsContent(p('Text content')),
+ ),
+ );
+ const expectedDoc = doc(
+ details(detailsContent(p('Summary'))),
+ p('Text content'),
+ details(detailsContent(p('Text content'))),
+ );
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+
+ tiptapEditor.commands.setTextSelection(20);
+ tiptapEditor.commands.keyboardShortcut('Shift-Tab');
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/extensions/details_spec.js b/spec/frontend/content_editor/extensions/details_spec.js
new file mode 100644
index 00000000000..cd59943982f
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/details_spec.js
@@ -0,0 +1,92 @@
+import Details from '~/content_editor/extensions/details';
+import DetailsContent from '~/content_editor/extensions/details_content';
+import { createTestEditor, createDocBuilder } from '../test_utils';
+
+describe('content_editor/extensions/details', () => {
+ let tiptapEditor;
+ let doc;
+ let p;
+ let details;
+ let detailsContent;
+
+ beforeEach(() => {
+ tiptapEditor = createTestEditor({ extensions: [Details, DetailsContent] });
+
+ ({
+ builders: { doc, p, details, detailsContent },
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ details: { nodeType: Details.name },
+ detailsContent: { nodeType: DetailsContent.name },
+ },
+ }));
+ });
+
+ describe('setDetails command', () => {
+ describe('when current block is a paragraph', () => {
+ it('converts current paragraph into a details block', () => {
+ const initialDoc = doc(p('Text content'));
+ const expectedDoc = doc(details(detailsContent(p('Text content'))));
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ tiptapEditor.commands.setDetails();
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
+ });
+
+ describe('when current block is a details block', () => {
+ it('maintains the same document structure', () => {
+ const initialDoc = doc(details(detailsContent(p('Text content'))));
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ tiptapEditor.commands.setDetails();
+
+ expect(tiptapEditor.getJSON()).toEqual(initialDoc.toJSON());
+ });
+ });
+ });
+
+ describe('toggleDetails command', () => {
+ describe('when current block is a paragraph', () => {
+ it('converts current paragraph into a details block', () => {
+ const initialDoc = doc(p('Text content'));
+ const expectedDoc = doc(details(detailsContent(p('Text content'))));
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ tiptapEditor.commands.toggleDetails();
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
+ });
+
+ describe('when current block is a details block', () => {
+ it('convert details block into a paragraph', () => {
+ const initialDoc = doc(details(detailsContent(p('Text content'))));
+ const expectedDoc = doc(p('Text content'));
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ tiptapEditor.commands.toggleDetails();
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
+ });
+ });
+
+ it.each`
+ input | insertedNode
+ ${'<details>'} | ${(...args) => details(detailsContent(p(...args)))}
+ ${'<details'} | ${(...args) => p(...args)}
+ ${'details>'} | ${(...args) => p(...args)}
+ `('with input=$input, then should insert a $insertedNode', ({ input, insertedNode }) => {
+ const { view } = tiptapEditor;
+ const { selection } = view.state;
+ const expectedDoc = doc(insertedNode());
+
+ // Triggers the event handler that input rules listen to
+ view.someProp('handleTextInput', (f) => f(view, selection.from, selection.to, input));
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
+});
diff --git a/spec/frontend/content_editor/extensions/math_inline_spec.js b/spec/frontend/content_editor/extensions/math_inline_spec.js
new file mode 100644
index 00000000000..82eb85477de
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/math_inline_spec.js
@@ -0,0 +1,42 @@
+import MathInline from '~/content_editor/extensions/math_inline';
+import { createTestEditor, createDocBuilder } from '../test_utils';
+
+describe('content_editor/extensions/math_inline', () => {
+ let tiptapEditor;
+ let doc;
+ let p;
+ let mathInline;
+
+ beforeEach(() => {
+ tiptapEditor = createTestEditor({ extensions: [MathInline] });
+
+ ({
+ builders: { doc, p, mathInline },
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ mathInline: { markType: MathInline.name },
+ },
+ }));
+ });
+
+ it.each`
+ input | insertedNode
+ ${'$`a^2`$'} | ${() => p(mathInline('a^2'))}
+ ${'$`a^2`'} | ${() => p('$`a^2`')}
+ ${'`a^2`$'} | ${() => p('`a^2`$')}
+ `('with input=$input, then should insert a $insertedNode', ({ input, insertedNode }) => {
+ const { view } = tiptapEditor;
+ const expectedDoc = doc(insertedNode());
+
+ tiptapEditor.chain().setContent(input).setTextSelection(0).run();
+
+ const { state } = tiptapEditor;
+ const { selection } = state;
+
+ // Triggers the event handler that input rules listen to
+ view.someProp('handleTextInput', (f) => f(view, selection.from, input.length + 1, input));
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
+});
diff --git a/spec/frontend/content_editor/extensions/table_of_contents_spec.js b/spec/frontend/content_editor/extensions/table_of_contents_spec.js
new file mode 100644
index 00000000000..83818899c17
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/table_of_contents_spec.js
@@ -0,0 +1,35 @@
+import TableOfContents from '~/content_editor/extensions/table_of_contents';
+import { createTestEditor, createDocBuilder } from '../test_utils';
+
+describe('content_editor/extensions/table_of_contents', () => {
+ let tiptapEditor;
+ let builders;
+
+ beforeEach(() => {
+ tiptapEditor = createTestEditor({ extensions: [TableOfContents] });
+ ({ builders } = createDocBuilder({
+ tiptapEditor,
+ names: { tableOfContents: { nodeType: TableOfContents.name } },
+ }));
+ });
+
+ it.each`
+ input | insertedNode
+ ${'[[_TOC_]]'} | ${'tableOfContents'}
+ ${'[TOC]'} | ${'tableOfContents'}
+ ${'[toc]'} | ${'p'}
+ ${'TOC'} | ${'p'}
+ ${'[_TOC_]'} | ${'p'}
+ ${'[[TOC]]'} | ${'p'}
+ `('with input=$input, then should insert a $insertedNode', ({ input, insertedNode }) => {
+ const { doc } = builders;
+ const { view } = tiptapEditor;
+ const { selection } = view.state;
+ const expectedDoc = doc(builders[insertedNode]());
+
+ // Triggers the event handler that input rules listen to
+ view.someProp('handleTextInput', (f) => f(view, selection.from, selection.to, input));
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
+});
diff --git a/spec/frontend/content_editor/markdown_processing_examples.js b/spec/frontend/content_editor/markdown_processing_examples.js
index b3aabfeb145..da895970289 100644
--- a/spec/frontend/content_editor/markdown_processing_examples.js
+++ b/spec/frontend/content_editor/markdown_processing_examples.js
@@ -1,11 +1,13 @@
import fs from 'fs';
import path from 'path';
import jsYaml from 'js-yaml';
+// eslint-disable-next-line import/no-deprecated
import { getJSONFixture } from 'helpers/fixtures';
export const loadMarkdownApiResult = (testName) => {
const fixturePathPrefix = `api/markdown/${testName}.json`;
+ // eslint-disable-next-line import/no-deprecated
const fixture = getJSONFixture(fixturePathPrefix);
return fixture.body || fixture.html;
};
diff --git a/spec/frontend/content_editor/services/markdown_serializer_spec.js b/spec/frontend/content_editor/services/markdown_serializer_spec.js
index 6f2c908c289..33056ab9e4a 100644
--- a/spec/frontend/content_editor/services/markdown_serializer_spec.js
+++ b/spec/frontend/content_editor/services/markdown_serializer_spec.js
@@ -5,6 +5,8 @@ import Code from '~/content_editor/extensions/code';
import CodeBlockHighlight from '~/content_editor/extensions/code_block_highlight';
import DescriptionItem from '~/content_editor/extensions/description_item';
import DescriptionList from '~/content_editor/extensions/description_list';
+import Details from '~/content_editor/extensions/details';
+import DetailsContent from '~/content_editor/extensions/details_content';
import Division from '~/content_editor/extensions/division';
import Emoji from '~/content_editor/extensions/emoji';
import Figure from '~/content_editor/extensions/figure';
@@ -45,6 +47,8 @@ const tiptapEditor = createTestEditor({
CodeBlockHighlight,
DescriptionItem,
DescriptionList,
+ Details,
+ DetailsContent,
Division,
Emoji,
Figure,
@@ -78,6 +82,8 @@ const {
bulletList,
code,
codeBlock,
+ details,
+ detailsContent,
division,
descriptionItem,
descriptionList,
@@ -110,6 +116,8 @@ const {
bulletList: { nodeType: BulletList.name },
code: { markType: Code.name },
codeBlock: { nodeType: CodeBlockHighlight.name },
+ details: { nodeType: Details.name },
+ detailsContent: { nodeType: DetailsContent.name },
division: { nodeType: Division.name },
descriptionItem: { nodeType: DescriptionItem.name },
descriptionList: { nodeType: DescriptionList.name },
@@ -588,6 +596,105 @@ A giant _owl-like_ creature.
);
});
+ it('correctly renders a simple details/summary', () => {
+ expect(
+ serialize(
+ details(
+ detailsContent(paragraph('this is the summary')),
+ detailsContent(paragraph('this content will be hidden')),
+ ),
+ ),
+ ).toBe(
+ `
+<details>
+<summary>this is the summary</summary>
+this content will be hidden
+</details>
+ `.trim(),
+ );
+ });
+
+ it('correctly renders details/summary with styled content', () => {
+ expect(
+ serialize(
+ details(
+ detailsContent(paragraph('this is the ', bold('summary'))),
+ detailsContent(
+ codeBlock(
+ { language: 'javascript' },
+ 'var a = 2;\nvar b = 3;\nvar c = a + d;\n\nconsole.log(c);',
+ ),
+ ),
+ detailsContent(paragraph('this content will be ', italic('hidden'))),
+ ),
+ details(detailsContent(paragraph('summary 2')), detailsContent(paragraph('content 2'))),
+ ),
+ ).toBe(
+ `
+<details>
+<summary>
+
+this is the **summary**
+
+</summary>
+
+\`\`\`javascript
+var a = 2;
+var b = 3;
+var c = a + d;
+
+console.log(c);
+\`\`\`
+
+this content will be _hidden_
+
+</details>
+<details>
+<summary>summary 2</summary>
+content 2
+</details>
+ `.trim(),
+ );
+ });
+
+ it('correctly renders nested details', () => {
+ expect(
+ serialize(
+ details(
+ detailsContent(paragraph('dream level 1')),
+ detailsContent(
+ details(
+ detailsContent(paragraph('dream level 2')),
+ detailsContent(
+ details(
+ detailsContent(paragraph('dream level 3')),
+ detailsContent(paragraph(italic('inception'))),
+ ),
+ ),
+ ),
+ ),
+ ),
+ ),
+ ).toBe(
+ `
+<details>
+<summary>dream level 1</summary>
+
+<details>
+<summary>dream level 2</summary>
+
+<details>
+<summary>dream level 3</summary>
+
+_inception_
+
+</details>
+</details>
+</details>
+ `.trim(),
+ );
+ });
+
it('correctly renders div', () => {
expect(
serialize(
diff --git a/spec/frontend/cycle_analytics/base_spec.js b/spec/frontend/cycle_analytics/base_spec.js
index 5d3361bfa35..9a9415cc12a 100644
--- a/spec/frontend/cycle_analytics/base_spec.js
+++ b/spec/frontend/cycle_analytics/base_spec.js
@@ -19,6 +19,7 @@ import {
createdAfter,
currentGroup,
stageCounts,
+ initialPaginationState as pagination,
} from './mock_data';
const selectedStageEvents = issueEvents.events;
@@ -81,6 +82,7 @@ const findOverviewMetrics = () => wrapper.findComponent(ValueStreamMetrics);
const findStageTable = () => wrapper.findComponent(StageTable);
const findStageEvents = () => findStageTable().props('stageEvents');
const findEmptyStageTitle = () => wrapper.findComponent(GlEmptyState).props('title');
+const findPagination = () => wrapper.findByTestId('vsa-stage-pagination');
const hasMetricsRequests = (reqs) => {
const foundReqs = findOverviewMetrics().props('requests');
@@ -90,7 +92,7 @@ const hasMetricsRequests = (reqs) => {
describe('Value stream analytics component', () => {
beforeEach(() => {
- wrapper = createComponent({ initialState: { selectedStage, selectedStageEvents } });
+ wrapper = createComponent({ initialState: { selectedStage, selectedStageEvents, pagination } });
});
afterEach(() => {
@@ -153,6 +155,10 @@ describe('Value stream analytics component', () => {
expect(findLoadingIcon().exists()).toBe(false);
});
+ it('renders pagination', () => {
+ expect(findPagination().exists()).toBe(true);
+ });
+
describe('with `cycleAnalyticsForGroups=true` license', () => {
beforeEach(() => {
wrapper = createComponent({ initialState: { features: { cycleAnalyticsForGroups: true } } });
diff --git a/spec/frontend/cycle_analytics/mock_data.js b/spec/frontend/cycle_analytics/mock_data.js
index d9659d5d4c3..1882457960a 100644
--- a/spec/frontend/cycle_analytics/mock_data.js
+++ b/spec/frontend/cycle_analytics/mock_data.js
@@ -1,6 +1,14 @@
+/* eslint-disable import/no-deprecated */
+
import { getJSONFixture } from 'helpers/fixtures';
import { TEST_HOST } from 'helpers/test_constants';
-import { DEFAULT_VALUE_STREAM, DEFAULT_DAYS_IN_PAST } from '~/cycle_analytics/constants';
+import {
+ DEFAULT_VALUE_STREAM,
+ DEFAULT_DAYS_IN_PAST,
+ PAGINATION_TYPE,
+ PAGINATION_SORT_DIRECTION_DESC,
+ PAGINATION_SORT_FIELD_END_EVENT,
+} from '~/cycle_analytics/constants';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { getDateInPast } from '~/lib/utils/datetime_utility';
@@ -13,9 +21,10 @@ export const getStageByTitle = (stages, title) =>
stages.find((stage) => stage.title && stage.title.toLowerCase().trim() === title) || {};
const fixtureEndpoints = {
- customizableCycleAnalyticsStagesAndEvents: 'projects/analytics/value_stream_analytics/stages',
- stageEvents: (stage) => `projects/analytics/value_stream_analytics/events/${stage}`,
- metricsData: 'projects/analytics/value_stream_analytics/summary',
+ customizableCycleAnalyticsStagesAndEvents:
+ 'projects/analytics/value_stream_analytics/stages.json',
+ stageEvents: (stage) => `projects/analytics/value_stream_analytics/events/${stage}.json`,
+ metricsData: 'projects/analytics/value_stream_analytics/summary.json',
};
export const metricsData = getJSONFixture(fixtureEndpoints.metricsData);
@@ -256,3 +265,22 @@ export const rawValueStreamStages = customizableStagesAndEvents.stages;
export const valueStreamStages = rawValueStreamStages.map((s) =>
convertObjectPropsToCamelCase(s, { deep: true }),
);
+
+export const initialPaginationQuery = {
+ page: 15,
+ sort: PAGINATION_SORT_FIELD_END_EVENT,
+ direction: PAGINATION_SORT_DIRECTION_DESC,
+};
+
+export const initialPaginationState = {
+ ...initialPaginationQuery,
+ page: null,
+ hasNextPage: false,
+};
+
+export const basePaginationResult = {
+ pagination: PAGINATION_TYPE,
+ sort: PAGINATION_SORT_FIELD_END_EVENT,
+ direction: PAGINATION_SORT_DIRECTION_DESC,
+ page: null,
+};
diff --git a/spec/frontend/cycle_analytics/store/actions_spec.js b/spec/frontend/cycle_analytics/store/actions_spec.js
index 97b5bd03e18..993e6b6b73a 100644
--- a/spec/frontend/cycle_analytics/store/actions_spec.js
+++ b/spec/frontend/cycle_analytics/store/actions_spec.js
@@ -11,6 +11,8 @@ import {
currentGroup,
createdAfter,
createdBefore,
+ initialPaginationState,
+ reviewEvents,
} from '../mock_data';
const { id: groupId, path: groupPath } = currentGroup;
@@ -31,7 +33,13 @@ const mockSetDateActionCommit = {
type: 'SET_DATE_RANGE',
};
-const defaultState = { ...getters, selectedValueStream, createdAfter, createdBefore };
+const defaultState = {
+ ...getters,
+ selectedValueStream,
+ createdAfter,
+ createdBefore,
+ pagination: initialPaginationState,
+};
describe('Project Value Stream Analytics actions', () => {
let state;
@@ -112,6 +120,21 @@ describe('Project Value Stream Analytics actions', () => {
});
});
+ describe('updateStageTablePagination', () => {
+ beforeEach(() => {
+ state = { ...state, selectedStage };
+ });
+
+ it(`will dispatch the "fetchStageData" action and commit the 'SET_PAGINATION' mutation`, () => {
+ return testAction({
+ action: actions.updateStageTablePagination,
+ state,
+ expectedMutations: [{ type: 'SET_PAGINATION' }],
+ expectedActions: [{ type: 'fetchStageData', payload: selectedStage.id }],
+ });
+ });
+ });
+
describe('fetchCycleAnalyticsData', () => {
beforeEach(() => {
state = { ...defaultState, endpoints: mockEndpoints };
@@ -154,6 +177,10 @@ describe('Project Value Stream Analytics actions', () => {
describe('fetchStageData', () => {
const mockStagePath = /value_streams\/\w+\/stages\/\w+\/records/;
+ const headers = {
+ 'X-Next-Page': 2,
+ 'X-Page': 1,
+ };
beforeEach(() => {
state = {
@@ -162,7 +189,7 @@ describe('Project Value Stream Analytics actions', () => {
selectedStage,
};
mock = new MockAdapter(axios);
- mock.onGet(mockStagePath).reply(httpStatusCodes.OK);
+ mock.onGet(mockStagePath).reply(httpStatusCodes.OK, reviewEvents, headers);
});
it(`commits the 'RECEIVE_STAGE_DATA_SUCCESS' mutation`, () =>
@@ -170,7 +197,11 @@ describe('Project Value Stream Analytics actions', () => {
action: actions.fetchStageData,
state,
payload: {},
- expectedMutations: [{ type: 'REQUEST_STAGE_DATA' }, { type: 'RECEIVE_STAGE_DATA_SUCCESS' }],
+ expectedMutations: [
+ { type: 'REQUEST_STAGE_DATA' },
+ { type: 'RECEIVE_STAGE_DATA_SUCCESS', payload: reviewEvents },
+ { type: 'SET_PAGINATION', payload: { hasNextPage: true, page: 1 } },
+ ],
expectedActions: [],
}));
diff --git a/spec/frontend/cycle_analytics/store/getters_spec.js b/spec/frontend/cycle_analytics/store/getters_spec.js
index c47a30a5f79..c9208045a68 100644
--- a/spec/frontend/cycle_analytics/store/getters_spec.js
+++ b/spec/frontend/cycle_analytics/store/getters_spec.js
@@ -1,17 +1,42 @@
import * as getters from '~/cycle_analytics/store/getters';
+
import {
allowedStages,
stageMedians,
transformedProjectStagePathData,
selectedStage,
stageCounts,
+ basePaginationResult,
+ initialPaginationState,
} from '../mock_data';
describe('Value stream analytics getters', () => {
+ let state = {};
+
describe('pathNavigationData', () => {
it('returns the transformed data', () => {
- const state = { stages: allowedStages, medians: stageMedians, selectedStage, stageCounts };
+ state = { stages: allowedStages, medians: stageMedians, selectedStage, stageCounts };
expect(getters.pathNavigationData(state)).toEqual(transformedProjectStagePathData);
});
});
+
+ describe('paginationParams', () => {
+ beforeEach(() => {
+ state = { pagination: initialPaginationState };
+ });
+
+ it('returns the `pagination` type', () => {
+ expect(getters.paginationParams(state)).toEqual(basePaginationResult);
+ });
+
+ it('returns the `sort` type', () => {
+ expect(getters.paginationParams(state)).toEqual(basePaginationResult);
+ });
+
+ it('with page=10, sets the `page` property', () => {
+ const page = 10;
+ state = { pagination: { ...initialPaginationState, page } };
+ expect(getters.paginationParams(state)).toEqual({ ...basePaginationResult, page });
+ });
+ });
});
diff --git a/spec/frontend/cycle_analytics/store/mutations_spec.js b/spec/frontend/cycle_analytics/store/mutations_spec.js
index 628e2a4e7ae..4860225c995 100644
--- a/spec/frontend/cycle_analytics/store/mutations_spec.js
+++ b/spec/frontend/cycle_analytics/store/mutations_spec.js
@@ -2,6 +2,10 @@ import { useFakeDate } from 'helpers/fake_date';
import * as types from '~/cycle_analytics/store/mutation_types';
import mutations from '~/cycle_analytics/store/mutations';
import {
+ PAGINATION_SORT_FIELD_END_EVENT,
+ PAGINATION_SORT_DIRECTION_DESC,
+} from '~/cycle_analytics/constants';
+import {
selectedStage,
rawIssueEvents,
issueEvents,
@@ -12,6 +16,7 @@ import {
formattedStageMedians,
rawStageCounts,
stageCounts,
+ initialPaginationState as pagination,
} from '../mock_data';
let state;
@@ -25,7 +30,7 @@ describe('Project Value Stream Analytics mutations', () => {
useFakeDate(2020, 6, 18);
beforeEach(() => {
- state = {};
+ state = { pagination };
});
afterEach(() => {
@@ -88,16 +93,18 @@ describe('Project Value Stream Analytics mutations', () => {
});
it.each`
- mutation | payload | stateKey | value
- ${types.SET_DATE_RANGE} | ${mockSetDatePayload} | ${'createdAfter'} | ${mockCreatedAfter}
- ${types.SET_DATE_RANGE} | ${mockSetDatePayload} | ${'createdBefore'} | ${mockCreatedBefore}
- ${types.SET_LOADING} | ${true} | ${'isLoading'} | ${true}
- ${types.SET_LOADING} | ${false} | ${'isLoading'} | ${false}
- ${types.SET_SELECTED_VALUE_STREAM} | ${selectedValueStream} | ${'selectedValueStream'} | ${selectedValueStream}
- ${types.RECEIVE_VALUE_STREAMS_SUCCESS} | ${[selectedValueStream]} | ${'valueStreams'} | ${[selectedValueStream]}
- ${types.RECEIVE_VALUE_STREAM_STAGES_SUCCESS} | ${{ stages: rawValueStreamStages }} | ${'stages'} | ${valueStreamStages}
- ${types.RECEIVE_STAGE_MEDIANS_SUCCESS} | ${rawStageMedians} | ${'medians'} | ${formattedStageMedians}
- ${types.RECEIVE_STAGE_COUNTS_SUCCESS} | ${rawStageCounts} | ${'stageCounts'} | ${stageCounts}
+ mutation | payload | stateKey | value
+ ${types.SET_DATE_RANGE} | ${mockSetDatePayload} | ${'createdAfter'} | ${mockCreatedAfter}
+ ${types.SET_DATE_RANGE} | ${mockSetDatePayload} | ${'createdBefore'} | ${mockCreatedBefore}
+ ${types.SET_LOADING} | ${true} | ${'isLoading'} | ${true}
+ ${types.SET_LOADING} | ${false} | ${'isLoading'} | ${false}
+ ${types.SET_SELECTED_VALUE_STREAM} | ${selectedValueStream} | ${'selectedValueStream'} | ${selectedValueStream}
+ ${types.SET_PAGINATION} | ${pagination} | ${'pagination'} | ${{ ...pagination, sort: PAGINATION_SORT_FIELD_END_EVENT, direction: PAGINATION_SORT_DIRECTION_DESC }}
+ ${types.SET_PAGINATION} | ${{ ...pagination, sort: 'duration', direction: 'asc' }} | ${'pagination'} | ${{ ...pagination, sort: 'duration', direction: 'asc' }}
+ ${types.RECEIVE_VALUE_STREAMS_SUCCESS} | ${[selectedValueStream]} | ${'valueStreams'} | ${[selectedValueStream]}
+ ${types.RECEIVE_VALUE_STREAM_STAGES_SUCCESS} | ${{ stages: rawValueStreamStages }} | ${'stages'} | ${valueStreamStages}
+ ${types.RECEIVE_STAGE_MEDIANS_SUCCESS} | ${rawStageMedians} | ${'medians'} | ${formattedStageMedians}
+ ${types.RECEIVE_STAGE_COUNTS_SUCCESS} | ${rawStageCounts} | ${'stageCounts'} | ${stageCounts}
`(
'$mutation with $payload will set $stateKey to $value',
({ mutation, payload, stateKey, value }) => {
diff --git a/spec/frontend/cycle_analytics/utils_spec.js b/spec/frontend/cycle_analytics/utils_spec.js
index 69fed879fd8..74d64cd8d71 100644
--- a/spec/frontend/cycle_analytics/utils_spec.js
+++ b/spec/frontend/cycle_analytics/utils_spec.js
@@ -1,7 +1,6 @@
import { useFakeDate } from 'helpers/fake_date';
import {
transformStagesForPathNavigation,
- timeSummaryForPathNavigation,
medianTimeToParsedSeconds,
formatMedianValues,
filterStagesByHiddenStatus,
@@ -47,21 +46,6 @@ describe('Value stream analytics utils', () => {
});
});
- describe('timeSummaryForPathNavigation', () => {
- it.each`
- unit | value | result
- ${'months'} | ${1.5} | ${'1.5M'}
- ${'weeks'} | ${1.25} | ${'1.5w'}
- ${'days'} | ${2} | ${'2d'}
- ${'hours'} | ${10} | ${'10h'}
- ${'minutes'} | ${20} | ${'20m'}
- ${'seconds'} | ${10} | ${'<1m'}
- ${'seconds'} | ${0} | ${'-'}
- `('will format $value $unit to $result', ({ unit, value, result }) => {
- expect(timeSummaryForPathNavigation({ [unit]: value })).toBe(result);
- });
- });
-
describe('medianTimeToParsedSeconds', () => {
it.each`
value | result
diff --git a/spec/frontend/deploy_freeze/helpers.js b/spec/frontend/deploy_freeze/helpers.js
index 598f14d45f6..43e66183ab5 100644
--- a/spec/frontend/deploy_freeze/helpers.js
+++ b/spec/frontend/deploy_freeze/helpers.js
@@ -1,7 +1,8 @@
+import freezePeriodsFixture from 'test_fixtures/api/freeze-periods/freeze_periods.json';
+import timezoneDataFixture from 'test_fixtures/timezones/short.json';
import { secondsToHours } from '~/lib/utils/datetime_utility';
-export const freezePeriodsFixture = getJSONFixture('/api/freeze-periods/freeze_periods.json');
-export const timezoneDataFixture = getJSONFixture('/timezones/short.json');
+export { freezePeriodsFixture, timezoneDataFixture };
export const findTzByName = (identifier = '') =>
timezoneDataFixture.find(({ name }) => name.toLowerCase() === identifier.toLowerCase());
diff --git a/spec/frontend/deploy_keys/components/action_btn_spec.js b/spec/frontend/deploy_keys/components/action_btn_spec.js
index 307a0b6d8b0..6ac68061518 100644
--- a/spec/frontend/deploy_keys/components/action_btn_spec.js
+++ b/spec/frontend/deploy_keys/components/action_btn_spec.js
@@ -1,10 +1,10 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import data from 'test_fixtures/deploy_keys/keys.json';
import actionBtn from '~/deploy_keys/components/action_btn.vue';
import eventHub from '~/deploy_keys/eventhub';
describe('Deploy keys action btn', () => {
- const data = getJSONFixture('deploy_keys/keys.json');
const deployKey = data.enabled_keys[0];
let wrapper;
diff --git a/spec/frontend/deploy_keys/components/app_spec.js b/spec/frontend/deploy_keys/components/app_spec.js
index a72b2b00776..598b7a0f173 100644
--- a/spec/frontend/deploy_keys/components/app_spec.js
+++ b/spec/frontend/deploy_keys/components/app_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
+import data from 'test_fixtures/deploy_keys/keys.json';
import waitForPromises from 'helpers/wait_for_promises';
import { TEST_HOST } from 'spec/test_constants';
import deployKeysApp from '~/deploy_keys/components/app.vue';
@@ -10,7 +11,6 @@ import axios from '~/lib/utils/axios_utils';
const TEST_ENDPOINT = `${TEST_HOST}/dummy/`;
describe('Deploy keys app component', () => {
- const data = getJSONFixture('deploy_keys/keys.json');
let wrapper;
let mock;
diff --git a/spec/frontend/deploy_keys/components/key_spec.js b/spec/frontend/deploy_keys/components/key_spec.js
index 5420f9a01f9..511b9d6ef55 100644
--- a/spec/frontend/deploy_keys/components/key_spec.js
+++ b/spec/frontend/deploy_keys/components/key_spec.js
@@ -1,4 +1,5 @@
import { mount } from '@vue/test-utils';
+import data from 'test_fixtures/deploy_keys/keys.json';
import key from '~/deploy_keys/components/key.vue';
import DeployKeysStore from '~/deploy_keys/store';
import { getTimeago } from '~/lib/utils/datetime_utility';
@@ -7,8 +8,6 @@ describe('Deploy keys key', () => {
let wrapper;
let store;
- const data = getJSONFixture('deploy_keys/keys.json');
-
const findTextAndTrim = (selector) => wrapper.find(selector).text().trim();
const createComponent = (propsData) => {
diff --git a/spec/frontend/deploy_keys/components/keys_panel_spec.js b/spec/frontend/deploy_keys/components/keys_panel_spec.js
index d6419356166..f3b907e5450 100644
--- a/spec/frontend/deploy_keys/components/keys_panel_spec.js
+++ b/spec/frontend/deploy_keys/components/keys_panel_spec.js
@@ -1,9 +1,9 @@
import { mount } from '@vue/test-utils';
+import data from 'test_fixtures/deploy_keys/keys.json';
import deployKeysPanel from '~/deploy_keys/components/keys_panel.vue';
import DeployKeysStore from '~/deploy_keys/store';
describe('Deploy keys panel', () => {
- const data = getJSONFixture('deploy_keys/keys.json');
let wrapper;
const findTableRowHeader = () => wrapper.find('.table-row-header');
diff --git a/spec/frontend/deprecated_jquery_dropdown_spec.js b/spec/frontend/deprecated_jquery_dropdown_spec.js
index 4a6dee31cd5..7e4c6e131b4 100644
--- a/spec/frontend/deprecated_jquery_dropdown_spec.js
+++ b/spec/frontend/deprecated_jquery_dropdown_spec.js
@@ -3,6 +3,8 @@
import $ from 'jquery';
import initDeprecatedJQueryDropdown from '~/deprecated_jquery_dropdown';
import '~/lib/utils/common_utils';
+// eslint-disable-next-line import/no-deprecated
+import { getJSONFixture } from 'helpers/fixtures';
import { visitUrl } from '~/lib/utils/url_utility';
jest.mock('~/lib/utils/url_utility', () => ({
@@ -66,6 +68,7 @@ describe('deprecatedJQueryDropdown', () => {
loadFixtures('static/deprecated_jquery_dropdown.html');
test.dropdownContainerElement = $('.dropdown.inline');
test.$dropdownMenuElement = $('.dropdown-menu', test.dropdownContainerElement);
+ // eslint-disable-next-line import/no-deprecated
test.projectsData = getJSONFixture('static/projects.json');
});
diff --git a/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap b/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
index 67e4a82787c..2b706d21f51 100644
--- a/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
+++ b/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
@@ -4,13 +4,13 @@ exports[`Design management design version dropdown component renders design vers
<gl-dropdown-stub
category="primary"
clearalltext="Clear all"
+ clearalltextclass="gl-px-5"
headertext=""
hideheaderborder="true"
highlighteditemstitle="Selected"
highlighteditemstitleclass="gl-px-5"
issueiid=""
projectpath=""
- showhighlighteditemstitle="true"
size="small"
text="Showing latest version"
variant="default"
@@ -85,13 +85,13 @@ exports[`Design management design version dropdown component renders design vers
<gl-dropdown-stub
category="primary"
clearalltext="Clear all"
+ clearalltextclass="gl-px-5"
headertext=""
hideheaderborder="true"
highlighteditemstitle="Selected"
highlighteditemstitleclass="gl-px-5"
issueiid=""
projectpath=""
- showhighlighteditemstitle="true"
size="small"
text="Showing latest version"
variant="default"
diff --git a/spec/frontend/design_management/utils/cache_update_spec.js b/spec/frontend/design_management/utils/cache_update_spec.js
index 7327cf00abd..fa6a666bb37 100644
--- a/spec/frontend/design_management/utils/cache_update_spec.js
+++ b/spec/frontend/design_management/utils/cache_update_spec.js
@@ -26,11 +26,11 @@ describe('Design Management cache update', () => {
describe('error handling', () => {
it.each`
- fnName | subject | errorMessage | extraArgs
- ${'updateStoreAfterDesignsDelete'} | ${updateStoreAfterDesignsDelete} | ${designDeletionError({ singular: true })} | ${[[design]]}
- ${'updateStoreAfterAddImageDiffNote'} | ${updateStoreAfterAddImageDiffNote} | ${ADD_IMAGE_DIFF_NOTE_ERROR} | ${[]}
- ${'updateStoreAfterUploadDesign'} | ${updateStoreAfterUploadDesign} | ${mockErrors[0]} | ${[]}
- ${'updateStoreAfterUpdateImageDiffNote'} | ${updateStoreAfterRepositionImageDiffNote} | ${UPDATE_IMAGE_DIFF_NOTE_ERROR} | ${[]}
+ fnName | subject | errorMessage | extraArgs
+ ${'updateStoreAfterDesignsDelete'} | ${updateStoreAfterDesignsDelete} | ${designDeletionError()} | ${[[design]]}
+ ${'updateStoreAfterAddImageDiffNote'} | ${updateStoreAfterAddImageDiffNote} | ${ADD_IMAGE_DIFF_NOTE_ERROR} | ${[]}
+ ${'updateStoreAfterUploadDesign'} | ${updateStoreAfterUploadDesign} | ${mockErrors[0]} | ${[]}
+ ${'updateStoreAfterUpdateImageDiffNote'} | ${updateStoreAfterRepositionImageDiffNote} | ${UPDATE_IMAGE_DIFF_NOTE_ERROR} | ${[]}
`('$fnName handles errors in response', ({ subject, extraArgs, errorMessage }) => {
expect(createFlash).not.toHaveBeenCalled();
expect(() => subject(mockStore, { errors: mockErrors }, {}, ...extraArgs)).toThrow();
diff --git a/spec/frontend/design_management/utils/error_messages_spec.js b/spec/frontend/design_management/utils/error_messages_spec.js
index b80dcd9abde..4994f4f6fd0 100644
--- a/spec/frontend/design_management/utils/error_messages_spec.js
+++ b/spec/frontend/design_management/utils/error_messages_spec.js
@@ -10,20 +10,21 @@ const mockFilenames = (n) =>
describe('Error message', () => {
describe('designDeletionError', () => {
- const singularMsg = 'Could not archive a design. Please try again.';
- const pluralMsg = 'Could not archive designs. Please try again.';
+ const singularMsg = 'Failed to archive a design. Please try again.';
+ const pluralMsg = 'Failed to archive designs. Please try again.';
- describe('when [singular=true]', () => {
- it.each([[undefined], [true]])('uses singular grammar', (singularOption) => {
- expect(designDeletionError({ singular: singularOption })).toEqual(singularMsg);
- });
- });
-
- describe('when [singular=false]', () => {
- it('uses plural grammar', () => {
- expect(designDeletionError({ singular: false })).toEqual(pluralMsg);
- });
- });
+ it.each`
+ designsLength | expectedText
+ ${undefined} | ${singularMsg}
+ ${0} | ${pluralMsg}
+ ${1} | ${singularMsg}
+ ${2} | ${pluralMsg}
+ `(
+ 'returns "$expectedText" when designsLength is $designsLength',
+ ({ designsLength, expectedText }) => {
+ expect(designDeletionError(designsLength)).toBe(expectedText);
+ },
+ );
});
describe.each([
@@ -47,12 +48,12 @@ describe('Error message', () => {
[
mockFilenames(7),
mockFilenames(6),
- 'Upload skipped. Some of the designs you tried uploading did not change: 1.jpg, 2.jpg, 3.jpg, 4.jpg, 5.jpg, and 1 more.',
+ 'Upload skipped. Some of the designs you tried uploading did not change: 1.jpg, 2.jpg, 3.jpg, 4.jpg, 5.jpg and 1 more.',
],
[
mockFilenames(8),
mockFilenames(7),
- 'Upload skipped. Some of the designs you tried uploading did not change: 1.jpg, 2.jpg, 3.jpg, 4.jpg, 5.jpg, and 2 more.',
+ 'Upload skipped. Some of the designs you tried uploading did not change: 1.jpg, 2.jpg, 3.jpg, 4.jpg, 5.jpg and 2 more.',
],
])('designUploadSkippedWarning', (uploadedFiles, skippedFiles, expected) => {
it('returns expected warning message', () => {
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index 9dc82bbdc93..0527c2153f4 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -13,11 +13,8 @@ import DiffFile from '~/diffs/components/diff_file.vue';
import NoChanges from '~/diffs/components/no_changes.vue';
import TreeList from '~/diffs/components/tree_list.vue';
-/* eslint-disable import/order */
-/* You know what: sometimes alphabetical isn't the best order */
import CollapsedFilesWarning from '~/diffs/components/collapsed_files_warning.vue';
import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
-/* eslint-enable import/order */
import axios from '~/lib/utils/axios_utils';
import * as urlUtils from '~/lib/utils/url_utility';
@@ -705,4 +702,23 @@ describe('diffs/components/app', () => {
);
});
});
+
+ describe('fluid layout', () => {
+ beforeEach(() => {
+ setFixtures(
+ '<div><div class="merge-request-container limit-container-width container-limited"></div></div>',
+ );
+ });
+
+ it('removes limited container classes when on diffs tab', () => {
+ createComponent({ isFluidLayout: false, shouldShow: true }, () => {}, {
+ glFeatures: { mrChangesFluidLayout: true },
+ });
+
+ const containerClassList = document.querySelector('.merge-request-container').classList;
+
+ expect(containerClassList).not.toContain('container-limited');
+ expect(containerClassList).not.toContain('limit-container-width');
+ });
+ });
});
diff --git a/spec/frontend/diffs/components/commit_item_spec.js b/spec/frontend/diffs/components/commit_item_spec.js
index 0191822d97a..d887029124f 100644
--- a/spec/frontend/diffs/components/commit_item_spec.js
+++ b/spec/frontend/diffs/components/commit_item_spec.js
@@ -1,10 +1,10 @@
import { mount } from '@vue/test-utils';
+import getDiffWithCommit from 'test_fixtures/merge_request_diffs/with_commit.json';
import { TEST_HOST } from 'helpers/test_constants';
import { trimText } from 'helpers/text_helper';
import Component from '~/diffs/components/commit_item.vue';
import { getTimeago } from '~/lib/utils/datetime_utility';
import CommitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue';
-import getDiffWithCommit from '../mock_data/diff_with_commit';
jest.mock('~/user_popovers');
@@ -18,7 +18,7 @@ describe('diffs/components/commit_item', () => {
let wrapper;
const timeago = getTimeago();
- const { commit } = getDiffWithCommit();
+ const { commit } = getDiffWithCommit;
const getTitleElement = () => wrapper.find('.commit-row-message.item-title');
const getDescElement = () => wrapper.find('pre.commit-row-description');
diff --git a/spec/frontend/diffs/components/compare_versions_spec.js b/spec/frontend/diffs/components/compare_versions_spec.js
index 1c0cb1193fa..c48935bc4f0 100644
--- a/spec/frontend/diffs/components/compare_versions_spec.js
+++ b/spec/frontend/diffs/components/compare_versions_spec.js
@@ -1,11 +1,11 @@
import { mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
+import getDiffWithCommit from 'test_fixtures/merge_request_diffs/with_commit.json';
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import { trimText } from 'helpers/text_helper';
import CompareVersionsComponent from '~/diffs/components/compare_versions.vue';
import { createStore } from '~/mr_notes/stores';
-import getDiffWithCommit from '../mock_data/diff_with_commit';
import diffsMockData from '../mock_data/merge_request_diffs';
const localVue = createLocalVue();
@@ -22,7 +22,7 @@ describe('CompareVersions', () => {
let wrapper;
let store;
const targetBranchName = 'tmp-wine-dev';
- const { commit } = getDiffWithCommit();
+ const { commit } = getDiffWithCommit;
const createWrapper = (props = {}, commitArgs = {}, createCommit = true) => {
if (createCommit) {
@@ -150,7 +150,7 @@ describe('CompareVersions', () => {
describe('commit', () => {
beforeEach(() => {
- store.state.diffs.commit = getDiffWithCommit().commit;
+ store.state.diffs.commit = getDiffWithCommit.commit;
createWrapper();
});
diff --git a/spec/frontend/diffs/mock_data/diff_with_commit.js b/spec/frontend/diffs/mock_data/diff_with_commit.js
deleted file mode 100644
index f3b39bd3577..00000000000
--- a/spec/frontend/diffs/mock_data/diff_with_commit.js
+++ /dev/null
@@ -1,5 +0,0 @@
-const FIXTURE = 'merge_request_diffs/with_commit.json';
-
-export default function getDiffWithCommit() {
- return getJSONFixture(FIXTURE);
-}
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index b35abc9da02..85734e05aeb 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -51,7 +51,7 @@ import {
} from '~/diffs/store/actions';
import * as types from '~/diffs/store/mutation_types';
import * as utils from '~/diffs/store/utils';
-import * as workerUtils from '~/diffs/utils/workers';
+import * as treeWorkerUtils from '~/diffs/utils/tree_worker_utils';
import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import * as commonUtils from '~/lib/utils/common_utils';
@@ -253,7 +253,7 @@ describe('DiffsStoreActions', () => {
// Workers are synchronous in Jest environment (see https://gitlab.com/gitlab-org/gitlab/-/merge_requests/58805)
{
type: types.SET_TREE_DATA,
- payload: workerUtils.generateTreeList(diffMetadata.diff_files),
+ payload: treeWorkerUtils.generateTreeList(diffMetadata.diff_files),
},
],
[],
diff --git a/spec/frontend/diffs/utils/workers_spec.js b/spec/frontend/diffs/utils/tree_worker_utils_spec.js
index 25d8183b777..8113428f712 100644
--- a/spec/frontend/diffs/utils/workers_spec.js
+++ b/spec/frontend/diffs/utils/tree_worker_utils_spec.js
@@ -1,6 +1,10 @@
-import { generateTreeList, getLowestSingleFolder, flattenTree } from '~/diffs/utils/workers';
+import {
+ generateTreeList,
+ getLowestSingleFolder,
+ flattenTree,
+} from '~/diffs/utils/tree_worker_utils';
-describe('~/diffs/utils/workers', () => {
+describe('~/diffs/utils/tree_worker_utils', () => {
describe('generateTreeList', () => {
let files;
diff --git a/spec/frontend/editor/source_editor_ci_schema_ext_spec.js b/spec/frontend/editor/source_editor_ci_schema_ext_spec.js
index 07ac080fe08..8a0d1ecf1af 100644
--- a/spec/frontend/editor/source_editor_ci_schema_ext_spec.js
+++ b/spec/frontend/editor/source_editor_ci_schema_ext_spec.js
@@ -1,7 +1,7 @@
import { languages } from 'monaco-editor';
import { TEST_HOST } from 'helpers/test_constants';
-import { EXTENSION_CI_SCHEMA_FILE_NAME_MATCH } from '~/editor/constants';
import { CiSchemaExtension } from '~/editor/extensions/source_editor_ci_schema_ext';
+import ciSchemaPath from '~/editor/schema/ci.json';
import SourceEditor from '~/editor/source_editor';
const mockRef = 'AABBCCDD';
@@ -84,7 +84,7 @@ describe('~/editor/editor_ci_config_ext', () => {
});
expect(getConfiguredYmlSchema()).toEqual({
- uri: `${TEST_HOST}/${mockProjectNamespace}/${mockProjectPath}/-/schema/${mockRef}/${EXTENSION_CI_SCHEMA_FILE_NAME_MATCH}`,
+ uri: `${TEST_HOST}${ciSchemaPath}`,
fileMatch: [defaultBlobPath],
});
});
@@ -99,7 +99,7 @@ describe('~/editor/editor_ci_config_ext', () => {
});
expect(getConfiguredYmlSchema()).toEqual({
- uri: `${TEST_HOST}/${mockProjectNamespace}/${mockProjectPath}/-/schema/${mockRef}/${EXTENSION_CI_SCHEMA_FILE_NAME_MATCH}`,
+ uri: `${TEST_HOST}${ciSchemaPath}`,
fileMatch: ['another-ci-filename.yml'],
});
});
diff --git a/spec/frontend/environments/environment_delete_spec.js b/spec/frontend/environments/environment_delete_spec.js
index a8c288a3bd8..2d8cff0c74a 100644
--- a/spec/frontend/environments/environment_delete_spec.js
+++ b/spec/frontend/environments/environment_delete_spec.js
@@ -1,4 +1,4 @@
-import { GlButton } from '@gitlab/ui';
+import { GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import DeleteComponent from '~/environments/components/environment_delete.vue';
@@ -15,7 +15,7 @@ describe('External URL Component', () => {
});
};
- const findButton = () => wrapper.find(GlButton);
+ const findDropdownItem = () => wrapper.find(GlDropdownItem);
beforeEach(() => {
jest.spyOn(window, 'confirm');
@@ -23,14 +23,15 @@ describe('External URL Component', () => {
createWrapper();
});
- it('should render a button to delete the environment', () => {
- expect(findButton().exists()).toBe(true);
- expect(wrapper.attributes('title')).toEqual('Delete environment');
+ it('should render a dropdown item to delete the environment', () => {
+ expect(findDropdownItem().exists()).toBe(true);
+ expect(wrapper.text()).toEqual('Delete environment');
+ expect(findDropdownItem().attributes('variant')).toBe('danger');
});
it('emits requestDeleteEnvironment in the event hub when button is clicked', () => {
jest.spyOn(eventHub, '$emit');
- findButton().vm.$emit('click');
+ findDropdownItem().vm.$emit('click');
expect(eventHub.$emit).toHaveBeenCalledWith('requestDeleteEnvironment', wrapper.vm.environment);
});
});
diff --git a/spec/frontend/environments/environment_monitoring_spec.js b/spec/frontend/environments/environment_monitoring_spec.js
index 3a53b57c3c6..98dd9edd812 100644
--- a/spec/frontend/environments/environment_monitoring_spec.js
+++ b/spec/frontend/environments/environment_monitoring_spec.js
@@ -1,6 +1,6 @@
-import { GlButton } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import MonitoringComponent from '~/environments/components/environment_monitoring.vue';
+import { __ } from '~/locale';
describe('Monitoring Component', () => {
let wrapper;
@@ -8,31 +8,19 @@ describe('Monitoring Component', () => {
const monitoringUrl = 'https://gitlab.com';
const createWrapper = () => {
- wrapper = shallowMount(MonitoringComponent, {
+ wrapper = mountExtended(MonitoringComponent, {
propsData: {
monitoringUrl,
},
});
};
- const findButtons = () => wrapper.findAll(GlButton);
- const findButtonsByIcon = (icon) =>
- findButtons().filter((button) => button.props('icon') === icon);
-
beforeEach(() => {
createWrapper();
});
- describe('computed', () => {
- it('title', () => {
- expect(wrapper.vm.title).toBe('Monitoring');
- });
- });
-
it('should render a link to environment monitoring page', () => {
- expect(wrapper.attributes('href')).toEqual(monitoringUrl);
- expect(findButtonsByIcon('chart').length).toBe(1);
- expect(wrapper.attributes('title')).toBe('Monitoring');
- expect(wrapper.attributes('aria-label')).toBe('Monitoring');
+ const link = wrapper.findByRole('menuitem', { name: __('Monitoring') });
+ expect(link.attributes('href')).toEqual(monitoringUrl);
});
});
diff --git a/spec/frontend/environments/environment_pin_spec.js b/spec/frontend/environments/environment_pin_spec.js
index 5cdd52294b6..a9a58071e12 100644
--- a/spec/frontend/environments/environment_pin_spec.js
+++ b/spec/frontend/environments/environment_pin_spec.js
@@ -1,4 +1,4 @@
-import { GlButton, GlIcon } from '@gitlab/ui';
+import { GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import PinComponent from '~/environments/components/environment_pin.vue';
import eventHub from '~/environments/event_hub';
@@ -30,15 +30,15 @@ describe('Pin Component', () => {
wrapper.destroy();
});
- it('should render the component with thumbtack icon', () => {
- expect(wrapper.find(GlIcon).props('name')).toBe('thumbtack');
+ it('should render the component with descriptive text', () => {
+ expect(wrapper.text()).toBe('Prevent auto-stopping');
});
it('should emit onPinClick when clicked', () => {
const eventHubSpy = jest.spyOn(eventHub, '$emit');
- const button = wrapper.find(GlButton);
+ const item = wrapper.find(GlDropdownItem);
- button.vm.$emit('click');
+ item.vm.$emit('click');
expect(eventHubSpy).toHaveBeenCalledWith('cancelAutoStop', autoStopUrl);
});
diff --git a/spec/frontend/environments/environment_rollback_spec.js b/spec/frontend/environments/environment_rollback_spec.js
index b6c3d436c18..cde675cd9e7 100644
--- a/spec/frontend/environments/environment_rollback_spec.js
+++ b/spec/frontend/environments/environment_rollback_spec.js
@@ -1,5 +1,5 @@
-import { GlButton } from '@gitlab/ui';
-import { shallowMount, mount } from '@vue/test-utils';
+import { GlDropdownItem } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
import RollbackComponent from '~/environments/components/environment_rollback.vue';
import eventHub from '~/environments/event_hub';
@@ -7,7 +7,7 @@ describe('Rollback Component', () => {
const retryUrl = 'https://gitlab.com/retry';
it('Should render Re-deploy label when isLastDeployment is true', () => {
- const wrapper = mount(RollbackComponent, {
+ const wrapper = shallowMount(RollbackComponent, {
propsData: {
retryUrl,
isLastDeployment: true,
@@ -15,11 +15,11 @@ describe('Rollback Component', () => {
},
});
- expect(wrapper.element).toHaveSpriteIcon('repeat');
+ expect(wrapper.text()).toBe('Re-deploy to environment');
});
it('Should render Rollback label when isLastDeployment is false', () => {
- const wrapper = mount(RollbackComponent, {
+ const wrapper = shallowMount(RollbackComponent, {
propsData: {
retryUrl,
isLastDeployment: false,
@@ -27,7 +27,7 @@ describe('Rollback Component', () => {
},
});
- expect(wrapper.element).toHaveSpriteIcon('redo');
+ expect(wrapper.text()).toBe('Rollback environment');
});
it('should emit a "rollback" event on button click', () => {
@@ -40,7 +40,7 @@ describe('Rollback Component', () => {
},
},
});
- const button = wrapper.find(GlButton);
+ const button = wrapper.find(GlDropdownItem);
button.vm.$emit('click');
diff --git a/spec/frontend/environments/environment_terminal_button_spec.js b/spec/frontend/environments/environment_terminal_button_spec.js
index 2475785a927..ab9f370595f 100644
--- a/spec/frontend/environments/environment_terminal_button_spec.js
+++ b/spec/frontend/environments/environment_terminal_button_spec.js
@@ -1,12 +1,13 @@
-import { shallowMount } from '@vue/test-utils';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import TerminalComponent from '~/environments/components/environment_terminal_button.vue';
+import { __ } from '~/locale';
-describe('Stop Component', () => {
+describe('Terminal Component', () => {
let wrapper;
const terminalPath = '/path';
const mountWithProps = (props) => {
- wrapper = shallowMount(TerminalComponent, {
+ wrapper = mountExtended(TerminalComponent, {
propsData: props,
});
};
@@ -15,17 +16,9 @@ describe('Stop Component', () => {
mountWithProps({ terminalPath });
});
- describe('computed', () => {
- it('title', () => {
- expect(wrapper.vm.title).toEqual('Terminal');
- });
- });
-
it('should render a link to open a web terminal with the provided path', () => {
- expect(wrapper.element.tagName).toBe('A');
- expect(wrapper.attributes('title')).toBe('Terminal');
- expect(wrapper.attributes('aria-label')).toBe('Terminal');
- expect(wrapper.attributes('href')).toBe(terminalPath);
+ const link = wrapper.findByRole('menuitem', { name: __('Terminal') });
+ expect(link.attributes('href')).toBe(terminalPath);
});
it('should render a non-disabled button', () => {
diff --git a/spec/frontend/error_tracking/components/error_details_spec.js b/spec/frontend/error_tracking/components/error_details_spec.js
index babbc0c8a4d..4e459d800e8 100644
--- a/spec/frontend/error_tracking/components/error_details_spec.js
+++ b/spec/frontend/error_tracking/components/error_details_spec.js
@@ -503,6 +503,53 @@ describe('ErrorDetails', () => {
});
});
});
+
+ describe('Release links', () => {
+ const firstReleaseVersion = '7975be01';
+ const firstCommitLink = '/gitlab/-/commit/7975be01';
+ const firstReleaseLink = '/sentry/releases/7975be01';
+ const findFirstCommitLink = () => wrapper.find(`[href$="${firstCommitLink}"]`);
+ const findFirstReleaseLink = () => wrapper.find(`[href$="${firstReleaseLink}"]`);
+
+ const lastReleaseVersion = '6ca5a5c1';
+ const lastCommitLink = '/gitlab/-/commit/6ca5a5c1';
+ const lastReleaseLink = '/sentry/releases/6ca5a5c1';
+ const findLastCommitLink = () => wrapper.find(`[href$="${lastCommitLink}"]`);
+ const findLastReleaseLink = () => wrapper.find(`[href$="${lastReleaseLink}"]`);
+
+ it('should display links to Sentry', async () => {
+ mocks.$apollo.queries.error.loading = false;
+ await wrapper.setData({
+ error: {
+ firstReleaseVersion,
+ lastReleaseVersion,
+ externalBaseUrl: '/sentry',
+ },
+ });
+
+ expect(findFirstReleaseLink().exists()).toBe(true);
+ expect(findLastReleaseLink().exists()).toBe(true);
+ expect(findFirstCommitLink().exists()).toBe(false);
+ expect(findLastCommitLink().exists()).toBe(false);
+ });
+
+ it('should display links to GitLab when integrated', async () => {
+ mocks.$apollo.queries.error.loading = false;
+ await wrapper.setData({
+ error: {
+ firstReleaseVersion,
+ lastReleaseVersion,
+ integrated: true,
+ externalBaseUrl: '/gitlab',
+ },
+ });
+
+ expect(findFirstCommitLink().exists()).toBe(true);
+ expect(findLastCommitLink().exists()).toBe(true);
+ expect(findFirstReleaseLink().exists()).toBe(false);
+ expect(findLastReleaseLink().exists()).toBe(false);
+ });
+ });
});
describe('Snowplow tracking', () => {
diff --git a/spec/frontend/error_tracking_settings/components/app_spec.js b/spec/frontend/error_tracking_settings/components/app_spec.js
index 30541ba68a5..844faff64a1 100644
--- a/spec/frontend/error_tracking_settings/components/app_spec.js
+++ b/spec/frontend/error_tracking_settings/components/app_spec.js
@@ -1,7 +1,8 @@
-import { GlFormRadioGroup, GlFormRadio } from '@gitlab/ui';
+import { GlFormRadioGroup, GlFormRadio, GlFormInputGroup } from '@gitlab/ui';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import Vuex from 'vuex';
+import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import { TEST_HOST } from 'helpers/test_constants';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import ErrorTrackingSettings from '~/error_tracking_settings/components/app.vue';
@@ -12,6 +13,8 @@ import createStore from '~/error_tracking_settings/store';
const localVue = createLocalVue();
localVue.use(Vuex);
+const TEST_GITLAB_DSN = 'https://gitlab.example.com/123456';
+
describe('error tracking settings app', () => {
let store;
let wrapper;
@@ -29,6 +32,10 @@ describe('error tracking settings app', () => {
initialProject: null,
listProjectsEndpoint: TEST_HOST,
operationsSettingsEndpoint: TEST_HOST,
+ gitlabDsn: TEST_GITLAB_DSN,
+ },
+ stubs: {
+ GlFormInputGroup, // we need this non-shallow to query for a component within a slot
},
}),
);
@@ -41,6 +48,12 @@ describe('error tracking settings app', () => {
findBackendSettingsRadioGroup().findAllComponents(GlFormRadio);
const findElementWithText = (wrappers, text) => wrappers.filter((item) => item.text() === text);
const findSentrySettings = () => wrapper.findByTestId('sentry-setting-form');
+ const findDsnSettings = () => wrapper.findByTestId('gitlab-dsn-setting-form');
+
+ const enableGitLabErrorTracking = async () => {
+ findBackendSettingsRadioGroup().vm.$emit('change', true);
+ await nextTick();
+ };
beforeEach(() => {
store = createStore();
@@ -93,17 +106,35 @@ describe('error tracking settings app', () => {
expect(findElementWithText(findBackendSettingsRadioButtons(), 'GitLab')).toHaveLength(1);
});
- it('toggles the sentry-settings section when sentry is selected as a tracking-backend', async () => {
+ it('hides the Sentry settings when GitLab is selected as a tracking-backend', async () => {
expect(findSentrySettings().exists()).toBe(true);
- // set the "integrated" setting to "true"
- findBackendSettingsRadioGroup().vm.$emit('change', true);
-
- await nextTick();
+ await enableGitLabErrorTracking();
expect(findSentrySettings().exists()).toBe(false);
});
+ describe('GitLab DSN section', () => {
+ it('is visible when GitLab is selected as a tracking-backend and DSN is present', async () => {
+ expect(findDsnSettings().exists()).toBe(false);
+
+ await enableGitLabErrorTracking();
+
+ expect(findDsnSettings().exists()).toBe(true);
+ });
+
+ it('contains copy-to-clipboard functionality for the GitLab DSN string', async () => {
+ await enableGitLabErrorTracking();
+
+ const clipBoardInput = findDsnSettings().findComponent(GlFormInputGroup);
+ const clipBoardButton = findDsnSettings().findComponent(ClipboardButton);
+
+ expect(clipBoardInput.props('value')).toBe(TEST_GITLAB_DSN);
+ expect(clipBoardInput.attributes('readonly')).toBeTruthy();
+ expect(clipBoardButton.props('text')).toBe(TEST_GITLAB_DSN);
+ });
+ });
+
it.each([true, false])(
'calls the `updateIntegrated` action when the setting changes to `%s`',
(integrated) => {
diff --git a/spec/frontend/experimentation/utils_spec.js b/spec/frontend/experimentation/utils_spec.js
index 999bed1ffbd..de060f5eb8c 100644
--- a/spec/frontend/experimentation/utils_spec.js
+++ b/spec/frontend/experimentation/utils_spec.js
@@ -23,20 +23,6 @@ describe('experiment Utilities', () => {
});
});
- describe('getExperimentContexts', () => {
- describe.each`
- gon | input | output
- ${[TEST_KEY, '_data_']} | ${[TEST_KEY]} | ${[{ schema: TRACKING_CONTEXT_SCHEMA, data: { variant: '_data_' } }]}
- ${[]} | ${[TEST_KEY]} | ${[]}
- `('with input=$input and gon=$gon', ({ gon, input, output }) => {
- assignGitlabExperiment(...gon);
-
- it(`returns ${output}`, () => {
- expect(experimentUtils.getExperimentContexts(...input)).toEqual(output);
- });
- });
- });
-
describe('getAllExperimentContexts', () => {
const schema = TRACKING_CONTEXT_SCHEMA;
let origGon;
diff --git a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
index 799b567a2c0..721b7249abc 100644
--- a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
+++ b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
@@ -4,6 +4,7 @@ import MockAdapter from 'axios-mock-adapter';
import Vue from 'vue';
import Vuex from 'vuex';
import { mockTracking } from 'helpers/tracking_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import { TEST_HOST } from 'spec/test_constants';
import EditFeatureFlag from '~/feature_flags/components/edit_feature_flag.vue';
import Form from '~/feature_flags/components/form.vue';
@@ -20,7 +21,7 @@ describe('Edit feature flag form', () => {
endpoint: `${TEST_HOST}/feature_flags.json`,
});
- const factory = (provide = {}) => {
+ const factory = (provide = { searchPath: '/search' }) => {
if (wrapper) {
wrapper.destroy();
wrapper = null;
@@ -31,7 +32,7 @@ describe('Edit feature flag form', () => {
});
};
- beforeEach((done) => {
+ beforeEach(() => {
mock = new MockAdapter(axios);
mock.onGet(`${TEST_HOST}/feature_flags.json`).replyOnce(200, {
id: 21,
@@ -45,7 +46,8 @@ describe('Edit feature flag form', () => {
destroy_path: '/h5bp/html5-boilerplate/-/feature_flags/21',
});
factory();
- setImmediate(() => done());
+
+ return waitForPromises();
});
afterEach(() => {
@@ -60,7 +62,7 @@ describe('Edit feature flag form', () => {
});
it('should render the toggle', () => {
- expect(wrapper.find(GlToggle).exists()).toBe(true);
+ expect(wrapper.findComponent(GlToggle).exists()).toBe(true);
});
describe('with error', () => {
@@ -80,11 +82,11 @@ describe('Edit feature flag form', () => {
});
it('should render feature flag form', () => {
- expect(wrapper.find(Form).exists()).toEqual(true);
+ expect(wrapper.findComponent(Form).exists()).toEqual(true);
});
it('should track when the toggle is clicked', () => {
- const toggle = wrapper.find(GlToggle);
+ const toggle = wrapper.findComponent(GlToggle);
const spy = mockTracking('_category_', toggle.element, jest.spyOn);
toggle.trigger('click');
@@ -95,7 +97,7 @@ describe('Edit feature flag form', () => {
});
it('should render the toggle with a visually hidden label', () => {
- expect(wrapper.find(GlToggle).props()).toMatchObject({
+ expect(wrapper.findComponent(GlToggle).props()).toMatchObject({
label: 'Feature flag status',
labelPosition: 'hidden',
});
diff --git a/spec/frontend/filterable_list_spec.js b/spec/frontend/filterable_list_spec.js
index 8c6a71abad7..556cf6f8137 100644
--- a/spec/frontend/filterable_list_spec.js
+++ b/spec/frontend/filterable_list_spec.js
@@ -1,3 +1,4 @@
+// eslint-disable-next-line import/no-deprecated
import { getJSONFixture, setHTMLFixture } from 'helpers/fixtures';
import FilterableList from '~/filterable_list';
@@ -14,6 +15,7 @@ describe('FilterableList', () => {
</div>
<div class="js-projects-list-holder"></div>
`);
+ // eslint-disable-next-line import/no-deprecated
getJSONFixture('static/projects.json');
form = document.querySelector('form#project-filter-form');
filter = document.querySelector('.js-projects-list-filter');
diff --git a/spec/frontend/filtered_search/dropdown_user_spec.js b/spec/frontend/filtered_search/dropdown_user_spec.js
index 961587f7146..9a20fb1bae6 100644
--- a/spec/frontend/filtered_search/dropdown_user_spec.js
+++ b/spec/frontend/filtered_search/dropdown_user_spec.js
@@ -1,8 +1,5 @@
-import DropdownUtils from '~/filtered_search/dropdown_utils';
-// TODO: Moving this line up throws an error about `FilteredSearchDropdown`
-// being undefined in test. See gitlab-org/gitlab#321476 for more info.
-// eslint-disable-next-line import/order
import DropdownUser from '~/filtered_search/dropdown_user';
+import DropdownUtils from '~/filtered_search/dropdown_utils';
import FilteredSearchTokenizer from '~/filtered_search/filtered_search_tokenizer';
import IssuableFilteredTokenKeys from '~/filtered_search/issuable_filtered_search_token_keys';
diff --git a/spec/frontend/droplab/constants_spec.js b/spec/frontend/filtered_search/droplab/constants_spec.js
index fd48228d6a2..9c1caf90ac0 100644
--- a/spec/frontend/droplab/constants_spec.js
+++ b/spec/frontend/filtered_search/droplab/constants_spec.js
@@ -1,4 +1,4 @@
-import * as constants from '~/droplab/constants';
+import * as constants from '~/filtered_search/droplab/constants';
describe('constants', () => {
describe('DATA_TRIGGER', () => {
diff --git a/spec/frontend/droplab/drop_down_spec.js b/spec/frontend/filtered_search/droplab/drop_down_spec.js
index dcdbbcd4ccf..f49dbfcf79c 100644
--- a/spec/frontend/droplab/drop_down_spec.js
+++ b/spec/frontend/filtered_search/droplab/drop_down_spec.js
@@ -1,6 +1,6 @@
-import { SELECTED_CLASS } from '~/droplab/constants';
-import DropDown from '~/droplab/drop_down';
-import utils from '~/droplab/utils';
+import { SELECTED_CLASS } from '~/filtered_search/droplab/constants';
+import DropDown from '~/filtered_search/droplab/drop_down';
+import utils from '~/filtered_search/droplab/utils';
describe('DropLab DropDown', () => {
let testContext;
diff --git a/spec/frontend/droplab/hook_spec.js b/spec/frontend/filtered_search/droplab/hook_spec.js
index 0b897a570f6..0d92170cfcf 100644
--- a/spec/frontend/droplab/hook_spec.js
+++ b/spec/frontend/filtered_search/droplab/hook_spec.js
@@ -1,7 +1,7 @@
-import DropDown from '~/droplab/drop_down';
-import Hook from '~/droplab/hook';
+import DropDown from '~/filtered_search/droplab/drop_down';
+import Hook from '~/filtered_search/droplab/hook';
-jest.mock('~/droplab/drop_down', () => jest.fn());
+jest.mock('~/filtered_search/droplab/drop_down', () => jest.fn());
describe('Hook', () => {
let testContext;
diff --git a/spec/frontend/droplab/plugins/ajax_filter_spec.js b/spec/frontend/filtered_search/droplab/plugins/ajax_filter_spec.js
index d442d5cf416..88b3fc236e4 100644
--- a/spec/frontend/droplab/plugins/ajax_filter_spec.js
+++ b/spec/frontend/filtered_search/droplab/plugins/ajax_filter_spec.js
@@ -1,4 +1,4 @@
-import AjaxFilter from '~/droplab/plugins/ajax_filter';
+import AjaxFilter from '~/filtered_search/droplab/plugins/ajax_filter';
import AjaxCache from '~/lib/utils/ajax_cache';
describe('AjaxFilter', () => {
diff --git a/spec/frontend/droplab/plugins/ajax_spec.js b/spec/frontend/filtered_search/droplab/plugins/ajax_spec.js
index 7c6452e8337..c968b982091 100644
--- a/spec/frontend/droplab/plugins/ajax_spec.js
+++ b/spec/frontend/filtered_search/droplab/plugins/ajax_spec.js
@@ -1,4 +1,4 @@
-import Ajax from '~/droplab/plugins/ajax';
+import Ajax from '~/filtered_search/droplab/plugins/ajax';
import AjaxCache from '~/lib/utils/ajax_cache';
describe('Ajax', () => {
diff --git a/spec/frontend/droplab/plugins/input_setter_spec.js b/spec/frontend/filtered_search/droplab/plugins/input_setter_spec.js
index eebde018fa1..811b5ca4573 100644
--- a/spec/frontend/droplab/plugins/input_setter_spec.js
+++ b/spec/frontend/filtered_search/droplab/plugins/input_setter_spec.js
@@ -1,4 +1,4 @@
-import InputSetter from '~/droplab/plugins/input_setter';
+import InputSetter from '~/filtered_search/droplab/plugins/input_setter';
describe('InputSetter', () => {
let testContext;
diff --git a/spec/frontend/filtered_search/visual_token_value_spec.js b/spec/frontend/filtered_search/visual_token_value_spec.js
index 7185f382fc1..8ac5b6fbea6 100644
--- a/spec/frontend/filtered_search/visual_token_value_spec.js
+++ b/spec/frontend/filtered_search/visual_token_value_spec.js
@@ -1,4 +1,5 @@
import { escape } from 'lodash';
+import labelData from 'test_fixtures/labels/project_labels.json';
import FilteredSearchSpecHelper from 'helpers/filtered_search_spec_helper';
import { TEST_HOST } from 'helpers/test_constants';
import DropdownUtils from '~/filtered_search/dropdown_utils';
@@ -132,15 +133,8 @@ describe('Filtered Search Visual Tokens', () => {
});
describe('updateLabelTokenColor', () => {
- const jsonFixtureName = 'labels/project_labels.json';
const dummyEndpoint = '/dummy/endpoint';
- let labelData;
-
- beforeAll(() => {
- labelData = getJSONFixture(jsonFixtureName);
- });
-
const missingLabelToken = FilteredSearchSpecHelper.createFilterVisualToken(
'label',
'=',
diff --git a/spec/frontend/fixtures/abuse_reports.rb b/spec/frontend/fixtures/abuse_reports.rb
index f5524a10033..d8c8737b125 100644
--- a/spec/frontend/fixtures/abuse_reports.rb
+++ b/spec/frontend/fixtures/abuse_reports.rb
@@ -13,10 +13,6 @@ RSpec.describe Admin::AbuseReportsController, '(JavaScript fixtures)', type: :co
render_views
- before(:all) do
- clean_frontend_fixtures('abuse_reports/')
- end
-
before do
sign_in(admin)
enable_admin_mode!(admin)
diff --git a/spec/frontend/fixtures/admin_users.rb b/spec/frontend/fixtures/admin_users.rb
index e0fecbdb1aa..5579f50da74 100644
--- a/spec/frontend/fixtures/admin_users.rb
+++ b/spec/frontend/fixtures/admin_users.rb
@@ -17,10 +17,6 @@ RSpec.describe Admin::UsersController, '(JavaScript fixtures)', type: :controlle
render_views
- before(:all) do
- clean_frontend_fixtures('admin/users')
- end
-
it 'admin/users/new_with_internal_user_regex.html' do
stub_application_setting(user_default_external: true)
stub_application_setting(user_default_internal_regex: '^(?:(?!\.ext@).)*$\r?')
diff --git a/spec/frontend/fixtures/analytics.rb b/spec/frontend/fixtures/analytics.rb
index 6d106dce166..b6a5ea6616d 100644
--- a/spec/frontend/fixtures/analytics.rb
+++ b/spec/frontend/fixtures/analytics.rb
@@ -6,10 +6,6 @@ RSpec.describe 'Analytics (JavaScript fixtures)', :sidekiq_inline do
let_it_be(:value_stream_id) { 'default' }
- before(:all) do
- clean_frontend_fixtures('projects/analytics/value_stream_analytics/')
- end
-
before do
update_metrics
create_deployment
@@ -26,7 +22,7 @@ RSpec.describe 'Analytics (JavaScript fixtures)', :sidekiq_inline do
sign_in(user)
end
- it 'projects/analytics/value_stream_analytics/stages' do
+ it 'projects/analytics/value_stream_analytics/stages.json' do
get(:index, params: params, format: :json)
expect(response).to be_successful
@@ -44,7 +40,7 @@ RSpec.describe 'Analytics (JavaScript fixtures)', :sidekiq_inline do
end
Gitlab::Analytics::CycleAnalytics::DefaultStages.all.each do |stage|
- it "projects/analytics/value_stream_analytics/events/#{stage[:name]}" do
+ it "projects/analytics/value_stream_analytics/events/#{stage[:name]}.json" do
get(stage[:name], params: params, format: :json)
expect(response).to be_successful
@@ -62,7 +58,7 @@ RSpec.describe 'Analytics (JavaScript fixtures)', :sidekiq_inline do
sign_in(user)
end
- it "projects/analytics/value_stream_analytics/summary" do
+ it "projects/analytics/value_stream_analytics/summary.json" do
get(:show, params: params, format: :json)
expect(response).to be_successful
diff --git a/spec/frontend/fixtures/api_markdown.rb b/spec/frontend/fixtures/api_markdown.rb
index cb9a116f293..89f012a5110 100644
--- a/spec/frontend/fixtures/api_markdown.rb
+++ b/spec/frontend/fixtures/api_markdown.rb
@@ -21,11 +21,7 @@ RSpec.describe API::MergeRequests, '(JavaScript fixtures)', type: :request do
let(:project_wiki_page) { create(:wiki_page, wiki: project_wiki) }
- fixture_subdir = 'api/markdown'
-
before(:all) do
- clean_frontend_fixtures(fixture_subdir)
-
group.add_owner(user)
project.add_maintainer(user)
end
@@ -49,7 +45,7 @@ RSpec.describe API::MergeRequests, '(JavaScript fixtures)', type: :request do
name = "#{context}_#{name}" unless context.empty?
- it "#{fixture_subdir}/#{name}.json" do
+ it "api/markdown/#{name}.json" do
api_url = case context
when 'project'
"/#{project.full_path}/preview_markdown"
diff --git a/spec/frontend/fixtures/api_markdown.yml b/spec/frontend/fixtures/api_markdown.yml
index 1edb8cb3f41..45f73260887 100644
--- a/spec/frontend/fixtures/api_markdown.yml
+++ b/spec/frontend/fixtures/api_markdown.yml
@@ -77,6 +77,35 @@
</dd>
</dl>
+- name: details
+ markdown: |-
+ <details>
+ <summary>Apply this patch</summary>
+
+ ```diff
+ diff --git a/spec/frontend/fixtures/api_markdown.yml b/spec/frontend/fixtures/api_markdown.yml
+ index 8433efaf00c..69b12c59d46 100644
+ --- a/spec/frontend/fixtures/api_markdown.yml
+ +++ b/spec/frontend/fixtures/api_markdown.yml
+ @@ -33,6 +33,13 @@
+ * <ruby>漢<rt>ㄏㄢˋ</rt></ruby>
+ * C<sub>7</sub>H<sub>16</sub> + O<sub>2</sub> → CO<sub>2</sub> + H<sub>2</sub>O
+ * The **Pythagorean theorem** is often expressed as <var>a<sup>2</sup></var> + <var>b<sup>2</sup></var> = <var>c<sup>2</sup></var>.The **Pythagorean theorem** is often expressed as <var>a<sup>2</sup></var> + <var>b<sup>2</sup></var> = <var>c<sup>2</sup></var>
+ +- name: details
+ + markdown: |-
+ + <details>
+ + <summary>Apply this patch</summary>
+ +
+ + 🐶 much meta, 🐶 many patch
+ + 🐶 such diff, 🐶 very meme
+ + 🐶 wow!
+ + </details>
+ - name: link
+ markdown: '[GitLab](https://gitlab.com)'
+ - name: attachment_link
+ ```
+
+ </details>
- name: link
markdown: '[GitLab](https://gitlab.com)'
- name: attachment_link
@@ -204,3 +233,57 @@
* [x] ![Sample Audio](https://gitlab.com/1.mp3)
* [x] ![Sample Audio](https://gitlab.com/2.mp3)
* [x] ![Sample Video](https://gitlab.com/3.mp4)
+- name: table_of_contents
+ markdown: |-
+ [[_TOC_]]
+
+ # Lorem
+
+ Well, that's just like... your opinion.. man.
+
+ ## Ipsum
+
+ ### Dolar
+
+ # Sit amit
+
+ ### I don't know
+- name: word_break
+ markdown: Fernstraßen<wbr>bau<wbr>privat<wbr>finanzierungs<wbr>gesetz
+- name: frontmatter_yaml
+ markdown: |-
+ ---
+ title: Page title
+ ---
+- name: frontmatter_toml
+ markdown: |-
+ +++
+ title = "Page title"
+ +++
+- name: frontmatter_json
+ markdown: |-
+ ;;;
+ {
+ "title": "Page title"
+ }
+ ;;;
+- name: color_chips
+ markdown: |-
+ - `#F00`
+ - `#F00A`
+ - `#FF0000`
+ - `#FF0000AA`
+ - `RGB(0,255,0)`
+ - `RGB(0%,100%,0%)`
+ - `RGBA(0,255,0,0.3)`
+ - `HSL(540,70%,50%)`
+ - `HSLA(540,70%,50%,0.3)`
+- name: math
+ markdown: |-
+ This math is inline $`a^2+b^2=c^2`$.
+
+ This is on a separate line:
+
+ ```math
+ a^2+b^2=c^2
+ ```
diff --git a/spec/frontend/fixtures/api_merge_requests.rb b/spec/frontend/fixtures/api_merge_requests.rb
index 7117c9a1c7a..47321fbbeaa 100644
--- a/spec/frontend/fixtures/api_merge_requests.rb
+++ b/spec/frontend/fixtures/api_merge_requests.rb
@@ -11,10 +11,6 @@ RSpec.describe API::MergeRequests, '(JavaScript fixtures)', type: :request do
let_it_be(:project) { create(:project, :repository, namespace: namespace, path: 'lorem-ipsum') }
let_it_be(:mr) { create(:merge_request, source_project: project) }
- before(:all) do
- clean_frontend_fixtures('api/merge_requests')
- end
-
it 'api/merge_requests/get.json' do
4.times { |i| create(:merge_request, source_project: project, source_branch: "branch-#{i}") }
diff --git a/spec/frontend/fixtures/api_projects.rb b/spec/frontend/fixtures/api_projects.rb
index fa77ca1c0cf..eada2f8e0f7 100644
--- a/spec/frontend/fixtures/api_projects.rb
+++ b/spec/frontend/fixtures/api_projects.rb
@@ -11,10 +11,6 @@ RSpec.describe API::Projects, '(JavaScript fixtures)', type: :request do
let(:project) { create(:project, :repository, namespace: namespace, path: 'lorem-ipsum') }
let(:project_empty) { create(:project_empty_repo, namespace: namespace, path: 'lorem-ipsum-empty') }
- before(:all) do
- clean_frontend_fixtures('api/projects')
- end
-
it 'api/projects/get.json' do
get api("/projects/#{project.id}", admin)
diff --git a/spec/frontend/fixtures/application_settings.rb b/spec/frontend/fixtures/application_settings.rb
index b09bea56b94..9fa8d68e695 100644
--- a/spec/frontend/fixtures/application_settings.rb
+++ b/spec/frontend/fixtures/application_settings.rb
@@ -19,10 +19,6 @@ RSpec.describe Admin::ApplicationSettingsController, '(JavaScript fixtures)', ty
render_views
- before(:all) do
- clean_frontend_fixtures('application_settings/')
- end
-
after do
remove_repository(project)
end
diff --git a/spec/frontend/fixtures/autocomplete.rb b/spec/frontend/fixtures/autocomplete.rb
index 8983e241aa5..6215fa44e27 100644
--- a/spec/frontend/fixtures/autocomplete.rb
+++ b/spec/frontend/fixtures/autocomplete.rb
@@ -11,10 +11,6 @@ RSpec.describe ::AutocompleteController, '(JavaScript fixtures)', type: :control
let(:project) { create(:project, namespace: group, path: 'autocomplete-project') }
let(:merge_request) { create(:merge_request, source_project: project, author: user) }
- before(:all) do
- clean_frontend_fixtures('autocomplete/')
- end
-
before do
group.add_owner(user)
sign_in(user)
diff --git a/spec/frontend/fixtures/autocomplete_sources.rb b/spec/frontend/fixtures/autocomplete_sources.rb
index 9ff0f959c11..74bf58cc106 100644
--- a/spec/frontend/fixtures/autocomplete_sources.rb
+++ b/spec/frontend/fixtures/autocomplete_sources.rb
@@ -10,10 +10,6 @@ RSpec.describe Projects::AutocompleteSourcesController, '(JavaScript fixtures)',
let_it_be(:project) { create(:project, namespace: group, path: 'autocomplete-sources-project') }
let_it_be(:issue) { create(:issue, project: project) }
- before(:all) do
- clean_frontend_fixtures('autocomplete_sources/')
- end
-
before do
group.add_owner(user)
sign_in(user)
diff --git a/spec/frontend/fixtures/blob.rb b/spec/frontend/fixtures/blob.rb
index b112886b2ca..f90e3662e98 100644
--- a/spec/frontend/fixtures/blob.rb
+++ b/spec/frontend/fixtures/blob.rb
@@ -11,10 +11,6 @@ RSpec.describe Projects::BlobController, '(JavaScript fixtures)', type: :control
render_views
- before(:all) do
- clean_frontend_fixtures('blob/')
- end
-
before do
sign_in(user)
allow(SecureRandom).to receive(:hex).and_return('securerandomhex:thereisnospoon')
diff --git a/spec/frontend/fixtures/branches.rb b/spec/frontend/fixtures/branches.rb
index f3b3633347d..828564977e0 100644
--- a/spec/frontend/fixtures/branches.rb
+++ b/spec/frontend/fixtures/branches.rb
@@ -9,11 +9,6 @@ RSpec.describe 'Branches (JavaScript fixtures)' do
let_it_be(:project) { create(:project, :repository, namespace: namespace, path: 'branches-project') }
let_it_be(:user) { project.owner }
- before(:all) do
- clean_frontend_fixtures('branches/')
- clean_frontend_fixtures('api/branches/')
- end
-
after(:all) do
remove_repository(project)
end
diff --git a/spec/frontend/fixtures/clusters.rb b/spec/frontend/fixtures/clusters.rb
index b37aa137504..ea883555255 100644
--- a/spec/frontend/fixtures/clusters.rb
+++ b/spec/frontend/fixtures/clusters.rb
@@ -12,10 +12,6 @@ RSpec.describe Projects::ClustersController, '(JavaScript fixtures)', type: :con
render_views
- before(:all) do
- clean_frontend_fixtures('clusters/')
- end
-
before do
sign_in(user)
end
diff --git a/spec/frontend/fixtures/commit.rb b/spec/frontend/fixtures/commit.rb
index ff62a8286fc..f9e0f604b52 100644
--- a/spec/frontend/fixtures/commit.rb
+++ b/spec/frontend/fixtures/commit.rb
@@ -9,11 +9,6 @@ RSpec.describe 'Commit (JavaScript fixtures)' do
let_it_be(:user) { project.owner }
let_it_be(:commit) { project.commit("master") }
- before(:all) do
- clean_frontend_fixtures('commit/')
- clean_frontend_fixtures('api/commits/')
- end
-
before do
allow(SecureRandom).to receive(:hex).and_return('securerandomhex:thereisnospoon')
end
diff --git a/spec/frontend/fixtures/deploy_keys.rb b/spec/frontend/fixtures/deploy_keys.rb
index 5c24c071792..bed6c798793 100644
--- a/spec/frontend/fixtures/deploy_keys.rb
+++ b/spec/frontend/fixtures/deploy_keys.rb
@@ -13,10 +13,6 @@ RSpec.describe Projects::DeployKeysController, '(JavaScript fixtures)', type: :c
let(:project3) { create(:project, :internal)}
let(:project4) { create(:project, :internal)}
- before(:all) do
- clean_frontend_fixtures('deploy_keys/')
- end
-
before do
# Using an admin for these fixtures because they are used for verifying a frontend
# component that would normally get its data from `Admin::DeployKeysController`
diff --git a/spec/frontend/fixtures/freeze_period.rb b/spec/frontend/fixtures/freeze_period.rb
index 42762fa56f9..d9573c8000d 100644
--- a/spec/frontend/fixtures/freeze_period.rb
+++ b/spec/frontend/fixtures/freeze_period.rb
@@ -9,10 +9,6 @@ RSpec.describe 'Freeze Periods (JavaScript fixtures)' do
let_it_be(:project) { create(:project, :repository, path: 'freeze-periods-project') }
let_it_be(:user) { project.owner }
- before(:all) do
- clean_frontend_fixtures('api/freeze-periods/')
- end
-
after(:all) do
remove_repository(project)
end
diff --git a/spec/frontend/fixtures/groups.rb b/spec/frontend/fixtures/groups.rb
index 42aad9f187e..ddd436b98c6 100644
--- a/spec/frontend/fixtures/groups.rb
+++ b/spec/frontend/fixtures/groups.rb
@@ -8,10 +8,6 @@ RSpec.describe 'Groups (JavaScript fixtures)', type: :controller do
let(:user) { create(:user) }
let(:group) { create(:group, name: 'frontend-fixtures-group', runners_token: 'runnerstoken:intabulasreferre')}
- before(:all) do
- clean_frontend_fixtures('groups/')
- end
-
before do
group.add_owner(user)
sign_in(user)
diff --git a/spec/frontend/fixtures/issues.rb b/spec/frontend/fixtures/issues.rb
index a027247bd0d..6519416cb9e 100644
--- a/spec/frontend/fixtures/issues.rb
+++ b/spec/frontend/fixtures/issues.rb
@@ -11,10 +11,6 @@ RSpec.describe Projects::IssuesController, '(JavaScript fixtures)', type: :contr
render_views
- before(:all) do
- clean_frontend_fixtures('issues/')
- end
-
before do
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/frontend/fixtures/jobs.rb b/spec/frontend/fixtures/jobs.rb
index 22179c790bd..12584f38629 100644
--- a/spec/frontend/fixtures/jobs.rb
+++ b/spec/frontend/fixtures/jobs.rb
@@ -21,10 +21,6 @@ RSpec.describe Projects::JobsController, '(JavaScript fixtures)', type: :control
render_views
- before(:all) do
- clean_frontend_fixtures('jobs/')
- end
-
before do
sign_in(user)
end
diff --git a/spec/frontend/fixtures/labels.rb b/spec/frontend/fixtures/labels.rb
index d7ca2aff18c..6736baed199 100644
--- a/spec/frontend/fixtures/labels.rb
+++ b/spec/frontend/fixtures/labels.rb
@@ -17,10 +17,6 @@ RSpec.describe 'Labels (JavaScript fixtures)' do
let!(:groub_label_space) { create(:group_label, group: group, title: 'some space', color: '#FFFFFF') }
let!(:groub_label_violets) { create(:group_label, group: group, title: 'violets', color: '#0000FF') }
- before(:all) do
- clean_frontend_fixtures('labels/')
- end
-
after do
remove_repository(project)
end
diff --git a/spec/frontend/fixtures/merge_requests.rb b/spec/frontend/fixtures/merge_requests.rb
index f10f96f2516..68ed2ca2359 100644
--- a/spec/frontend/fixtures/merge_requests.rb
+++ b/spec/frontend/fixtures/merge_requests.rb
@@ -49,10 +49,6 @@ RSpec.describe Projects::MergeRequestsController, '(JavaScript fixtures)', type:
render_views
- before(:all) do
- clean_frontend_fixtures('merge_requests/')
- end
-
before do
sign_in(user)
allow(Discussion).to receive(:build_discussion_id).and_return(['discussionid:ceterumcenseo'])
diff --git a/spec/frontend/fixtures/merge_requests_diffs.rb b/spec/frontend/fixtures/merge_requests_diffs.rb
index edf1fcf3c0a..e733764f248 100644
--- a/spec/frontend/fixtures/merge_requests_diffs.rb
+++ b/spec/frontend/fixtures/merge_requests_diffs.rb
@@ -20,10 +20,6 @@ RSpec.describe Projects::MergeRequests::DiffsController, '(JavaScript fixtures)'
render_views
- before(:all) do
- clean_frontend_fixtures('merge_request_diffs/')
- end
-
before do
# Create a user that matches the project.commit author
# This is so that the "author" information will be populated
diff --git a/spec/frontend/fixtures/metrics_dashboard.rb b/spec/frontend/fixtures/metrics_dashboard.rb
index eef79825ae7..d59b01b04af 100644
--- a/spec/frontend/fixtures/metrics_dashboard.rb
+++ b/spec/frontend/fixtures/metrics_dashboard.rb
@@ -12,10 +12,6 @@ RSpec.describe MetricsDashboard, '(JavaScript fixtures)', type: :controller do
let_it_be(:environment) { create(:environment, id: 1, project: project) }
let_it_be(:params) { { environment: environment } }
- before(:all) do
- clean_frontend_fixtures('metrics_dashboard/')
- end
-
controller(::ApplicationController) do
include MetricsDashboard
end
diff --git a/spec/frontend/fixtures/pipeline_schedules.rb b/spec/frontend/fixtures/pipeline_schedules.rb
index a7d43fdbe62..6389f59aa0a 100644
--- a/spec/frontend/fixtures/pipeline_schedules.rb
+++ b/spec/frontend/fixtures/pipeline_schedules.rb
@@ -15,10 +15,6 @@ RSpec.describe Projects::PipelineSchedulesController, '(JavaScript fixtures)', t
render_views
- before(:all) do
- clean_frontend_fixtures('pipeline_schedules/')
- end
-
before do
sign_in(user)
end
diff --git a/spec/frontend/fixtures/pipelines.rb b/spec/frontend/fixtures/pipelines.rb
index f695b74ec87..709e14183df 100644
--- a/spec/frontend/fixtures/pipelines.rb
+++ b/spec/frontend/fixtures/pipelines.rb
@@ -23,10 +23,6 @@ RSpec.describe Projects::PipelinesController, '(JavaScript fixtures)', type: :co
let!(:build_test) { create(:ci_build, pipeline: pipeline, stage: 'test') }
let!(:build_deploy_failed) { create(:ci_build, status: :failed, pipeline: pipeline, stage: 'deploy') }
- before(:all) do
- clean_frontend_fixtures('pipelines/')
- end
-
before do
sign_in(user)
end
diff --git a/spec/frontend/fixtures/projects.rb b/spec/frontend/fixtures/projects.rb
index 7873d59dbad..3c8964d398a 100644
--- a/spec/frontend/fixtures/projects.rb
+++ b/spec/frontend/fixtures/projects.rb
@@ -16,10 +16,6 @@ RSpec.describe 'Projects (JavaScript fixtures)', type: :controller do
render_views
- before(:all) do
- clean_frontend_fixtures('projects/')
- end
-
before do
project_with_repo.add_maintainer(user)
sign_in(user)
@@ -57,10 +53,6 @@ RSpec.describe 'Projects (JavaScript fixtures)', type: :controller do
project_variable_populated.add_maintainer(user)
end
- before(:all) do
- clean_frontend_fixtures('graphql/projects/access_tokens')
- end
-
base_input_path = 'access_tokens/graphql/queries/'
base_output_path = 'graphql/projects/access_tokens/'
query_name = 'get_projects.query.graphql'
diff --git a/spec/frontend/fixtures/projects_json.rb b/spec/frontend/fixtures/projects_json.rb
index c081d4f08dc..c4de56ccfab 100644
--- a/spec/frontend/fixtures/projects_json.rb
+++ b/spec/frontend/fixtures/projects_json.rb
@@ -8,10 +8,6 @@ RSpec.describe 'Projects JSON endpoints (JavaScript fixtures)', type: :controlle
let(:admin) { create(:admin, name: 'root') }
let(:project) { create(:project, :repository) }
- before(:all) do
- clean_frontend_fixtures('projects_json/')
- end
-
before do
project.add_maintainer(admin)
sign_in(admin)
diff --git a/spec/frontend/fixtures/prometheus_service.rb b/spec/frontend/fixtures/prometheus_service.rb
index c349f2a24bc..bbd938c66f6 100644
--- a/spec/frontend/fixtures/prometheus_service.rb
+++ b/spec/frontend/fixtures/prometheus_service.rb
@@ -12,10 +12,6 @@ RSpec.describe Projects::ServicesController, '(JavaScript fixtures)', type: :con
render_views
- before(:all) do
- clean_frontend_fixtures('services/prometheus')
- end
-
before do
sign_in(user)
end
diff --git a/spec/frontend/fixtures/raw.rb b/spec/frontend/fixtures/raw.rb
index 44927bd29d8..211c4e7c048 100644
--- a/spec/frontend/fixtures/raw.rb
+++ b/spec/frontend/fixtures/raw.rb
@@ -9,14 +9,6 @@ RSpec.describe 'Raw files', '(JavaScript fixtures)' do
let(:project) { create(:project, :repository, namespace: namespace, path: 'raw-project') }
let(:response) { @blob.data.force_encoding('UTF-8') }
- before(:all) do
- clean_frontend_fixtures('blob/notebook/')
- clean_frontend_fixtures('blob/pdf/')
- clean_frontend_fixtures('blob/text/')
- clean_frontend_fixtures('blob/binary/')
- clean_frontend_fixtures('blob/images/')
- end
-
after do
remove_repository(project)
end
diff --git a/spec/frontend/fixtures/releases.rb b/spec/frontend/fixtures/releases.rb
index e8f259fba15..fc344472588 100644
--- a/spec/frontend/fixtures/releases.rb
+++ b/spec/frontend/fixtures/releases.rb
@@ -116,10 +116,6 @@ RSpec.describe 'Releases (JavaScript fixtures)' do
end
describe API::Releases, type: :request do
- before(:all) do
- clean_frontend_fixtures('api/releases/')
- end
-
it 'api/releases/release.json' do
get api("/projects/#{project.id}/releases/#{release.tag}", admin)
@@ -134,10 +130,6 @@ RSpec.describe 'Releases (JavaScript fixtures)' do
one_release_query_path = 'releases/graphql/queries/one_release.query.graphql'
one_release_for_editing_query_path = 'releases/graphql/queries/one_release_for_editing.query.graphql'
- before(:all) do
- clean_frontend_fixtures('graphql/releases/')
- end
-
it "graphql/#{all_releases_query_path}.json" do
query = get_graphql_query_as_string(all_releases_query_path)
diff --git a/spec/frontend/fixtures/runner.rb b/spec/frontend/fixtures/runner.rb
index d5d6f534def..fa150fbf57c 100644
--- a/spec/frontend/fixtures/runner.rb
+++ b/spec/frontend/fixtures/runner.rb
@@ -20,10 +20,6 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
query_path = 'runner/graphql/'
fixtures_path = 'graphql/runner/'
- before(:all) do
- clean_frontend_fixtures(fixtures_path)
- end
-
after(:all) do
remove_repository(project)
end
diff --git a/spec/frontend/fixtures/search.rb b/spec/frontend/fixtures/search.rb
index 264ce7d010c..db1ef67998f 100644
--- a/spec/frontend/fixtures/search.rb
+++ b/spec/frontend/fixtures/search.rb
@@ -9,10 +9,6 @@ RSpec.describe SearchController, '(JavaScript fixtures)', type: :controller do
let_it_be(:user) { create(:user) }
- before(:all) do
- clean_frontend_fixtures('search/')
- end
-
before do
sign_in(user)
end
diff --git a/spec/frontend/fixtures/services.rb b/spec/frontend/fixtures/services.rb
index 91e6c2eb280..a8293a080a9 100644
--- a/spec/frontend/fixtures/services.rb
+++ b/spec/frontend/fixtures/services.rb
@@ -12,10 +12,6 @@ RSpec.describe Projects::ServicesController, '(JavaScript fixtures)', type: :con
render_views
- before(:all) do
- clean_frontend_fixtures('services/')
- end
-
before do
sign_in(user)
end
diff --git a/spec/frontend/fixtures/sessions.rb b/spec/frontend/fixtures/sessions.rb
index 0ef14c1d4fa..bb73bf3215c 100644
--- a/spec/frontend/fixtures/sessions.rb
+++ b/spec/frontend/fixtures/sessions.rb
@@ -5,10 +5,6 @@ require 'spec_helper'
RSpec.describe 'Sessions (JavaScript fixtures)' do
include JavaScriptFixturesHelpers
- before(:all) do
- clean_frontend_fixtures('sessions/')
- end
-
describe SessionsController, '(JavaScript fixtures)', type: :controller do
include DeviseHelpers
diff --git a/spec/frontend/fixtures/snippet.rb b/spec/frontend/fixtures/snippet.rb
index 5211d52f374..397fb3e7124 100644
--- a/spec/frontend/fixtures/snippet.rb
+++ b/spec/frontend/fixtures/snippet.rb
@@ -12,10 +12,6 @@ RSpec.describe SnippetsController, '(JavaScript fixtures)', type: :controller do
render_views
- before(:all) do
- clean_frontend_fixtures('snippets/')
- end
-
before do
sign_in(user)
allow(Discussion).to receive(:build_discussion_id).and_return(['discussionid:ceterumcenseo'])
diff --git a/spec/frontend/fixtures/startup_css.rb b/spec/frontend/fixtures/startup_css.rb
index 1bd99f5cd7f..e19a98c3bab 100644
--- a/spec/frontend/fixtures/startup_css.rb
+++ b/spec/frontend/fixtures/startup_css.rb
@@ -9,15 +9,15 @@ RSpec.describe 'Startup CSS fixtures', type: :controller do
render_views
- before(:all) do
- clean_frontend_fixtures('startup_css/')
- end
-
shared_examples 'startup css project fixtures' do |type|
let(:user) { create(:user, :admin) }
let(:project) { create(:project, :public, :repository, description: 'Code and stuff', creator: user) }
before do
+ # We want vNext badge to be included and com/canary don't remove/hide any other elements.
+ # This is why we're turning com and canary on by default for now.
+ allow(Gitlab).to receive(:com?).and_return(true)
+ allow(Gitlab).to receive(:canary?).and_return(true)
sign_in(user)
end
diff --git a/spec/frontend/fixtures/static/oauth_remember_me.html b/spec/frontend/fixtures/static/oauth_remember_me.html
index c6af8129b4d..0b4d482925d 100644
--- a/spec/frontend/fixtures/static/oauth_remember_me.html
+++ b/spec/frontend/fixtures/static/oauth_remember_me.html
@@ -1,22 +1,21 @@
<div id="oauth-container">
-<input id="remember_me" type="checkbox">
+ <input id="remember_me" type="checkbox" />
-<form method="post" action="http://example.com/">
- <button class="oauth-login twitter" type="submit">
- <span>Twitter</span>
- </button>
-</form>
+ <form method="post" action="http://example.com/">
+ <button class="js-oauth-login twitter" type="submit">
+ <span>Twitter</span>
+ </button>
+ </form>
-<form method="post" action="http://example.com/">
- <button class="oauth-login github" type="submit">
- <span>GitHub</span>
- </button>
-</form>
-
-<form method="post" action="http://example.com/?redirect_fragment=L1">
- <button class="oauth-login facebook" type="submit">
- <span>Facebook</span>
- </button>
-</form>
+ <form method="post" action="http://example.com/">
+ <button class="js-oauth-login github" type="submit">
+ <span>GitHub</span>
+ </button>
+ </form>
+ <form method="post" action="http://example.com/?redirect_fragment=L1">
+ <button class="js-oauth-login facebook" type="submit">
+ <span>Facebook</span>
+ </button>
+ </form>
</div>
diff --git a/spec/frontend/fixtures/tags.rb b/spec/frontend/fixtures/tags.rb
index 9483f0a4492..6cfa5f82efe 100644
--- a/spec/frontend/fixtures/tags.rb
+++ b/spec/frontend/fixtures/tags.rb
@@ -8,10 +8,6 @@ RSpec.describe 'Tags (JavaScript fixtures)' do
let_it_be(:project) { create(:project, :repository, path: 'tags-project') }
let_it_be(:user) { project.owner }
- before(:all) do
- clean_frontend_fixtures('api/tags/')
- end
-
after(:all) do
remove_repository(project)
end
diff --git a/spec/frontend/fixtures/timezones.rb b/spec/frontend/fixtures/timezones.rb
index 261dcf5e116..157f47855ea 100644
--- a/spec/frontend/fixtures/timezones.rb
+++ b/spec/frontend/fixtures/timezones.rb
@@ -8,10 +8,6 @@ RSpec.describe TimeZoneHelper, '(JavaScript fixtures)' do
let(:response) { @timezones.sort_by! { |tz| tz[:name] }.to_json }
- before(:all) do
- clean_frontend_fixtures('timezones/')
- end
-
it 'timezones/short.json' do
@timezones = timezone_data(format: :short)
end
diff --git a/spec/frontend/fixtures/todos.rb b/spec/frontend/fixtures/todos.rb
index 985afafe50e..a0573b0b658 100644
--- a/spec/frontend/fixtures/todos.rb
+++ b/spec/frontend/fixtures/todos.rb
@@ -13,10 +13,6 @@ RSpec.describe 'Todos (JavaScript fixtures)' do
let(:issue_2) { create(:issue, title: 'issue_2', project: project) }
let!(:todo_2) { create(:todo, :done, user: user, project: project, target: issue_2, created_at: 50.hours.ago) }
- before(:all) do
- clean_frontend_fixtures('todos/')
- end
-
after do
remove_repository(project)
end
diff --git a/spec/frontend/fixtures/u2f.rb b/spec/frontend/fixtures/u2f.rb
index a6a8ba7318b..96820c9ae80 100644
--- a/spec/frontend/fixtures/u2f.rb
+++ b/spec/frontend/fixtures/u2f.rb
@@ -7,10 +7,6 @@ RSpec.context 'U2F' do
let(:user) { create(:user, :two_factor_via_u2f, otp_secret: 'otpsecret:coolkids') }
- before(:all) do
- clean_frontend_fixtures('u2f/')
- end
-
before do
stub_feature_flags(webauthn: false)
end
diff --git a/spec/frontend/fixtures/webauthn.rb b/spec/frontend/fixtures/webauthn.rb
index b195fee76f0..c6e9b41b584 100644
--- a/spec/frontend/fixtures/webauthn.rb
+++ b/spec/frontend/fixtures/webauthn.rb
@@ -7,10 +7,6 @@ RSpec.context 'WebAuthn' do
let(:user) { create(:user, :two_factor_via_webauthn, otp_secret: 'otpsecret:coolkids') }
- before(:all) do
- clean_frontend_fixtures('webauthn/')
- end
-
describe SessionsController, '(JavaScript fixtures)', type: :controller do
include DeviseHelpers
diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js
index 94ad7759110..eb11df2fe43 100644
--- a/spec/frontend/gfm_auto_complete_spec.js
+++ b/spec/frontend/gfm_auto_complete_spec.js
@@ -1,17 +1,15 @@
/* eslint no-param-reassign: "off" */
import MockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
+import labelsFixture from 'test_fixtures/autocomplete_sources/labels.json';
import GfmAutoComplete, { membersBeforeSave } from 'ee_else_ce/gfm_auto_complete';
import { initEmojiMock } from 'helpers/emoji';
import '~/lib/utils/jquery_at_who';
-import { getJSONFixture } from 'helpers/fixtures';
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import AjaxCache from '~/lib/utils/ajax_cache';
import axios from '~/lib/utils/axios_utils';
-const labelsFixture = getJSONFixture('autocomplete_sources/labels.json');
-
describe('GfmAutoComplete', () => {
const fetchDataMock = { fetchData: jest.fn() };
let gfmAutoCompleteCallbacks = GfmAutoComplete.prototype.getDefaultCallbacks.call(fetchDataMock);
diff --git a/spec/frontend/header_search/components/app_spec.js b/spec/frontend/header_search/components/app_spec.js
index 2cbcb73ce5b..2ea2693a978 100644
--- a/spec/frontend/header_search/components/app_spec.js
+++ b/spec/frontend/header_search/components/app_spec.js
@@ -3,6 +3,7 @@ import Vue from 'vue';
import Vuex from 'vuex';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import HeaderSearchApp from '~/header_search/components/app.vue';
+import HeaderSearchAutocompleteItems from '~/header_search/components/header_search_autocomplete_items.vue';
import HeaderSearchDefaultItems from '~/header_search/components/header_search_default_items.vue';
import HeaderSearchScopedItems from '~/header_search/components/header_search_scoped_items.vue';
import { ENTER_KEY, ESC_KEY } from '~/lib/utils/keys';
@@ -20,6 +21,7 @@ describe('HeaderSearchApp', () => {
const actionSpies = {
setSearch: jest.fn(),
+ fetchAutocompleteOptions: jest.fn(),
};
const createComponent = (initialState) => {
@@ -46,6 +48,8 @@ describe('HeaderSearchApp', () => {
const findHeaderSearchDropdown = () => wrapper.findByTestId('header-search-dropdown-menu');
const findHeaderSearchDefaultItems = () => wrapper.findComponent(HeaderSearchDefaultItems);
const findHeaderSearchScopedItems = () => wrapper.findComponent(HeaderSearchScopedItems);
+ const findHeaderSearchAutocompleteItems = () =>
+ wrapper.findComponent(HeaderSearchAutocompleteItems);
describe('template', () => {
it('always renders Header Search Input', () => {
@@ -74,11 +78,11 @@ describe('HeaderSearchApp', () => {
});
describe.each`
- search | showDefault | showScoped
- ${null} | ${true} | ${false}
- ${''} | ${true} | ${false}
- ${MOCK_SEARCH} | ${false} | ${true}
- `('Header Search Dropdown Items', ({ search, showDefault, showScoped }) => {
+ search | showDefault | showScoped | showAutocomplete
+ ${null} | ${true} | ${false} | ${false}
+ ${''} | ${true} | ${false} | ${false}
+ ${MOCK_SEARCH} | ${false} | ${true} | ${true}
+ `('Header Search Dropdown Items', ({ search, showDefault, showScoped, showAutocomplete }) => {
describe(`when search is ${search}`, () => {
beforeEach(() => {
createComponent({ search });
@@ -93,6 +97,10 @@ describe('HeaderSearchApp', () => {
it(`should${showScoped ? '' : ' not'} render the Scoped Dropdown Items`, () => {
expect(findHeaderSearchScopedItems().exists()).toBe(showScoped);
});
+
+ it(`should${showAutocomplete ? '' : ' not'} render the Autocomplete Dropdown Items`, () => {
+ expect(findHeaderSearchAutocompleteItems().exists()).toBe(showAutocomplete);
+ });
});
});
});
@@ -139,12 +147,18 @@ describe('HeaderSearchApp', () => {
});
});
- it('calls setSearch when search input event is fired', async () => {
- findHeaderSearchInput().vm.$emit('input', MOCK_SEARCH);
+ describe('onInput', () => {
+ beforeEach(() => {
+ findHeaderSearchInput().vm.$emit('input', MOCK_SEARCH);
+ });
- await wrapper.vm.$nextTick();
+ it('calls setSearch with search term', () => {
+ expect(actionSpies.setSearch).toHaveBeenCalledWith(expect.any(Object), MOCK_SEARCH);
+ });
- expect(actionSpies.setSearch).toHaveBeenCalledWith(expect.any(Object), MOCK_SEARCH);
+ it('calls fetchAutocompleteOptions', () => {
+ expect(actionSpies.fetchAutocompleteOptions).toHaveBeenCalled();
+ });
});
it('submits a search onKey-Enter', async () => {
diff --git a/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js b/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js
new file mode 100644
index 00000000000..6b84e63989d
--- /dev/null
+++ b/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js
@@ -0,0 +1,108 @@
+import { GlDropdownItem, GlLoadingIcon, GlAvatar } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import Vuex from 'vuex';
+import HeaderSearchAutocompleteItems from '~/header_search/components/header_search_autocomplete_items.vue';
+import {
+ GROUPS_CATEGORY,
+ LARGE_AVATAR_PX,
+ PROJECTS_CATEGORY,
+ SMALL_AVATAR_PX,
+} from '~/header_search/constants';
+import { MOCK_GROUPED_AUTOCOMPLETE_OPTIONS, MOCK_AUTOCOMPLETE_OPTIONS } from '../mock_data';
+
+Vue.use(Vuex);
+
+describe('HeaderSearchAutocompleteItems', () => {
+ let wrapper;
+
+ const createComponent = (initialState, mockGetters) => {
+ const store = new Vuex.Store({
+ state: {
+ loading: false,
+ ...initialState,
+ },
+ getters: {
+ autocompleteGroupedSearchOptions: () => MOCK_GROUPED_AUTOCOMPLETE_OPTIONS,
+ ...mockGetters,
+ },
+ });
+
+ wrapper = shallowMount(HeaderSearchAutocompleteItems, {
+ store,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findDropdownItemTitles = () => findDropdownItems().wrappers.map((w) => w.text());
+ const findDropdownItemLinks = () => findDropdownItems().wrappers.map((w) => w.attributes('href'));
+ const findGlLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findGlAvatar = () => wrapper.findComponent(GlAvatar);
+
+ describe('template', () => {
+ describe('when loading is true', () => {
+ beforeEach(() => {
+ createComponent({ loading: true });
+ });
+
+ it('renders GlLoadingIcon', () => {
+ expect(findGlLoadingIcon().exists()).toBe(true);
+ });
+
+ it('does not render autocomplete options', () => {
+ expect(findDropdownItems()).toHaveLength(0);
+ });
+ });
+
+ describe('when loading is false', () => {
+ beforeEach(() => {
+ createComponent({ loading: false });
+ });
+
+ it('does not render GlLoadingIcon', () => {
+ expect(findGlLoadingIcon().exists()).toBe(false);
+ });
+
+ describe('Dropdown items', () => {
+ it('renders item for each option in autocomplete option', () => {
+ expect(findDropdownItems()).toHaveLength(MOCK_AUTOCOMPLETE_OPTIONS.length);
+ });
+
+ it('renders titles correctly', () => {
+ const expectedTitles = MOCK_AUTOCOMPLETE_OPTIONS.map((o) => o.label);
+ expect(findDropdownItemTitles()).toStrictEqual(expectedTitles);
+ });
+
+ it('renders links correctly', () => {
+ const expectedLinks = MOCK_AUTOCOMPLETE_OPTIONS.map((o) => o.url);
+ expect(findDropdownItemLinks()).toStrictEqual(expectedLinks);
+ });
+ });
+ describe.each`
+ item | showAvatar | avatarSize
+ ${{ data: [{ category: PROJECTS_CATEGORY, avatar_url: null }] }} | ${true} | ${String(LARGE_AVATAR_PX)}
+ ${{ data: [{ category: GROUPS_CATEGORY, avatar_url: '/123' }] }} | ${true} | ${String(LARGE_AVATAR_PX)}
+ ${{ data: [{ category: 'Help', avatar_url: '' }] }} | ${true} | ${String(SMALL_AVATAR_PX)}
+ ${{ data: [{ category: 'Settings' }] }} | ${false} | ${false}
+ `('GlAvatar', ({ item, showAvatar, avatarSize }) => {
+ describe(`when category is ${item.data[0].category} and avatar_url is ${item.data[0].avatar_url}`, () => {
+ beforeEach(() => {
+ createComponent({}, { autocompleteGroupedSearchOptions: () => [item] });
+ });
+
+ it(`should${showAvatar ? '' : ' not'} render`, () => {
+ expect(findGlAvatar().exists()).toBe(showAvatar);
+ });
+
+ it(`should set avatarSize to ${avatarSize}`, () => {
+ expect(findGlAvatar().exists() && findGlAvatar().attributes('size')).toBe(avatarSize);
+ });
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/header_search/mock_data.js b/spec/frontend/header_search/mock_data.js
index 5963ad9c279..915b3a4a678 100644
--- a/spec/frontend/header_search/mock_data.js
+++ b/spec/frontend/header_search/mock_data.js
@@ -19,6 +19,8 @@ export const MOCK_MR_PATH = '/dashboard/merge_requests';
export const MOCK_ALL_PATH = '/';
+export const MOCK_AUTOCOMPLETE_PATH = '/autocomplete';
+
export const MOCK_PROJECT = {
id: 123,
name: 'MockProject',
@@ -81,3 +83,70 @@ export const MOCK_SCOPED_SEARCH_OPTIONS = [
url: MOCK_ALL_PATH,
},
];
+
+export const MOCK_AUTOCOMPLETE_OPTIONS = [
+ {
+ category: 'Projects',
+ id: 1,
+ label: 'MockProject1',
+ url: 'project/1',
+ },
+ {
+ category: 'Projects',
+ id: 2,
+ label: 'MockProject2',
+ url: 'project/2',
+ },
+ {
+ category: 'Groups',
+ id: 1,
+ label: 'MockGroup1',
+ url: 'group/1',
+ },
+ {
+ category: 'Help',
+ label: 'GitLab Help',
+ url: 'help/gitlab',
+ },
+];
+
+export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS = [
+ {
+ category: 'Projects',
+ data: [
+ {
+ category: 'Projects',
+ id: 1,
+ label: 'MockProject1',
+ url: 'project/1',
+ },
+ {
+ category: 'Projects',
+ id: 2,
+ label: 'MockProject2',
+ url: 'project/2',
+ },
+ ],
+ },
+ {
+ category: 'Groups',
+ data: [
+ {
+ category: 'Groups',
+ id: 1,
+ label: 'MockGroup1',
+ url: 'group/1',
+ },
+ ],
+ },
+ {
+ category: 'Help',
+ data: [
+ {
+ category: 'Help',
+ label: 'GitLab Help',
+ url: 'help/gitlab',
+ },
+ ],
+ },
+];
diff --git a/spec/frontend/header_search/store/actions_spec.js b/spec/frontend/header_search/store/actions_spec.js
index 4530df0d91c..ee2c72df77b 100644
--- a/spec/frontend/header_search/store/actions_spec.js
+++ b/spec/frontend/header_search/store/actions_spec.js
@@ -1,18 +1,50 @@
+import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
+import createFlash from '~/flash';
import * as actions from '~/header_search/store/actions';
import * as types from '~/header_search/store/mutation_types';
import createState from '~/header_search/store/state';
-import { MOCK_SEARCH } from '../mock_data';
+import axios from '~/lib/utils/axios_utils';
+import { MOCK_SEARCH, MOCK_AUTOCOMPLETE_OPTIONS } from '../mock_data';
+
+jest.mock('~/flash');
describe('Header Search Store Actions', () => {
let state;
+ let mock;
+
+ const flashCallback = (callCount) => {
+ expect(createFlash).toHaveBeenCalledTimes(callCount);
+ createFlash.mockClear();
+ };
beforeEach(() => {
state = createState({});
+ mock = new MockAdapter(axios);
});
afterEach(() => {
state = null;
+ mock.restore();
+ });
+
+ describe.each`
+ axiosMock | type | expectedMutations | flashCallCount
+ ${{ method: 'onGet', code: 200, res: MOCK_AUTOCOMPLETE_OPTIONS }} | ${'success'} | ${[{ type: types.REQUEST_AUTOCOMPLETE }, { type: types.RECEIVE_AUTOCOMPLETE_SUCCESS, payload: MOCK_AUTOCOMPLETE_OPTIONS }]} | ${0}
+ ${{ method: 'onGet', code: 500, res: null }} | ${'error'} | ${[{ type: types.REQUEST_AUTOCOMPLETE }, { type: types.RECEIVE_AUTOCOMPLETE_ERROR }]} | ${1}
+ `('fetchAutocompleteOptions', ({ axiosMock, type, expectedMutations, flashCallCount }) => {
+ describe(`on ${type}`, () => {
+ beforeEach(() => {
+ mock[axiosMock.method]().replyOnce(axiosMock.code, axiosMock.res);
+ });
+ it(`should dispatch the correct mutations`, () => {
+ return testAction({
+ action: actions.fetchAutocompleteOptions,
+ state,
+ expectedMutations,
+ }).then(() => flashCallback(flashCallCount));
+ });
+ });
});
describe('setSearch', () => {
diff --git a/spec/frontend/header_search/store/getters_spec.js b/spec/frontend/header_search/store/getters_spec.js
index 2ad0a082f6a..d55db07188e 100644
--- a/spec/frontend/header_search/store/getters_spec.js
+++ b/spec/frontend/header_search/store/getters_spec.js
@@ -5,6 +5,7 @@ import {
MOCK_SEARCH_PATH,
MOCK_ISSUE_PATH,
MOCK_MR_PATH,
+ MOCK_AUTOCOMPLETE_PATH,
MOCK_SEARCH_CONTEXT,
MOCK_DEFAULT_SEARCH_OPTIONS,
MOCK_SCOPED_SEARCH_OPTIONS,
@@ -12,6 +13,8 @@ import {
MOCK_GROUP,
MOCK_ALL_PATH,
MOCK_SEARCH,
+ MOCK_AUTOCOMPLETE_OPTIONS,
+ MOCK_GROUPED_AUTOCOMPLETE_OPTIONS,
} from '../mock_data';
describe('Header Search Store Getters', () => {
@@ -22,6 +25,7 @@ describe('Header Search Store Getters', () => {
searchPath: MOCK_SEARCH_PATH,
issuesPath: MOCK_ISSUE_PATH,
mrPath: MOCK_MR_PATH,
+ autocompletePath: MOCK_AUTOCOMPLETE_PATH,
searchContext: MOCK_SEARCH_CONTEXT,
...initialState,
});
@@ -56,6 +60,29 @@ describe('Header Search Store Getters', () => {
});
describe.each`
+ project | ref | expectedPath
+ ${null} | ${null} | ${`${MOCK_AUTOCOMPLETE_PATH}?term=${MOCK_SEARCH}&project_id=undefined&project_ref=null`}
+ ${MOCK_PROJECT} | ${null} | ${`${MOCK_AUTOCOMPLETE_PATH}?term=${MOCK_SEARCH}&project_id=${MOCK_PROJECT.id}&project_ref=null`}
+ ${MOCK_PROJECT} | ${MOCK_PROJECT.id} | ${`${MOCK_AUTOCOMPLETE_PATH}?term=${MOCK_SEARCH}&project_id=${MOCK_PROJECT.id}&project_ref=${MOCK_PROJECT.id}`}
+ `('autocompleteQuery', ({ project, ref, expectedPath }) => {
+ describe(`when project is ${project?.name} and project ref is ${ref}`, () => {
+ beforeEach(() => {
+ createState({
+ searchContext: {
+ project,
+ ref,
+ },
+ });
+ state.search = MOCK_SEARCH;
+ });
+
+ it(`should return ${expectedPath}`, () => {
+ expect(getters.autocompleteQuery(state)).toBe(expectedPath);
+ });
+ });
+ });
+
+ describe.each`
group | group_metadata | project | project_metadata | expectedPath
${null} | ${null} | ${null} | ${null} | ${MOCK_ISSUE_PATH}
${{ name: 'Test Group' }} | ${{ issues_path: 'group/path' }} | ${null} | ${null} | ${'group/path'}
@@ -208,4 +235,17 @@ describe('Header Search Store Getters', () => {
);
});
});
+
+ describe('autocompleteGroupedSearchOptions', () => {
+ beforeEach(() => {
+ createState();
+ state.autocompleteOptions = MOCK_AUTOCOMPLETE_OPTIONS;
+ });
+
+ it('returns the correct grouped array', () => {
+ expect(getters.autocompleteGroupedSearchOptions(state)).toStrictEqual(
+ MOCK_GROUPED_AUTOCOMPLETE_OPTIONS,
+ );
+ });
+ });
});
diff --git a/spec/frontend/header_search/store/mutations_spec.js b/spec/frontend/header_search/store/mutations_spec.js
index 8196c06099d..7f9b7631a7e 100644
--- a/spec/frontend/header_search/store/mutations_spec.js
+++ b/spec/frontend/header_search/store/mutations_spec.js
@@ -1,7 +1,7 @@
import * as types from '~/header_search/store/mutation_types';
import mutations from '~/header_search/store/mutations';
import createState from '~/header_search/store/state';
-import { MOCK_SEARCH } from '../mock_data';
+import { MOCK_SEARCH, MOCK_AUTOCOMPLETE_OPTIONS } from '../mock_data';
describe('Header Search Store Mutations', () => {
let state;
@@ -10,6 +10,33 @@ describe('Header Search Store Mutations', () => {
state = createState({});
});
+ describe('REQUEST_AUTOCOMPLETE', () => {
+ it('sets loading to true and empties autocompleteOptions array', () => {
+ mutations[types.REQUEST_AUTOCOMPLETE](state);
+
+ expect(state.loading).toBe(true);
+ expect(state.autocompleteOptions).toStrictEqual([]);
+ });
+ });
+
+ describe('RECEIVE_AUTOCOMPLETE_SUCCESS', () => {
+ it('sets loading to false and sets autocompleteOptions array', () => {
+ mutations[types.RECEIVE_AUTOCOMPLETE_SUCCESS](state, MOCK_AUTOCOMPLETE_OPTIONS);
+
+ expect(state.loading).toBe(false);
+ expect(state.autocompleteOptions).toStrictEqual(MOCK_AUTOCOMPLETE_OPTIONS);
+ });
+ });
+
+ describe('RECEIVE_AUTOCOMPLETE_ERROR', () => {
+ it('sets loading to false and empties autocompleteOptions array', () => {
+ mutations[types.RECEIVE_AUTOCOMPLETE_ERROR](state);
+
+ expect(state.loading).toBe(false);
+ expect(state.autocompleteOptions).toStrictEqual([]);
+ });
+ });
+
describe('SET_SEARCH', () => {
it('sets search to value', () => {
mutations[types.SET_SEARCH](state, MOCK_SEARCH);
diff --git a/spec/frontend/ide/components/jobs/detail_spec.js b/spec/frontend/ide/components/jobs/detail_spec.js
index 79ac0a8122a..3634599f328 100644
--- a/spec/frontend/ide/components/jobs/detail_spec.js
+++ b/spec/frontend/ide/components/jobs/detail_spec.js
@@ -41,7 +41,7 @@ describe('IDE jobs detail view', () => {
});
it('scrolls to bottom', () => {
- expect(vm.$refs.buildTrace.scrollTo).toHaveBeenCalled();
+ expect(vm.$refs.buildJobLog.scrollTo).toHaveBeenCalled();
});
it('renders job output', () => {
@@ -125,15 +125,15 @@ describe('IDE jobs detail view', () => {
beforeEach(() => {
vm = vm.$mount();
- jest.spyOn(vm.$refs.buildTrace, 'scrollTo').mockImplementation();
+ jest.spyOn(vm.$refs.buildJobLog, 'scrollTo').mockImplementation();
});
it('scrolls build trace to bottom', () => {
- jest.spyOn(vm.$refs.buildTrace, 'scrollHeight', 'get').mockReturnValue(1000);
+ jest.spyOn(vm.$refs.buildJobLog, 'scrollHeight', 'get').mockReturnValue(1000);
vm.scrollDown();
- expect(vm.$refs.buildTrace.scrollTo).toHaveBeenCalledWith(0, 1000);
+ expect(vm.$refs.buildJobLog.scrollTo).toHaveBeenCalledWith(0, 1000);
});
});
@@ -141,26 +141,26 @@ describe('IDE jobs detail view', () => {
beforeEach(() => {
vm = vm.$mount();
- jest.spyOn(vm.$refs.buildTrace, 'scrollTo').mockImplementation();
+ jest.spyOn(vm.$refs.buildJobLog, 'scrollTo').mockImplementation();
});
it('scrolls build trace to top', () => {
vm.scrollUp();
- expect(vm.$refs.buildTrace.scrollTo).toHaveBeenCalledWith(0, 0);
+ expect(vm.$refs.buildJobLog.scrollTo).toHaveBeenCalledWith(0, 0);
});
});
describe('scrollBuildLog', () => {
beforeEach(() => {
vm = vm.$mount();
- jest.spyOn(vm.$refs.buildTrace, 'scrollTo').mockImplementation();
- jest.spyOn(vm.$refs.buildTrace, 'offsetHeight', 'get').mockReturnValue(100);
- jest.spyOn(vm.$refs.buildTrace, 'scrollHeight', 'get').mockReturnValue(200);
+ jest.spyOn(vm.$refs.buildJobLog, 'scrollTo').mockImplementation();
+ jest.spyOn(vm.$refs.buildJobLog, 'offsetHeight', 'get').mockReturnValue(100);
+ jest.spyOn(vm.$refs.buildJobLog, 'scrollHeight', 'get').mockReturnValue(200);
});
it('sets scrollPos to bottom when at the bottom', () => {
- jest.spyOn(vm.$refs.buildTrace, 'scrollTop', 'get').mockReturnValue(100);
+ jest.spyOn(vm.$refs.buildJobLog, 'scrollTop', 'get').mockReturnValue(100);
vm.scrollBuildLog();
@@ -168,7 +168,7 @@ describe('IDE jobs detail view', () => {
});
it('sets scrollPos to top when at the top', () => {
- jest.spyOn(vm.$refs.buildTrace, 'scrollTop', 'get').mockReturnValue(0);
+ jest.spyOn(vm.$refs.buildJobLog, 'scrollTop', 'get').mockReturnValue(0);
vm.scrollPos = 1;
vm.scrollBuildLog();
@@ -177,7 +177,7 @@ describe('IDE jobs detail view', () => {
});
it('resets scrollPos when not at top or bottom', () => {
- jest.spyOn(vm.$refs.buildTrace, 'scrollTop', 'get').mockReturnValue(10);
+ jest.spyOn(vm.$refs.buildJobLog, 'scrollTop', 'get').mockReturnValue(10);
vm.scrollBuildLog();
diff --git a/spec/frontend/ide/stores/modules/commit/getters_spec.js b/spec/frontend/ide/stores/modules/commit/getters_spec.js
index 7a07ed05201..1e34087b290 100644
--- a/spec/frontend/ide/stores/modules/commit/getters_spec.js
+++ b/spec/frontend/ide/stores/modules/commit/getters_spec.js
@@ -126,7 +126,7 @@ describe('IDE commit module getters', () => {
);
expect(getters.preBuiltCommitMessage(state, null, rootState)).toBe(
- 'Update test-file, index.js files',
+ 'Update test-file, index.js',
);
});
diff --git a/spec/frontend/ide/stores/utils_spec.js b/spec/frontend/ide/stores/utils_spec.js
index 79b6b66319e..a8875e0cd02 100644
--- a/spec/frontend/ide/stores/utils_spec.js
+++ b/spec/frontend/ide/stores/utils_spec.js
@@ -94,7 +94,7 @@ describe('Multi-file store utils', () => {
{
action: commitActionTypes.move,
file_path: 'renamedFile',
- content: null,
+ content: undefined,
encoding: 'text',
last_commit_id: undefined,
previous_path: 'prevPath',
diff --git a/spec/frontend/import_entities/components/pagination_bar_spec.js b/spec/frontend/import_entities/components/pagination_bar_spec.js
new file mode 100644
index 00000000000..163ce11a8db
--- /dev/null
+++ b/spec/frontend/import_entities/components/pagination_bar_spec.js
@@ -0,0 +1,92 @@
+import { GlPagination, GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import PaginationBar from '~/import_entities/components/pagination_bar.vue';
+import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
+
+describe('Pagination bar', () => {
+ const DEFAULT_PROPS = {
+ pageInfo: {
+ total: 50,
+ page: 1,
+ perPage: 20,
+ },
+ itemsCount: 17,
+ };
+ let wrapper;
+
+ const createComponent = (propsData) => {
+ wrapper = mount(PaginationBar, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ ...propsData,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('events', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('emits set-page event when page is selected', () => {
+ const NEXT_PAGE = 3;
+ // PaginationLinks uses prop instead of event for handling page change
+ // So we go one level deep to test this
+ wrapper
+ .findComponent(PaginationLinks)
+ .findComponent(GlPagination)
+ .vm.$emit('input', NEXT_PAGE);
+ expect(wrapper.emitted('set-page')).toEqual([[NEXT_PAGE]]);
+ });
+
+ it('emits set-page-size event when page size is selected', () => {
+ const firstItemInPageSizeDropdown = wrapper.findComponent(GlDropdownItem);
+ firstItemInPageSizeDropdown.vm.$emit('click');
+
+ const [emittedPageSizeChange] = wrapper.emitted('set-page-size')[0];
+ expect(firstItemInPageSizeDropdown.text()).toMatchInterpolatedText(
+ `${emittedPageSizeChange} items per page`,
+ );
+ });
+ });
+
+ it('renders current page size', () => {
+ const CURRENT_PAGE_SIZE = 40;
+
+ createComponent({
+ pageInfo: {
+ ...DEFAULT_PROPS.pageInfo,
+ perPage: CURRENT_PAGE_SIZE,
+ },
+ });
+
+ expect(wrapper.find(GlDropdown).find('button').text()).toMatchInterpolatedText(
+ `${CURRENT_PAGE_SIZE} items per page`,
+ );
+ });
+
+ it('renders current page information', () => {
+ createComponent();
+
+ expect(wrapper.find('[data-testid="information"]').text()).toMatchInterpolatedText(
+ 'Showing 1 - 17 of 50',
+ );
+ });
+
+ it('renders current page information when total count is over 1000', () => {
+ createComponent({
+ pageInfo: {
+ ...DEFAULT_PROPS.pageInfo,
+ total: 1200,
+ },
+ });
+
+ expect(wrapper.find('[data-testid="information"]').text()).toMatchInterpolatedText(
+ 'Showing 1 - 17 of 1000+',
+ );
+ });
+});
diff --git a/spec/frontend/integrations/edit/components/integration_form_spec.js b/spec/frontend/integrations/edit/components/integration_form_spec.js
index ff602327592..0a9cbadb249 100644
--- a/spec/frontend/integrations/edit/components/integration_form_spec.js
+++ b/spec/frontend/integrations/edit/components/integration_form_spec.js
@@ -11,7 +11,7 @@ import JiraTriggerFields from '~/integrations/edit/components/jira_trigger_field
import OverrideDropdown from '~/integrations/edit/components/override_dropdown.vue';
import ResetConfirmationModal from '~/integrations/edit/components/reset_confirmation_modal.vue';
import TriggerFields from '~/integrations/edit/components/trigger_fields.vue';
-import { integrationLevels } from '~/integrations/edit/constants';
+import { integrationLevels } from '~/integrations/constants';
import { createStore } from '~/integrations/edit/store';
describe('IntegrationForm', () => {
diff --git a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
index 2860d3cc37a..119afbfecfe 100644
--- a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
@@ -1,6 +1,7 @@
import { GlFormCheckbox, GlFormInput } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { GET_JIRA_ISSUE_TYPES_EVENT } from '~/integrations/constants';
import JiraIssuesFields from '~/integrations/edit/components/jira_issues_fields.vue';
import eventHub from '~/integrations/edit/event_hub';
import { createStore } from '~/integrations/edit/store';
@@ -207,7 +208,7 @@ describe('JiraIssuesFields', () => {
await setEnableCheckbox(true);
await findJiraForVulnerabilities().vm.$emit('request-get-issue-types');
- expect(eventHubEmitSpy).toHaveBeenCalledWith('getJiraIssueTypes');
+ expect(eventHubEmitSpy).toHaveBeenCalledWith(GET_JIRA_ISSUE_TYPES_EVENT);
});
});
});
diff --git a/spec/frontend/integrations/edit/components/override_dropdown_spec.js b/spec/frontend/integrations/edit/components/override_dropdown_spec.js
index eb43d940f5e..90facaff1f9 100644
--- a/spec/frontend/integrations/edit/components/override_dropdown_spec.js
+++ b/spec/frontend/integrations/edit/components/override_dropdown_spec.js
@@ -2,7 +2,7 @@ import { GlDropdown, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import OverrideDropdown from '~/integrations/edit/components/override_dropdown.vue';
-import { integrationLevels, overrideDropdownDescriptions } from '~/integrations/edit/constants';
+import { integrationLevels, overrideDropdownDescriptions } from '~/integrations/constants';
import { createStore } from '~/integrations/edit/store';
describe('OverrideDropdown', () => {
diff --git a/spec/frontend/integrations/integration_settings_form_spec.js b/spec/frontend/integrations/integration_settings_form_spec.js
index cbb2ef380ba..f8f3f0fd318 100644
--- a/spec/frontend/integrations/integration_settings_form_spec.js
+++ b/spec/frontend/integrations/integration_settings_form_spec.js
@@ -23,7 +23,7 @@ describe('IntegrationSettingsForm', () => {
it('should initialize form element refs on class object', () => {
// Form Reference
expect(integrationSettingsForm.$form).toBeDefined();
- expect(integrationSettingsForm.$form.prop('nodeName')).toEqual('FORM');
+ expect(integrationSettingsForm.$form.nodeName).toBe('FORM');
expect(integrationSettingsForm.formActive).toBeDefined();
});
@@ -43,14 +43,14 @@ describe('IntegrationSettingsForm', () => {
integrationSettingsForm.formActive = true;
integrationSettingsForm.toggleServiceState();
- expect(integrationSettingsForm.$form.attr('novalidate')).not.toBeDefined();
+ expect(integrationSettingsForm.$form.getAttribute('novalidate')).toBe(null);
});
it('should set `novalidate` attribute to form when called with `false`', () => {
integrationSettingsForm.formActive = false;
integrationSettingsForm.toggleServiceState();
- expect(integrationSettingsForm.$form.attr('novalidate')).toBeDefined();
+ expect(integrationSettingsForm.$form.getAttribute('novalidate')).toBeDefined();
});
});
@@ -67,8 +67,7 @@ describe('IntegrationSettingsForm', () => {
integrationSettingsForm = new IntegrationSettingsForm('.js-integration-settings-form');
integrationSettingsForm.init();
- // eslint-disable-next-line no-jquery/no-serialize
- formData = integrationSettingsForm.$form.serialize();
+ formData = new FormData(integrationSettingsForm.$form);
});
afterEach(() => {
@@ -145,8 +144,7 @@ describe('IntegrationSettingsForm', () => {
integrationSettingsForm = new IntegrationSettingsForm('.js-integration-settings-form');
integrationSettingsForm.init();
- // eslint-disable-next-line no-jquery/no-serialize
- formData = integrationSettingsForm.$form.serialize();
+ formData = new FormData(integrationSettingsForm.$form);
});
afterEach(() => {
diff --git a/spec/frontend/integrations/overrides/components/integration_overrides_spec.js b/spec/frontend/integrations/overrides/components/integration_overrides_spec.js
index dbed236d7df..ae89d05cead 100644
--- a/spec/frontend/integrations/overrides/components/integration_overrides_spec.js
+++ b/spec/frontend/integrations/overrides/components/integration_overrides_spec.js
@@ -1,16 +1,14 @@
-import { GlTable, GlLink, GlPagination } from '@gitlab/ui';
+import { GlTable, GlLink, GlPagination, GlAlert } from '@gitlab/ui';
+import * as Sentry from '@sentry/browser';
import { shallowMount, mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
import { DEFAULT_PER_PAGE } from '~/api';
-import createFlash from '~/flash';
import IntegrationOverrides from '~/integrations/overrides/components/integration_overrides.vue';
import axios from '~/lib/utils/axios_utils';
import httpStatus from '~/lib/utils/http_status';
import ProjectAvatar from '~/vue_shared/components/project_avatar.vue';
-jest.mock('~/flash');
-
const mockOverrides = Array(DEFAULT_PER_PAGE * 3)
.fill(1)
.map((_, index) => ({
@@ -62,6 +60,7 @@ describe('IntegrationOverrides', () => {
text: link.text(),
};
});
+ const findAlert = () => wrapper.findComponent(GlAlert);
describe('while loading', () => {
it('sets GlTable `busy` attribute to `true`', () => {
@@ -104,18 +103,26 @@ describe('IntegrationOverrides', () => {
describe('when request fails', () => {
beforeEach(async () => {
+ jest.spyOn(Sentry, 'captureException');
mockAxios.onGet(defaultProps.overridesPath).reply(httpStatus.INTERNAL_SERVER_ERROR);
+
createComponent();
await waitForPromises();
});
- it('calls createFlash', () => {
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({
- message: IntegrationOverrides.i18n.defaultErrorMessage,
- captureError: true,
- error: expect.any(Error),
- });
+ it('displays error alert', () => {
+ const alert = findAlert();
+ expect(alert.exists()).toBe(true);
+ expect(alert.text()).toBe(IntegrationOverrides.i18n.defaultErrorMessage);
+ });
+
+ it('hides overrides table', () => {
+ const table = findGlTable();
+ expect(table.exists()).toBe(false);
+ });
+
+ it('captures exception in Sentry', () => {
+ expect(Sentry.captureException).toHaveBeenCalledWith(expect.any(Error));
});
});
diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js
index 95b1c55b82d..8c3c549a5eb 100644
--- a/spec/frontend/invite_members/components/invite_members_modal_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js
@@ -242,7 +242,7 @@ describe('InviteMembersModal', () => {
};
const expectedEmailRestrictedError =
- "email 'email@example.com' does not match the allowed domains: example1.org";
+ "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.";
const expectedSyntaxError = 'email contains an invalid email address';
it('calls the API with the expected focus data when an areas_of_focus checkbox is clicked', () => {
@@ -421,7 +421,7 @@ describe('InviteMembersModal', () => {
await waitForPromises();
expect(membersFormGroupInvalidFeedback()).toBe(
- "root: User email 'admin@example.com' does not match the allowed domain of example2.com",
+ "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.",
);
expect(findMembersSelect().props('validationState')).toBe(false);
});
diff --git a/spec/frontend/invite_members/mock_data/api_responses.js b/spec/frontend/invite_members/mock_data/api_responses.js
index 79b56a33708..dd84b4fd78f 100644
--- a/spec/frontend/invite_members/mock_data/api_responses.js
+++ b/spec/frontend/invite_members/mock_data/api_responses.js
@@ -9,7 +9,7 @@ const INVITATIONS_API_ERROR_EMAIL_INVALID = {
const INVITATIONS_API_EMAIL_RESTRICTED = {
message: {
'email@example.com':
- "Invite email 'email@example.com' does not match the allowed domains: example1.org",
+ "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.",
},
status: 'error',
};
@@ -17,9 +17,9 @@ const INVITATIONS_API_EMAIL_RESTRICTED = {
const INVITATIONS_API_MULTIPLE_EMAIL_RESTRICTED = {
message: {
'email@example.com':
- "Invite email email 'email@example.com' does not match the allowed domains: example1.org",
+ "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.",
'email4@example.com':
- "Invite email email 'email4@example.com' does not match the allowed domains: example1.org",
+ "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check the Domain denylist.",
},
status: 'error',
};
@@ -36,7 +36,11 @@ const MEMBERS_API_MEMBER_ALREADY_EXISTS = {
};
const MEMBERS_API_SINGLE_USER_RESTRICTED = {
- message: { user: ["email 'email@example.com' does not match the allowed domains: example1.org"] },
+ message: {
+ user: [
+ "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.",
+ ],
+ },
};
const MEMBERS_API_SINGLE_USER_ACCESS_LEVEL = {
@@ -49,7 +53,7 @@ const MEMBERS_API_SINGLE_USER_ACCESS_LEVEL = {
const MEMBERS_API_MULTIPLE_USERS_RESTRICTED = {
message:
- "root: User email 'admin@example.com' does not match the allowed domain of example2.com and user18: User email 'user18@example.org' does not match the allowed domain of example2.com",
+ "root: The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups. and user18: The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check the Domain denylist. and john_doe31: The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Email restrictions for sign-ups.",
status: 'error',
};
diff --git a/spec/frontend/invite_members/utils/response_message_parser_spec.js b/spec/frontend/invite_members/utils/response_message_parser_spec.js
index 3c88b5a2418..e2cc87c8547 100644
--- a/spec/frontend/invite_members/utils/response_message_parser_spec.js
+++ b/spec/frontend/invite_members/utils/response_message_parser_spec.js
@@ -2,18 +2,20 @@ import {
responseMessageFromSuccess,
responseMessageFromError,
} from '~/invite_members/utils/response_message_parser';
+import { membersApiResponse, invitationsApiResponse } from '../mock_data/api_responses';
describe('Response message parser', () => {
- const expectedMessage = 'expected display message';
+ const expectedMessage = 'expected display and message.';
describe('parse message from successful response', () => {
const exampleKeyedMsg = { 'email@example.com': expectedMessage };
+ const exampleFirstPartMultiple = 'username1: expected display and message.';
const exampleUserMsgMultiple =
- ' and username1: id not found and username2: email is restricted';
+ ' and username2: id not found and restricted email. and username3: email is restricted.';
it.each([
[[{ data: { message: expectedMessage } }]],
- [[{ data: { message: expectedMessage + exampleUserMsgMultiple } }]],
+ [[{ data: { message: exampleFirstPartMultiple + exampleUserMsgMultiple } }]],
[[{ data: { error: expectedMessage } }]],
[[{ data: { message: [expectedMessage] } }]],
[[{ data: { message: exampleKeyedMsg } }]],
@@ -33,4 +35,24 @@ describe('Response message parser', () => {
expect(responseMessageFromError(errorResponse)).toBe(expectedMessage);
});
});
+
+ describe('displaying only the first error when a response has messages for multiple users', () => {
+ const expected =
+ "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.";
+
+ it.each([
+ [[{ data: membersApiResponse.MULTIPLE_USERS_RESTRICTED }]],
+ [[{ data: invitationsApiResponse.MULTIPLE_EMAIL_RESTRICTED }]],
+ [[{ data: invitationsApiResponse.EMAIL_RESTRICTED }]],
+ ])(`returns "${expectedMessage}" from success response: %j`, (restrictedResponse) => {
+ expect(responseMessageFromSuccess(restrictedResponse)).toBe(expected);
+ });
+
+ it.each([[{ response: { data: membersApiResponse.SINGLE_USER_RESTRICTED } }]])(
+ `returns "${expectedMessage}" from error response: %j`,
+ (singleRestrictedResponse) => {
+ expect(responseMessageFromError(singleRestrictedResponse)).toBe(expected);
+ },
+ );
+ });
});
diff --git a/spec/frontend/issuable/components/csv_export_modal_spec.js b/spec/frontend/issuable/components/csv_export_modal_spec.js
index 34094d22e68..ad4abda6912 100644
--- a/spec/frontend/issuable/components/csv_export_modal_spec.js
+++ b/spec/frontend/issuable/components/csv_export_modal_spec.js
@@ -61,11 +61,6 @@ describe('CsvExportModal', () => {
expect(wrapper.text()).toContain('10 issues selected');
expect(findIcon().exists()).toBe(true);
});
-
- it("doesn't display the info text when issuableCount is -1", () => {
- wrapper = createComponent({ props: { issuableCount: -1 } });
- expect(wrapper.text()).not.toContain('issues selected');
- });
});
describe('email info text', () => {
diff --git a/spec/frontend/issuable/components/csv_import_modal_spec.js b/spec/frontend/issuable/components/csv_import_modal_spec.js
index 0c88b6b1283..307323ef07a 100644
--- a/spec/frontend/issuable/components/csv_import_modal_spec.js
+++ b/spec/frontend/issuable/components/csv_import_modal_spec.js
@@ -17,7 +17,6 @@ describe('CsvImportModal', () => {
...props,
},
provide: {
- issuableType: 'issues',
...injectedProperties,
},
stubs: {
@@ -43,9 +42,9 @@ describe('CsvImportModal', () => {
const findAuthenticityToken = () => new FormData(findForm().element).get('authenticity_token');
describe('template', () => {
- it('displays modal title', () => {
+ it('passes correct title props to modal', () => {
wrapper = createComponent();
- expect(findModal().text()).toContain('Import issues');
+ expect(findModal().props('title')).toContain('Import issues');
});
it('displays a note about the maximum allowed file size', () => {
@@ -73,7 +72,7 @@ describe('CsvImportModal', () => {
});
it('submits the form when the primary action is clicked', () => {
- findPrimaryButton().trigger('click');
+ findModal().vm.$emit('primary');
expect(formSubmitSpy).toHaveBeenCalled();
});
diff --git a/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js b/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js
index 173d12757e3..ff6922989cb 100644
--- a/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js
+++ b/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js
@@ -1,5 +1,6 @@
import { mount, shallowMount } from '@vue/test-utils';
import AddIssuableForm from '~/related_issues/components/add_issuable_form.vue';
+import IssueToken from '~/related_issues/components/issue_token.vue';
import { issuableTypesMap, linkedIssueTypesMap, PathIdSeparator } from '~/related_issues/constants';
const issuable1 = {
@@ -22,7 +23,7 @@ const issuable2 = {
const pathIdSeparator = PathIdSeparator.Issue;
-const findFormInput = (wrapper) => wrapper.find('.js-add-issuable-form-input').element;
+const findFormInput = (wrapper) => wrapper.find('input').element;
const findRadioInput = (inputs, value) =>
inputs.filter((input) => input.element.value === value)[0];
@@ -105,11 +106,11 @@ describe('AddIssuableForm', () => {
});
it('should put input value in place', () => {
- expect(findFormInput(wrapper).value).toEqual(inputValue);
+ expect(findFormInput(wrapper).value).toBe(inputValue);
});
it('should render pending issuables items', () => {
- expect(wrapper.findAll('.js-add-issuable-form-token-list-item').length).toEqual(2);
+ expect(wrapper.findAllComponents(IssueToken)).toHaveLength(2);
});
it('should not have disabled submit button', () => {
diff --git a/spec/frontend/issuable_form_spec.js b/spec/frontend/issuable_form_spec.js
index bc7a87eb65c..c77fde4261e 100644
--- a/spec/frontend/issuable_form_spec.js
+++ b/spec/frontend/issuable_form_spec.js
@@ -20,16 +20,13 @@ describe('IssuableForm', () => {
describe('removeWip', () => {
it.each`
prefix
- ${'drAft '}
${'draFT: '}
${' [DRaft] '}
${'drAft:'}
${'[draFT]'}
- ${' dRaFt - '}
- ${'dRaFt - '}
${'(draft) '}
${' (DrafT)'}
- ${'draft draft - draft: [draft] (draft)'}
+ ${'draft: [draft] (draft)'}
`('removes "$prefix" from the beginning of the title', ({ prefix }) => {
instance.titleField.val(`${prefix}The Issuable's Title Value`);
@@ -48,4 +45,18 @@ describe('IssuableForm', () => {
expect(instance.titleField.val()).toBe("Draft: The Issuable's Title Value");
});
});
+
+ describe('workInProgress', () => {
+ it.each`
+ title | expected
+ ${'draFT: something is happening'} | ${true}
+ ${'draft something is happening'} | ${false}
+ ${'something is happening to drafts'} | ${false}
+ ${'something is happening'} | ${false}
+ `('returns $expected with "$title"', ({ title, expected }) => {
+ instance.titleField.val(title);
+
+ expect(instance.workInProgress()).toBe(expected);
+ });
+ });
});
diff --git a/spec/frontend/issuable_list/components/issuable_item_spec.js b/spec/frontend/issuable_list/components/issuable_item_spec.js
index ea36d59ff83..ac3bf7f3269 100644
--- a/spec/frontend/issuable_list/components/issuable_item_spec.js
+++ b/spec/frontend/issuable_list/components/issuable_item_spec.js
@@ -1,4 +1,4 @@
-import { GlLink, GlLabel, GlIcon, GlFormCheckbox } from '@gitlab/ui';
+import { GlLink, GlLabel, GlIcon, GlFormCheckbox, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { useFakeDate } from 'helpers/fake_date';
import IssuableItem from '~/issuable_list/components/issuable_item.vue';
@@ -16,6 +16,9 @@ const createComponent = ({ issuableSymbol = '#', issuable = mockIssuable, slots
showCheckbox: false,
},
slots,
+ stubs: {
+ GlSprintf,
+ },
});
const MOCK_GITLAB_URL = 'http://0.0.0.0:3000';
@@ -135,13 +138,6 @@ describe('IssuableItem', () => {
});
});
- describe('createdAt', () => {
- it('returns string containing timeago string based on `issuable.createdAt`', () => {
- expect(wrapper.vm.createdAt).toContain('created');
- expect(wrapper.vm.createdAt).toContain('ago');
- });
- });
-
describe('updatedAt', () => {
it('returns string containing timeago string based on `issuable.updatedAt`', () => {
expect(wrapper.vm.updatedAt).toContain('updated');
@@ -449,8 +445,7 @@ describe('IssuableItem', () => {
it('renders issuable updatedAt info', () => {
const updatedAtEl = wrapper.find('[data-testid="issuable-updated-at"]');
- expect(updatedAtEl.exists()).toBe(true);
- expect(updatedAtEl.find('span').attributes('title')).toBe('Sep 10, 2020 11:41am UTC');
+ expect(updatedAtEl.attributes('title')).toBe('Sep 10, 2020 11:41am UTC');
expect(updatedAtEl.text()).toBe(wrapper.vm.updatedAt);
});
diff --git a/spec/frontend/issuable_suggestions/components/item_spec.js b/spec/frontend/issuable_suggestions/components/item_spec.js
index 39083b3d8fb..45f96103e3e 100644
--- a/spec/frontend/issuable_suggestions/components/item_spec.js
+++ b/spec/frontend/issuable_suggestions/components/item_spec.js
@@ -6,10 +6,10 @@ import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_ima
import mockData from '../mock_data';
describe('Issuable suggestions suggestion component', () => {
- let vm;
+ let wrapper;
function createComponent(suggestion = {}) {
- vm = shallowMount(Suggestion, {
+ wrapper = shallowMount(Suggestion, {
propsData: {
suggestion: {
...mockData(),
@@ -19,37 +19,40 @@ describe('Issuable suggestions suggestion component', () => {
});
}
+ const findLink = () => wrapper.findComponent(GlLink);
+ const findAuthorLink = () => wrapper.findAll(GlLink).at(1);
+ const findIcon = () => wrapper.findComponent(GlIcon);
+ const findTooltip = () => wrapper.findComponent(GlTooltip);
+ const findUserAvatar = () => wrapper.findComponent(UserAvatarImage);
+
afterEach(() => {
- vm.destroy();
+ wrapper.destroy();
});
it('renders title', () => {
createComponent();
- expect(vm.text()).toContain('Test issue');
+ expect(wrapper.text()).toContain('Test issue');
});
it('renders issue link', () => {
createComponent();
- const link = vm.find(GlLink);
-
- expect(link.attributes('href')).toBe(`${TEST_HOST}/test/issue/1`);
+ expect(findLink().attributes('href')).toBe(`${TEST_HOST}/test/issue/1`);
});
it('renders IID', () => {
createComponent();
- expect(vm.text()).toContain('#1');
+ expect(wrapper.text()).toContain('#1');
});
describe('opened state', () => {
it('renders icon', () => {
createComponent();
- const icon = vm.find(GlIcon);
-
- expect(icon.props('name')).toBe('issue-open-m');
+ expect(findIcon().props('name')).toBe('issue-open-m');
+ expect(findIcon().attributes('class')).toMatch('gl-text-green-500');
});
it('renders created timeago', () => {
@@ -57,10 +60,8 @@ describe('Issuable suggestions suggestion component', () => {
closedAt: '',
});
- const tooltip = vm.find(GlTooltip);
-
- expect(tooltip.find('.d-block').text()).toContain('Opened');
- expect(tooltip.text()).toContain('3 days ago');
+ expect(findTooltip().text()).toContain('Opened');
+ expect(findTooltip().text()).toContain('3 days ago');
});
});
@@ -70,18 +71,15 @@ describe('Issuable suggestions suggestion component', () => {
state: 'closed',
});
- const icon = vm.find(GlIcon);
-
- expect(icon.props('name')).toBe('issue-close');
+ expect(findIcon().props('name')).toBe('issue-close');
+ expect(findIcon().attributes('class')).toMatch('gl-text-blue-500');
});
it('renders closed timeago', () => {
createComponent();
- const tooltip = vm.find(GlTooltip);
-
- expect(tooltip.find('.d-block').text()).toContain('Opened');
- expect(tooltip.text()).toContain('1 day ago');
+ expect(findTooltip().text()).toContain('Opened');
+ expect(findTooltip().text()).toContain('1 day ago');
});
});
@@ -89,18 +87,14 @@ describe('Issuable suggestions suggestion component', () => {
it('renders author info', () => {
createComponent();
- const link = vm.findAll(GlLink).at(1);
-
- expect(link.text()).toContain('Author Name');
- expect(link.text()).toContain('@author.username');
+ expect(findAuthorLink().text()).toContain('Author Name');
+ expect(findAuthorLink().text()).toContain('@author.username');
});
it('renders author image', () => {
createComponent();
- const image = vm.find(UserAvatarImage);
-
- expect(image.props('imgSrc')).toBe(`${TEST_HOST}/avatar`);
+ expect(findUserAvatar().props('imgSrc')).toBe(`${TEST_HOST}/avatar`);
});
});
@@ -108,7 +102,7 @@ describe('Issuable suggestions suggestion component', () => {
it('renders upvotes count', () => {
createComponent();
- const count = vm.findAll('.suggestion-counts span').at(0);
+ const count = wrapper.findAll('.suggestion-counts span').at(0);
expect(count.text()).toContain('1');
expect(count.find(GlIcon).props('name')).toBe('thumb-up');
@@ -117,7 +111,7 @@ describe('Issuable suggestions suggestion component', () => {
it('renders notes count', () => {
createComponent();
- const count = vm.findAll('.suggestion-counts span').at(1);
+ const count = wrapper.findAll('.suggestion-counts span').at(1);
expect(count.text()).toContain('2');
expect(count.find(GlIcon).props('name')).toBe('comment');
@@ -130,10 +124,9 @@ describe('Issuable suggestions suggestion component', () => {
confidential: true,
});
- const icon = vm.find(GlIcon);
-
- expect(icon.props('name')).toBe('eye-slash');
- expect(icon.attributes('title')).toBe('Confidential');
+ expect(findIcon().props('name')).toBe('eye-slash');
+ expect(findIcon().attributes('class')).toMatch('gl-text-orange-500');
+ expect(findIcon().attributes('title')).toBe('Confidential');
});
});
});
diff --git a/spec/frontend/issues_list/components/issues_list_app_spec.js b/spec/frontend/issues_list/components/issues_list_app_spec.js
index 8d79a5eed35..6b443062f12 100644
--- a/spec/frontend/issues_list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues_list/components/issues_list_app_spec.js
@@ -24,6 +24,7 @@ import IssuableByEmail from '~/issuable/components/issuable_by_email.vue';
import IssuableList from '~/issuable_list/components/issuable_list_root.vue';
import { IssuableListTabs, IssuableStates } from '~/issuable_list/constants';
import IssuesListApp from '~/issues_list/components/issues_list_app.vue';
+import NewIssueDropdown from '~/issues_list/components/new_issue_dropdown.vue';
import {
CREATED_DESC,
DUE_DATE_OVERDUE,
@@ -65,6 +66,7 @@ describe('IssuesListApp component', () => {
exportCsvPath: 'export/csv/path',
fullPath: 'path/to/project',
hasAnyIssues: true,
+ hasAnyProjects: true,
hasBlockedIssuesFeature: true,
hasIssueWeightsFeature: true,
hasIterationsFeature: true,
@@ -93,6 +95,7 @@ describe('IssuesListApp component', () => {
const findGlEmptyState = () => wrapper.findComponent(GlEmptyState);
const findGlLink = () => wrapper.findComponent(GlLink);
const findIssuableList = () => wrapper.findComponent(IssuableList);
+ const findNewIssueDropdown = () => wrapper.findComponent(NewIssueDropdown);
const mountComponent = ({
provide = {},
@@ -190,10 +193,7 @@ describe('IssuesListApp component', () => {
beforeEach(() => {
setWindowLocation(search);
- wrapper = mountComponent({
- provide: { isSignedIn: true },
- mountFn: mount,
- });
+ wrapper = mountComponent({ provide: { isSignedIn: true }, mountFn: mount });
jest.runOnlyPendingTimers();
});
@@ -208,7 +208,7 @@ describe('IssuesListApp component', () => {
describe('when user is not signed in', () => {
it('does not render', () => {
- wrapper = mountComponent({ provide: { isSignedIn: false } });
+ wrapper = mountComponent({ provide: { isSignedIn: false }, mountFn: mount });
expect(findCsvImportExportButtons().exists()).toBe(false);
});
@@ -216,7 +216,7 @@ describe('IssuesListApp component', () => {
describe('when in a group context', () => {
it('does not render', () => {
- wrapper = mountComponent({ provide: { isProject: false } });
+ wrapper = mountComponent({ provide: { isProject: false }, mountFn: mount });
expect(findCsvImportExportButtons().exists()).toBe(false);
});
@@ -231,7 +231,7 @@ describe('IssuesListApp component', () => {
});
it('does not render when user does not have permissions', () => {
- wrapper = mountComponent({ provide: { canBulkUpdate: false } });
+ wrapper = mountComponent({ provide: { canBulkUpdate: false }, mountFn: mount });
expect(findGlButtons().filter((button) => button.text() === 'Edit issues')).toHaveLength(0);
});
@@ -258,11 +258,25 @@ describe('IssuesListApp component', () => {
});
it('does not render when user does not have permissions', () => {
- wrapper = mountComponent({ provide: { showNewIssueLink: false } });
+ wrapper = mountComponent({ provide: { showNewIssueLink: false }, mountFn: mount });
expect(findGlButtons().filter((button) => button.text() === 'New issue')).toHaveLength(0);
});
});
+
+ describe('new issue split dropdown', () => {
+ it('does not render in a project context', () => {
+ wrapper = mountComponent({ provide: { isProject: true }, mountFn: mount });
+
+ expect(findNewIssueDropdown().exists()).toBe(false);
+ });
+
+ it('renders in a group context', () => {
+ wrapper = mountComponent({ provide: { isProject: false }, mountFn: mount });
+
+ expect(findNewIssueDropdown().exists()).toBe(true);
+ });
+ });
});
describe('initial url params', () => {
@@ -506,7 +520,7 @@ describe('IssuesListApp component', () => {
beforeEach(() => {
wrapper = mountComponent({
provide: {
- groupEpicsPath: '',
+ groupPath: '',
},
});
});
@@ -522,7 +536,7 @@ describe('IssuesListApp component', () => {
beforeEach(() => {
wrapper = mountComponent({
provide: {
- groupEpicsPath: '',
+ groupPath: '',
},
});
});
@@ -550,7 +564,7 @@ describe('IssuesListApp component', () => {
provide: {
isSignedIn: true,
projectIterationsPath: 'project/iterations/path',
- groupEpicsPath: 'group/epics/path',
+ groupPath: 'group/path',
hasIssueWeightsFeature: true,
},
});
diff --git a/spec/frontend/issues_list/components/new_issue_dropdown_spec.js b/spec/frontend/issues_list/components/new_issue_dropdown_spec.js
new file mode 100644
index 00000000000..1fcaa99cf5a
--- /dev/null
+++ b/spec/frontend/issues_list/components/new_issue_dropdown_spec.js
@@ -0,0 +1,131 @@
+import { GlDropdown, GlDropdownItem, GlSearchBoxByType } from '@gitlab/ui';
+import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import NewIssueDropdown from '~/issues_list/components/new_issue_dropdown.vue';
+import searchProjectsQuery from '~/issues_list/queries/search_projects.query.graphql';
+import { DASH_SCOPE, joinPaths } from '~/lib/utils/url_utility';
+import {
+ emptySearchProjectsQueryResponse,
+ project1,
+ project2,
+ searchProjectsQueryResponse,
+} from '../mock_data';
+
+describe('NewIssueDropdown component', () => {
+ let wrapper;
+
+ const localVue = createLocalVue();
+ localVue.use(VueApollo);
+
+ const mountComponent = ({
+ search = '',
+ queryResponse = searchProjectsQueryResponse,
+ mountFn = shallowMount,
+ } = {}) => {
+ const requestHandlers = [[searchProjectsQuery, jest.fn().mockResolvedValue(queryResponse)]];
+ const apolloProvider = createMockApollo(requestHandlers);
+
+ return mountFn(NewIssueDropdown, {
+ localVue,
+ apolloProvider,
+ provide: {
+ fullPath: 'mushroom-kingdom',
+ },
+ data() {
+ return { search };
+ },
+ });
+ };
+
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findInput = () => wrapper.findComponent(GlSearchBoxByType);
+ const showDropdown = async () => {
+ findDropdown().vm.$emit('shown');
+ await wrapper.vm.$apollo.queries.projects.refetch();
+ jest.runOnlyPendingTimers();
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders a split dropdown', () => {
+ wrapper = mountComponent();
+
+ expect(findDropdown().props('split')).toBe(true);
+ });
+
+ it('renders a label for the dropdown toggle button', () => {
+ wrapper = mountComponent();
+
+ expect(findDropdown().attributes('toggle-text')).toBe(NewIssueDropdown.i18n.toggleButtonLabel);
+ });
+
+ it('focuses on input when dropdown is shown', async () => {
+ wrapper = mountComponent({ mountFn: mount });
+
+ const inputSpy = jest.spyOn(findInput().vm, 'focusInput');
+
+ await showDropdown();
+
+ expect(inputSpy).toHaveBeenCalledTimes(1);
+ });
+
+ it('renders expected dropdown items', async () => {
+ wrapper = mountComponent({ mountFn: mount });
+
+ await showDropdown();
+
+ const listItems = wrapper.findAll('li');
+
+ expect(listItems.at(0).text()).toBe(project1.nameWithNamespace);
+ expect(listItems.at(1).text()).toBe(project2.nameWithNamespace);
+ });
+
+ it('renders `No matches found` when there are no matches', async () => {
+ wrapper = mountComponent({
+ search: 'no matches',
+ queryResponse: emptySearchProjectsQueryResponse,
+ mountFn: mount,
+ });
+
+ await showDropdown();
+
+ expect(wrapper.find('li').text()).toBe(NewIssueDropdown.i18n.noMatchesFound);
+ });
+
+ describe('when no project is selected', () => {
+ beforeEach(() => {
+ wrapper = mountComponent();
+ });
+
+ it('dropdown button is not a link', () => {
+ expect(findDropdown().attributes('split-href')).toBeUndefined();
+ });
+
+ it('displays default text on the dropdown button', () => {
+ expect(findDropdown().props('text')).toBe(NewIssueDropdown.i18n.defaultDropdownText);
+ });
+ });
+
+ describe('when a project is selected', () => {
+ beforeEach(async () => {
+ wrapper = mountComponent({ mountFn: mount });
+
+ await showDropdown();
+
+ wrapper.findComponent(GlDropdownItem).vm.$emit('click', project1);
+ });
+
+ it('dropdown button is a link', () => {
+ const href = joinPaths(project1.webUrl, DASH_SCOPE, 'issues/new');
+
+ expect(findDropdown().attributes('split-href')).toBe(href);
+ });
+
+ it('displays project name on the dropdown button', () => {
+ expect(findDropdown().props('text')).toBe(`New issue in ${project1.name}`);
+ });
+ });
+});
diff --git a/spec/frontend/issues_list/mock_data.js b/spec/frontend/issues_list/mock_data.js
index 720f9cac986..3be256d8094 100644
--- a/spec/frontend/issues_list/mock_data.js
+++ b/spec/frontend/issues_list/mock_data.js
@@ -221,3 +221,37 @@ export const urlParamsWithSpecialValues = {
epic_id: 'None',
weight: 'None',
};
+
+export const project1 = {
+ id: 'gid://gitlab/Group/26',
+ name: 'Super Mario Project',
+ nameWithNamespace: 'Mushroom Kingdom / Super Mario Project',
+ webUrl: 'https://127.0.0.1:3000/mushroom-kingdom/super-mario-project',
+};
+
+export const project2 = {
+ id: 'gid://gitlab/Group/59',
+ name: 'Mario Kart Project',
+ nameWithNamespace: 'Mushroom Kingdom / Mario Kart Project',
+ webUrl: 'https://127.0.0.1:3000/mushroom-kingdom/mario-kart-project',
+};
+
+export const searchProjectsQueryResponse = {
+ data: {
+ group: {
+ projects: {
+ nodes: [project1, project2],
+ },
+ },
+ },
+};
+
+export const emptySearchProjectsQueryResponse = {
+ data: {
+ group: {
+ projects: {
+ nodes: [],
+ },
+ },
+ },
+};
diff --git a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
index 891ba9c223c..9f5b772a5c7 100644
--- a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
+++ b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
@@ -127,21 +127,7 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
>
<!---->
- <div
- class="gl-display-flex gl-flex-direction-row gl-justify-content-space-between gl-align-items-center gl-px-5"
- >
- <div
- class="gl-display-flex"
- >
- <!---->
- </div>
-
- <div
- class="gl-display-flex"
- >
- <!---->
- </div>
- </div>
+ <!---->
<div
class="gl-new-dropdown-contents"
@@ -272,21 +258,7 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
>
<!---->
- <div
- class="gl-display-flex gl-flex-direction-row gl-justify-content-space-between gl-align-items-center gl-px-5"
- >
- <div
- class="gl-display-flex"
- >
- <!---->
- </div>
-
- <div
- class="gl-display-flex"
- >
- <!---->
- </div>
- </div>
+ <!---->
<div
class="gl-new-dropdown-contents"
diff --git a/spec/frontend/jobs/components/job_app_spec.js b/spec/frontend/jobs/components/job_app_spec.js
index f8a0059bf21..07e6ee46c41 100644
--- a/spec/frontend/jobs/components/job_app_spec.js
+++ b/spec/frontend/jobs/components/job_app_spec.js
@@ -2,7 +2,7 @@ import { GlLoadingIcon } from '@gitlab/ui';
import { mount, createLocalVue } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import Vuex from 'vuex';
-import { getJSONFixture } from 'helpers/fixtures';
+import delayedJobFixture from 'test_fixtures/jobs/delayed.json';
import { TEST_HOST } from 'helpers/test_constants';
import EmptyState from '~/jobs/components/empty_state.vue';
import EnvironmentsBlock from '~/jobs/components/environments_block.vue';
@@ -19,8 +19,6 @@ describe('Job App', () => {
const localVue = createLocalVue();
localVue.use(Vuex);
- const delayedJobFixture = getJSONFixture('jobs/delayed.json');
-
let store;
let wrapper;
let mock;
@@ -47,9 +45,9 @@ describe('Job App', () => {
wrapper = mount(JobApp, { propsData: { ...props }, store });
};
- const setupAndMount = ({ jobData = {}, traceData = {} } = {}) => {
+ const setupAndMount = ({ jobData = {}, jobLogData = {} } = {}) => {
mock.onGet(initSettings.endpoint).replyOnce(200, { ...job, ...jobData });
- mock.onGet(`${initSettings.pagePath}/trace.json`).reply(200, traceData);
+ mock.onGet(`${initSettings.pagePath}/trace.json`).reply(200, jobLogData);
const asyncInit = store.dispatch('init', initSettings);
@@ -77,11 +75,10 @@ describe('Job App', () => {
const findEmptyState = () => wrapper.find(EmptyState);
const findJobNewIssueLink = () => wrapper.find('[data-testid="job-new-issue"]');
const findJobEmptyStateTitle = () => wrapper.find('[data-testid="job-empty-state-title"]');
- const findJobTraceScrollTop = () => wrapper.find('[data-testid="job-controller-scroll-top"]');
- const findJobTraceScrollBottom = () =>
- wrapper.find('[data-testid="job-controller-scroll-bottom"]');
- const findJobTraceController = () => wrapper.find('[data-testid="job-raw-link-controller"]');
- const findJobTraceEraseLink = () => wrapper.find('[data-testid="job-log-erase-link"]');
+ const findJobLogScrollTop = () => wrapper.find('[data-testid="job-controller-scroll-top"]');
+ const findJobLogScrollBottom = () => wrapper.find('[data-testid="job-controller-scroll-bottom"]');
+ const findJobLogController = () => wrapper.find('[data-testid="job-raw-link-controller"]');
+ const findJobLogEraseLink = () => wrapper.find('[data-testid="job-log-erase-link"]');
beforeEach(() => {
mock = new MockAdapter(axios);
@@ -315,7 +312,7 @@ describe('Job App', () => {
});
describe('empty states block', () => {
- it('renders empty state when job does not have trace and is not running', () =>
+ it('renders empty state when job does not have log and is not running', () =>
setupAndMount({
jobData: {
has_trace: false,
@@ -342,7 +339,7 @@ describe('Job App', () => {
expect(findEmptyState().exists()).toBe(true);
}));
- it('does not render empty state when job does not have trace but it is running', () =>
+ it('does not render empty state when job does not have log but it is running', () =>
setupAndMount({
jobData: {
has_trace: false,
@@ -358,7 +355,7 @@ describe('Job App', () => {
expect(findEmptyState().exists()).toBe(false);
}));
- it('does not render empty state when job has trace but it is not running', () =>
+ it('does not render empty state when job has log but it is not running', () =>
setupAndMount({ jobData: { has_trace: true } }).then(() => {
expect(findEmptyState().exists()).toBe(false);
}));
@@ -424,10 +421,10 @@ describe('Job App', () => {
});
});
- describe('trace controls', () => {
+ describe('job log controls', () => {
beforeEach(() =>
setupAndMount({
- traceData: {
+ jobLogData: {
html: '<span>Update</span>',
status: 'success',
append: false,
@@ -439,16 +436,16 @@ describe('Job App', () => {
);
it('should render scroll buttons', () => {
- expect(findJobTraceScrollTop().exists()).toBe(true);
- expect(findJobTraceScrollBottom().exists()).toBe(true);
+ expect(findJobLogScrollTop().exists()).toBe(true);
+ expect(findJobLogScrollBottom().exists()).toBe(true);
});
it('should render link to raw ouput', () => {
- expect(findJobTraceController().exists()).toBe(true);
+ expect(findJobLogController().exists()).toBe(true);
});
it('should render link to erase job', () => {
- expect(findJobTraceEraseLink().exists()).toBe(true);
+ expect(findJobLogEraseLink().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/jobs/components/job_container_item_spec.js b/spec/frontend/jobs/components/job_container_item_spec.js
index 36038b69e64..6b488821bc1 100644
--- a/spec/frontend/jobs/components/job_container_item_spec.js
+++ b/spec/frontend/jobs/components/job_container_item_spec.js
@@ -1,12 +1,12 @@
import { GlIcon, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import delayedJobFixture from 'test_fixtures/jobs/delayed.json';
import JobContainerItem from '~/jobs/components/job_container_item.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import job from '../mock_data';
describe('JobContainerItem', () => {
let wrapper;
- const delayedJobFixture = getJSONFixture('jobs/delayed.json');
const findCiIconComponent = () => wrapper.findComponent(CiIcon);
const findGlIconComponent = () => wrapper.findComponent(GlIcon);
diff --git a/spec/frontend/jobs/components/job_log_controllers_spec.js b/spec/frontend/jobs/components/job_log_controllers_spec.js
index 97b0333cb32..0ba07522243 100644
--- a/spec/frontend/jobs/components/job_log_controllers_spec.js
+++ b/spec/frontend/jobs/components/job_log_controllers_spec.js
@@ -18,7 +18,7 @@ describe('Job log controllers', () => {
isScrollTopDisabled: false,
isScrollBottomDisabled: false,
isScrollingDown: true,
- isTraceSizeVisible: true,
+ isJobLogSizeVisible: true,
};
const createWrapper = (props) => {
@@ -38,7 +38,7 @@ describe('Job log controllers', () => {
const findScrollBottom = () => wrapper.find('[data-testid="job-controller-scroll-bottom"]');
describe('Truncate information', () => {
- describe('with isTraceSizeVisible', () => {
+ describe('with isJobLogSizeVisible', () => {
beforeEach(() => {
createWrapper();
});
@@ -47,31 +47,31 @@ describe('Job log controllers', () => {
expect(findTruncatedInfo().text()).toMatch('499.95 KiB');
});
- it('renders link to raw trace', () => {
+ it('renders link to raw job log', () => {
expect(findRawLink().attributes('href')).toBe(defaultProps.rawPath);
});
});
});
describe('links section', () => {
- describe('with raw trace path', () => {
+ describe('with raw job log path', () => {
beforeEach(() => {
createWrapper();
});
- it('renders raw trace link', () => {
+ it('renders raw job log link', () => {
expect(findRawLinkController().attributes('href')).toBe(defaultProps.rawPath);
});
});
- describe('without raw trace path', () => {
+ describe('without raw job log path', () => {
beforeEach(() => {
createWrapper({
rawPath: null,
});
});
- it('does not render raw trace link', () => {
+ it('does not render raw job log link', () => {
expect(findRawLinkController().exists()).toBe(false);
});
});
diff --git a/spec/frontend/jobs/components/log/collapsible_section_spec.js b/spec/frontend/jobs/components/log/collapsible_section_spec.js
index 4e23a3ba7b8..96bdf03796b 100644
--- a/spec/frontend/jobs/components/log/collapsible_section_spec.js
+++ b/spec/frontend/jobs/components/log/collapsible_section_spec.js
@@ -6,7 +6,7 @@ describe('Job Log Collapsible Section', () => {
let wrapper;
let origGon;
- const traceEndpoint = 'jobs/335';
+ const jobLogEndpoint = 'jobs/335';
const findCollapsibleLine = () => wrapper.find('.collapsible-line');
const findCollapsibleLineSvg = () => wrapper.find('.collapsible-line svg');
@@ -35,7 +35,7 @@ describe('Job Log Collapsible Section', () => {
beforeEach(() => {
createComponent({
section: collapsibleSectionClosed,
- traceEndpoint,
+ jobLogEndpoint,
});
});
@@ -52,7 +52,7 @@ describe('Job Log Collapsible Section', () => {
beforeEach(() => {
createComponent({
section: collapsibleSectionOpened,
- traceEndpoint,
+ jobLogEndpoint,
});
});
@@ -72,7 +72,7 @@ describe('Job Log Collapsible Section', () => {
it('emits onClickCollapsibleLine on click', () => {
createComponent({
section: collapsibleSectionOpened,
- traceEndpoint,
+ jobLogEndpoint,
});
findCollapsibleLine().trigger('click');
diff --git a/spec/frontend/jobs/components/log/log_spec.js b/spec/frontend/jobs/components/log/log_spec.js
index 99fb6846ce5..9a5522ab4cd 100644
--- a/spec/frontend/jobs/components/log/log_spec.js
+++ b/spec/frontend/jobs/components/log/log_spec.js
@@ -31,8 +31,8 @@ describe('Job Log', () => {
window.gon = { features: { infinitelyCollapsibleSections: false } };
state = {
- trace: logLinesParserLegacy(jobLog),
- traceEndpoint: 'jobs/id',
+ jobLog: logLinesParserLegacy(jobLog),
+ jobLogEndpoint: 'jobs/id',
};
store = new Vuex.Store({
@@ -59,7 +59,7 @@ describe('Job Log', () => {
});
it('links to the provided path and correct line number', () => {
- expect(wrapper.find('#L1').attributes('href')).toBe(`${state.traceEndpoint}#L1`);
+ expect(wrapper.find('#L1').attributes('href')).toBe(`${state.jobLogEndpoint}#L1`);
});
});
@@ -111,8 +111,8 @@ describe('Job Log, infinitelyCollapsibleSections feature flag enabled', () => {
window.gon = { features: { infinitelyCollapsibleSections: true } };
state = {
- trace: logLinesParser(jobLog).parsedLines,
- traceEndpoint: 'jobs/id',
+ jobLog: logLinesParser(jobLog).parsedLines,
+ jobLogEndpoint: 'jobs/id',
};
store = new Vuex.Store({
@@ -139,7 +139,7 @@ describe('Job Log, infinitelyCollapsibleSections feature flag enabled', () => {
});
it('links to the provided path and correct line number', () => {
- expect(wrapper.find('#L1').attributes('href')).toBe(`${state.traceEndpoint}#L1`);
+ expect(wrapper.find('#L1').attributes('href')).toBe(`${state.jobLogEndpoint}#L1`);
});
});
diff --git a/spec/frontend/jobs/mixins/delayed_job_mixin_spec.js b/spec/frontend/jobs/mixins/delayed_job_mixin_spec.js
index 838323df755..63dcd72f967 100644
--- a/spec/frontend/jobs/mixins/delayed_job_mixin_spec.js
+++ b/spec/frontend/jobs/mixins/delayed_job_mixin_spec.js
@@ -1,9 +1,9 @@
import { shallowMount } from '@vue/test-utils';
+import delayedJobFixture from 'test_fixtures/jobs/delayed.json';
import delayedJobMixin from '~/jobs/mixins/delayed_job_mixin';
describe('DelayedJobMixin', () => {
let wrapper;
- const delayedJobFixture = getJSONFixture('jobs/delayed.json');
const dummyComponent = {
props: {
job: {
diff --git a/spec/frontend/jobs/store/actions_spec.js b/spec/frontend/jobs/store/actions_spec.js
index a29bd15099f..16448d6a3ca 100644
--- a/spec/frontend/jobs/store/actions_spec.js
+++ b/spec/frontend/jobs/store/actions_spec.js
@@ -3,7 +3,7 @@ import { TEST_HOST } from 'helpers/test_constants';
import testAction from 'helpers/vuex_action_helper';
import {
setJobEndpoint,
- setTraceOptions,
+ setJobLogOptions,
clearEtagPoll,
stopPolling,
requestJob,
@@ -12,12 +12,12 @@ import {
receiveJobError,
scrollTop,
scrollBottom,
- requestTrace,
- fetchTrace,
- startPollingTrace,
- stopPollingTrace,
- receiveTraceSuccess,
- receiveTraceError,
+ requestJobLog,
+ fetchJobLog,
+ startPollingJobLog,
+ stopPollingJobLog,
+ receiveJobLogSuccess,
+ receiveJobLogError,
toggleCollapsibleLine,
requestJobsForStage,
fetchJobsForStage,
@@ -51,13 +51,13 @@ describe('Job State actions', () => {
});
});
- describe('setTraceOptions', () => {
- it('should commit SET_TRACE_OPTIONS mutation', (done) => {
+ describe('setJobLogOptions', () => {
+ it('should commit SET_JOB_LOG_OPTIONS mutation', (done) => {
testAction(
- setTraceOptions,
+ setJobLogOptions,
{ pagePath: 'job/872324/trace.json' },
mockedState,
- [{ type: types.SET_TRACE_OPTIONS, payload: { pagePath: 'job/872324/trace.json' } }],
+ [{ type: types.SET_JOB_LOG_OPTIONS, payload: { pagePath: 'job/872324/trace.json' } }],
[],
done,
);
@@ -191,17 +191,17 @@ describe('Job State actions', () => {
});
});
- describe('requestTrace', () => {
- it('should commit REQUEST_TRACE mutation', (done) => {
- testAction(requestTrace, null, mockedState, [{ type: types.REQUEST_TRACE }], [], done);
+ describe('requestJobLog', () => {
+ it('should commit REQUEST_JOB_LOG mutation', (done) => {
+ testAction(requestJobLog, null, mockedState, [{ type: types.REQUEST_JOB_LOG }], [], done);
});
});
- describe('fetchTrace', () => {
+ describe('fetchJobLog', () => {
let mock;
beforeEach(() => {
- mockedState.traceEndpoint = `${TEST_HOST}/endpoint`;
+ mockedState.jobLogEndpoint = `${TEST_HOST}/endpoint`;
mock = new MockAdapter(axios);
});
@@ -212,14 +212,14 @@ describe('Job State actions', () => {
});
describe('success', () => {
- it('dispatches requestTrace, receiveTraceSuccess and stopPollingTrace when job is complete', (done) => {
+ it('dispatches requestJobLog, receiveJobLogSuccess and stopPollingJobLog when job is complete', (done) => {
mock.onGet(`${TEST_HOST}/endpoint/trace.json`).replyOnce(200, {
html: 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- :',
complete: true,
});
testAction(
- fetchTrace,
+ fetchJobLog,
null,
mockedState,
[],
@@ -233,10 +233,10 @@ describe('Job State actions', () => {
html: 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- :',
complete: true,
},
- type: 'receiveTraceSuccess',
+ type: 'receiveJobLogSuccess',
},
{
- type: 'stopPollingTrace',
+ type: 'stopPollingJobLog',
},
],
done,
@@ -244,43 +244,43 @@ describe('Job State actions', () => {
});
describe('when job is incomplete', () => {
- let tracePayload;
+ let jobLogPayload;
beforeEach(() => {
- tracePayload = {
+ jobLogPayload = {
html: 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- :',
complete: false,
};
- mock.onGet(`${TEST_HOST}/endpoint/trace.json`).replyOnce(200, tracePayload);
+ mock.onGet(`${TEST_HOST}/endpoint/trace.json`).replyOnce(200, jobLogPayload);
});
- it('dispatches startPollingTrace', (done) => {
+ it('dispatches startPollingJobLog', (done) => {
testAction(
- fetchTrace,
+ fetchJobLog,
null,
mockedState,
[],
[
{ type: 'toggleScrollisInBottom', payload: true },
- { type: 'receiveTraceSuccess', payload: tracePayload },
- { type: 'startPollingTrace' },
+ { type: 'receiveJobLogSuccess', payload: jobLogPayload },
+ { type: 'startPollingJobLog' },
],
done,
);
});
- it('does not dispatch startPollingTrace when timeout is non-empty', (done) => {
- mockedState.traceTimeout = 1;
+ it('does not dispatch startPollingJobLog when timeout is non-empty', (done) => {
+ mockedState.jobLogTimeout = 1;
testAction(
- fetchTrace,
+ fetchJobLog,
null,
mockedState,
[],
[
{ type: 'toggleScrollisInBottom', payload: true },
- { type: 'receiveTraceSuccess', payload: tracePayload },
+ { type: 'receiveJobLogSuccess', payload: jobLogPayload },
],
done,
);
@@ -293,15 +293,15 @@ describe('Job State actions', () => {
mock.onGet(`${TEST_HOST}/endpoint/trace.json`).reply(500);
});
- it('dispatches requestTrace and receiveTraceError ', (done) => {
+ it('dispatches requestJobLog and receiveJobLogError ', (done) => {
testAction(
- fetchTrace,
+ fetchJobLog,
null,
mockedState,
[],
[
{
- type: 'receiveTraceError',
+ type: 'receiveJobLogError',
},
],
done,
@@ -310,7 +310,7 @@ describe('Job State actions', () => {
});
});
- describe('startPollingTrace', () => {
+ describe('startPollingJobLog', () => {
let dispatch;
let commit;
@@ -318,18 +318,18 @@ describe('Job State actions', () => {
dispatch = jest.fn();
commit = jest.fn();
- startPollingTrace({ dispatch, commit });
+ startPollingJobLog({ dispatch, commit });
});
afterEach(() => {
jest.clearAllTimers();
});
- it('should save the timeout id but not call fetchTrace', () => {
- expect(commit).toHaveBeenCalledWith(types.SET_TRACE_TIMEOUT, expect.any(Number));
+ it('should save the timeout id but not call fetchJobLog', () => {
+ expect(commit).toHaveBeenCalledWith(types.SET_JOB_LOG_TIMEOUT, expect.any(Number));
expect(commit.mock.calls[0][1]).toBeGreaterThan(0);
- expect(dispatch).not.toHaveBeenCalledWith('fetchTrace');
+ expect(dispatch).not.toHaveBeenCalledWith('fetchJobLog');
});
describe('after timeout has passed', () => {
@@ -337,14 +337,14 @@ describe('Job State actions', () => {
jest.advanceTimersByTime(4000);
});
- it('should clear the timeout id and fetchTrace', () => {
- expect(commit).toHaveBeenCalledWith(types.SET_TRACE_TIMEOUT, 0);
- expect(dispatch).toHaveBeenCalledWith('fetchTrace');
+ it('should clear the timeout id and fetchJobLog', () => {
+ expect(commit).toHaveBeenCalledWith(types.SET_JOB_LOG_TIMEOUT, 0);
+ expect(dispatch).toHaveBeenCalledWith('fetchJobLog');
});
});
});
- describe('stopPollingTrace', () => {
+ describe('stopPollingJobLog', () => {
let origTimeout;
beforeEach(() => {
@@ -358,40 +358,40 @@ describe('Job State actions', () => {
window.clearTimeout = origTimeout;
});
- it('should commit STOP_POLLING_TRACE mutation ', (done) => {
- const traceTimeout = 7;
+ it('should commit STOP_POLLING_JOB_LOG mutation ', (done) => {
+ const jobLogTimeout = 7;
testAction(
- stopPollingTrace,
+ stopPollingJobLog,
null,
- { ...mockedState, traceTimeout },
- [{ type: types.SET_TRACE_TIMEOUT, payload: 0 }, { type: types.STOP_POLLING_TRACE }],
+ { ...mockedState, jobLogTimeout },
+ [{ type: types.SET_JOB_LOG_TIMEOUT, payload: 0 }, { type: types.STOP_POLLING_JOB_LOG }],
[],
)
.then(() => {
- expect(window.clearTimeout).toHaveBeenCalledWith(traceTimeout);
+ expect(window.clearTimeout).toHaveBeenCalledWith(jobLogTimeout);
})
.then(done)
.catch(done.fail);
});
});
- describe('receiveTraceSuccess', () => {
- it('should commit RECEIVE_TRACE_SUCCESS mutation ', (done) => {
+ describe('receiveJobLogSuccess', () => {
+ it('should commit RECEIVE_JOB_LOG_SUCCESS mutation ', (done) => {
testAction(
- receiveTraceSuccess,
+ receiveJobLogSuccess,
'hello world',
mockedState,
- [{ type: types.RECEIVE_TRACE_SUCCESS, payload: 'hello world' }],
+ [{ type: types.RECEIVE_JOB_LOG_SUCCESS, payload: 'hello world' }],
[],
done,
);
});
});
- describe('receiveTraceError', () => {
- it('should commit stop polling trace', (done) => {
- testAction(receiveTraceError, null, mockedState, [], [{ type: 'stopPollingTrace' }], done);
+ describe('receiveJobLogError', () => {
+ it('should commit stop polling job log', (done) => {
+ testAction(receiveJobLogError, null, mockedState, [], [{ type: 'stopPollingJobLog' }], done);
});
});
diff --git a/spec/frontend/jobs/store/getters_spec.js b/spec/frontend/jobs/store/getters_spec.js
index 379114c3737..f26c0cf00fd 100644
--- a/spec/frontend/jobs/store/getters_spec.js
+++ b/spec/frontend/jobs/store/getters_spec.js
@@ -102,13 +102,13 @@ describe('Job Store Getters', () => {
});
});
- describe('hasTrace', () => {
+ describe('hasJobLog', () => {
describe('when has_trace is true', () => {
it('returns true', () => {
localState.job.has_trace = true;
localState.job.status = {};
- expect(getters.hasTrace(localState)).toEqual(true);
+ expect(getters.hasJobLog(localState)).toEqual(true);
});
});
@@ -117,7 +117,7 @@ describe('Job Store Getters', () => {
localState.job.has_trace = false;
localState.job.status = { group: 'running' };
- expect(getters.hasTrace(localState)).toEqual(true);
+ expect(getters.hasJobLog(localState)).toEqual(true);
});
});
@@ -126,7 +126,7 @@ describe('Job Store Getters', () => {
localState.job.has_trace = false;
localState.job.status = { group: 'pending' };
- expect(getters.hasTrace(localState)).toEqual(false);
+ expect(getters.hasJobLog(localState)).toEqual(false);
});
});
});
diff --git a/spec/frontend/jobs/store/mutations_spec.js b/spec/frontend/jobs/store/mutations_spec.js
index 159315330e4..b73aa8abf4e 100644
--- a/spec/frontend/jobs/store/mutations_spec.js
+++ b/spec/frontend/jobs/store/mutations_spec.js
@@ -45,39 +45,39 @@ describe('Jobs Store Mutations', () => {
});
});
- describe('RECEIVE_TRACE_SUCCESS', () => {
- describe('when trace has state', () => {
- it('sets traceState', () => {
+ describe('RECEIVE_JOB_LOG_SUCCESS', () => {
+ describe('when job log has state', () => {
+ it('sets jobLogState', () => {
const stateLog =
'eyJvZmZzZXQiOjczNDQ1MSwibl9vcGVuX3RhZ3MiOjAsImZnX2NvbG9yIjpudWxsLCJiZ19jb2xvciI6bnVsbCwic3R5bGVfbWFzayI6MH0=';
- mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
+ mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, {
state: stateLog,
});
- expect(stateCopy.traceState).toEqual(stateLog);
+ expect(stateCopy.jobLogState).toEqual(stateLog);
});
});
- describe('when traceSize is smaller than the total size', () => {
- it('sets isTraceSizeVisible to true', () => {
- mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, { total: 51184600, size: 1231 });
+ describe('when jobLogSize is smaller than the total size', () => {
+ it('sets isJobLogSizeVisible to true', () => {
+ mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, { total: 51184600, size: 1231 });
- expect(stateCopy.isTraceSizeVisible).toEqual(true);
+ expect(stateCopy.isJobLogSizeVisible).toEqual(true);
});
});
- describe('when traceSize is bigger than the total size', () => {
- it('sets isTraceSizeVisible to false', () => {
- const copy = { ...stateCopy, traceSize: 5118460, size: 2321312 };
+ describe('when jobLogSize is bigger than the total size', () => {
+ it('sets isJobLogSizeVisible to false', () => {
+ const copy = { ...stateCopy, jobLogSize: 5118460, size: 2321312 };
- mutations[types.RECEIVE_TRACE_SUCCESS](copy, { total: 511846 });
+ mutations[types.RECEIVE_JOB_LOG_SUCCESS](copy, { total: 511846 });
- expect(copy.isTraceSizeVisible).toEqual(false);
+ expect(copy.isJobLogSizeVisible).toEqual(false);
});
});
- it('sets trace, trace size and isTraceComplete', () => {
- mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
+ it('sets job log size and isJobLogComplete', () => {
+ mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, {
append: true,
html,
size: 511846,
@@ -85,15 +85,15 @@ describe('Jobs Store Mutations', () => {
lines: [],
});
- expect(stateCopy.traceSize).toEqual(511846);
- expect(stateCopy.isTraceComplete).toEqual(true);
+ expect(stateCopy.jobLogSize).toEqual(511846);
+ expect(stateCopy.isJobLogComplete).toEqual(true);
});
describe('with new job log', () => {
describe('log.lines', () => {
describe('when append is true', () => {
it('sets the parsed log ', () => {
- mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
+ mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, {
append: true,
size: 511846,
complete: true,
@@ -105,7 +105,7 @@ describe('Jobs Store Mutations', () => {
],
});
- expect(stateCopy.trace).toEqual([
+ expect(stateCopy.jobLog).toEqual([
{
offset: 1,
content: [{ text: 'Running with gitlab-runner 11.12.1 (5a147c92)' }],
@@ -117,7 +117,7 @@ describe('Jobs Store Mutations', () => {
describe('when it is defined', () => {
it('sets the parsed log ', () => {
- mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
+ mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, {
append: false,
size: 511846,
complete: true,
@@ -126,7 +126,7 @@ describe('Jobs Store Mutations', () => {
],
});
- expect(stateCopy.trace).toEqual([
+ expect(stateCopy.jobLog).toEqual([
{
offset: 0,
content: [{ text: 'Running with gitlab-runner 11.11.1 (5a147c92)' }],
@@ -138,7 +138,7 @@ describe('Jobs Store Mutations', () => {
describe('when it is null', () => {
it('sets the default value', () => {
- mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
+ mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, {
append: true,
html,
size: 511846,
@@ -146,30 +146,30 @@ describe('Jobs Store Mutations', () => {
lines: null,
});
- expect(stateCopy.trace).toEqual([]);
+ expect(stateCopy.jobLog).toEqual([]);
});
});
});
});
});
- describe('SET_TRACE_TIMEOUT', () => {
- it('sets the traceTimeout id', () => {
+ describe('SET_JOB_LOG_TIMEOUT', () => {
+ it('sets the jobLogTimeout id', () => {
const id = 7;
- expect(stateCopy.traceTimeout).not.toEqual(id);
+ expect(stateCopy.jobLogTimeout).not.toEqual(id);
- mutations[types.SET_TRACE_TIMEOUT](stateCopy, id);
+ mutations[types.SET_JOB_LOG_TIMEOUT](stateCopy, id);
- expect(stateCopy.traceTimeout).toEqual(id);
+ expect(stateCopy.jobLogTimeout).toEqual(id);
});
});
- describe('STOP_POLLING_TRACE', () => {
- it('sets isTraceComplete to true', () => {
- mutations[types.STOP_POLLING_TRACE](stateCopy);
+ describe('STOP_POLLING_JOB_LOG', () => {
+ it('sets isJobLogComplete to true', () => {
+ mutations[types.STOP_POLLING_JOB_LOG](stateCopy);
- expect(stateCopy.isTraceComplete).toEqual(true);
+ expect(stateCopy.isJobLogComplete).toEqual(true);
});
});
@@ -296,12 +296,12 @@ describe('Job Store mutations, feature flag ON', () => {
window.gon = origGon;
});
- describe('RECEIVE_TRACE_SUCCESS', () => {
+ describe('RECEIVE_JOB_LOG_SUCCESS', () => {
describe('with new job log', () => {
describe('log.lines', () => {
describe('when append is true', () => {
it('sets the parsed log ', () => {
- mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
+ mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, {
append: true,
size: 511846,
complete: true,
@@ -313,7 +313,7 @@ describe('Job Store mutations, feature flag ON', () => {
],
});
- expect(stateCopy.trace).toEqual([
+ expect(stateCopy.jobLog).toEqual([
{
offset: 1,
content: [{ text: 'Running with gitlab-runner 11.12.1 (5a147c92)' }],
@@ -325,7 +325,7 @@ describe('Job Store mutations, feature flag ON', () => {
describe('when lines are defined', () => {
it('sets the parsed log ', () => {
- mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
+ mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, {
append: false,
size: 511846,
complete: true,
@@ -334,7 +334,7 @@ describe('Job Store mutations, feature flag ON', () => {
],
});
- expect(stateCopy.trace).toEqual([
+ expect(stateCopy.jobLog).toEqual([
{
offset: 0,
content: [{ text: 'Running with gitlab-runner 11.11.1 (5a147c92)' }],
@@ -346,7 +346,7 @@ describe('Job Store mutations, feature flag ON', () => {
describe('when lines are null', () => {
it('sets the default value', () => {
- mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
+ mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, {
append: true,
html,
size: 511846,
@@ -354,7 +354,7 @@ describe('Job Store mutations, feature flag ON', () => {
lines: null,
});
- expect(stateCopy.trace).toEqual([]);
+ expect(stateCopy.jobLog).toEqual([]);
});
});
});
diff --git a/spec/frontend/jobs/store/utils_spec.js b/spec/frontend/jobs/store/utils_spec.js
index 0c5fa150002..92ac33c8792 100644
--- a/spec/frontend/jobs/store/utils_spec.js
+++ b/spec/frontend/jobs/store/utils_spec.js
@@ -1,7 +1,7 @@
import {
logLinesParser,
logLinesParserLegacy,
- updateIncrementalTrace,
+ updateIncrementalJobLog,
parseHeaderLine,
parseLine,
addDurationToHeader,
@@ -487,11 +487,11 @@ describe('Jobs Store Utils', () => {
});
});
- describe('updateIncrementalTrace', () => {
+ describe('updateIncrementalJobLog', () => {
describe('without repeated section', () => {
it('concats and parses both arrays', () => {
const oldLog = logLinesParserLegacy(originalTrace);
- const result = updateIncrementalTrace(regularIncremental, oldLog);
+ const result = updateIncrementalJobLog(regularIncremental, oldLog);
expect(result).toEqual([
{
@@ -519,7 +519,7 @@ describe('Jobs Store Utils', () => {
describe('with regular line repeated offset', () => {
it('updates the last line and formats with the incremental part', () => {
const oldLog = logLinesParserLegacy(originalTrace);
- const result = updateIncrementalTrace(regularIncrementalRepeated, oldLog);
+ const result = updateIncrementalJobLog(regularIncrementalRepeated, oldLog);
expect(result).toEqual([
{
@@ -538,7 +538,7 @@ describe('Jobs Store Utils', () => {
describe('with header line repeated', () => {
it('updates the header line and formats with the incremental part', () => {
const oldLog = logLinesParserLegacy(headerTrace);
- const result = updateIncrementalTrace(headerTraceIncremental, oldLog);
+ const result = updateIncrementalJobLog(headerTraceIncremental, oldLog);
expect(result).toEqual([
{
@@ -564,7 +564,7 @@ describe('Jobs Store Utils', () => {
describe('with collapsible line repeated', () => {
it('updates the collapsible line and formats with the incremental part', () => {
const oldLog = logLinesParserLegacy(collapsibleTrace);
- const result = updateIncrementalTrace(collapsibleTraceIncremental, oldLog);
+ const result = updateIncrementalJobLog(collapsibleTraceIncremental, oldLog);
expect(result).toEqual([
{
diff --git a/spec/frontend/lib/apollo/suppress_network_errors_during_navigation_link_spec.js b/spec/frontend/lib/apollo/suppress_network_errors_during_navigation_link_spec.js
new file mode 100644
index 00000000000..852106db44e
--- /dev/null
+++ b/spec/frontend/lib/apollo/suppress_network_errors_during_navigation_link_spec.js
@@ -0,0 +1,155 @@
+import { ApolloLink, Observable } from 'apollo-link';
+import waitForPromises from 'helpers/wait_for_promises';
+import { getSuppressNetworkErrorsDuringNavigationLink } from '~/lib/apollo/suppress_network_errors_during_navigation_link';
+import { isNavigatingAway } from '~/lib/utils/is_navigating_away';
+
+jest.mock('~/lib/utils/is_navigating_away');
+
+describe('getSuppressNetworkErrorsDuringNavigationLink', () => {
+ const originalGon = window.gon;
+ let subscription;
+
+ beforeEach(() => {
+ window.gon = originalGon;
+ });
+
+ afterEach(() => {
+ if (subscription) {
+ subscription.unsubscribe();
+ }
+ });
+
+ const makeMockGraphQLErrorLink = () =>
+ new ApolloLink(() =>
+ Observable.of({
+ errors: [
+ {
+ message: 'foo',
+ },
+ ],
+ }),
+ );
+
+ const makeMockNetworkErrorLink = () =>
+ new ApolloLink(
+ () =>
+ new Observable(() => {
+ throw new Error('NetworkError');
+ }),
+ );
+
+ const makeMockSuccessLink = () =>
+ new ApolloLink(() => Observable.of({ data: { foo: { id: 1 } } }));
+
+ const createSubscription = (otherLink, observer) => {
+ const mockOperation = { operationName: 'foo' };
+ const link = getSuppressNetworkErrorsDuringNavigationLink().concat(otherLink);
+ subscription = link.request(mockOperation).subscribe(observer);
+ };
+
+ describe('when disabled', () => {
+ it('returns null', () => {
+ expect(getSuppressNetworkErrorsDuringNavigationLink()).toBe(null);
+ });
+ });
+
+ describe('when enabled', () => {
+ beforeEach(() => {
+ window.gon = { features: { suppressApolloErrorsDuringNavigation: true } };
+ });
+
+ it('returns an ApolloLink', () => {
+ expect(getSuppressNetworkErrorsDuringNavigationLink()).toEqual(expect.any(ApolloLink));
+ });
+
+ describe('suppression case', () => {
+ describe('when navigating away', () => {
+ beforeEach(() => {
+ isNavigatingAway.mockReturnValue(true);
+ });
+
+ describe('given a network error', () => {
+ it('does not forward the error', async () => {
+ const spy = jest.fn();
+
+ createSubscription(makeMockNetworkErrorLink(), {
+ next: spy,
+ error: spy,
+ complete: spy,
+ });
+
+ // It's hard to test for something _not_ happening. The best we can
+ // do is wait a bit to make sure nothing happens.
+ await waitForPromises();
+ expect(spy).not.toHaveBeenCalled();
+ });
+ });
+ });
+ });
+
+ describe('non-suppression cases', () => {
+ describe('when not navigating away', () => {
+ beforeEach(() => {
+ isNavigatingAway.mockReturnValue(false);
+ });
+
+ it('forwards successful requests', (done) => {
+ createSubscription(makeMockSuccessLink(), {
+ next({ data }) {
+ expect(data).toEqual({ foo: { id: 1 } });
+ },
+ error: () => done.fail('Should not happen'),
+ complete: () => done(),
+ });
+ });
+
+ it('forwards GraphQL errors', (done) => {
+ createSubscription(makeMockGraphQLErrorLink(), {
+ next({ errors }) {
+ expect(errors).toEqual([{ message: 'foo' }]);
+ },
+ error: () => done.fail('Should not happen'),
+ complete: () => done(),
+ });
+ });
+
+ it('forwards network errors', (done) => {
+ createSubscription(makeMockNetworkErrorLink(), {
+ next: () => done.fail('Should not happen'),
+ error: (error) => {
+ expect(error.message).toBe('NetworkError');
+ done();
+ },
+ complete: () => done.fail('Should not happen'),
+ });
+ });
+ });
+
+ describe('when navigating away', () => {
+ beforeEach(() => {
+ isNavigatingAway.mockReturnValue(true);
+ });
+
+ it('forwards successful requests', (done) => {
+ createSubscription(makeMockSuccessLink(), {
+ next({ data }) {
+ expect(data).toEqual({ foo: { id: 1 } });
+ },
+ error: () => done.fail('Should not happen'),
+ complete: () => done(),
+ });
+ });
+
+ it('forwards GraphQL errors', (done) => {
+ createSubscription(makeMockGraphQLErrorLink(), {
+ next({ errors }) {
+ expect(errors).toEqual([{ message: 'foo' }]);
+ },
+ error: () => done.fail('Should not happen'),
+ complete: () => done(),
+ });
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/lib/logger/__snapshots__/hello_spec.js.snap b/spec/frontend/lib/logger/__snapshots__/hello_spec.js.snap
index 791ec05befd..0b156049dab 100644
--- a/spec/frontend/lib/logger/__snapshots__/hello_spec.js.snap
+++ b/spec/frontend/lib/logger/__snapshots__/hello_spec.js.snap
@@ -1,6 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`~/lib/logger/hello logHello console logs a friendly hello message 1`] = `
+exports[`~/lib/logger/hello logHello when on dot_com console logs a friendly hello message including the careers page 1`] = `
Array [
Array [
"%cWelcome to GitLab!%c
@@ -8,7 +8,24 @@ Array [
Does this page need fixes or improvements? Open an issue or contribute a merge request to help make GitLab more lovable. At GitLab, everyone can contribute!
🤝 Contribute to GitLab: https://about.gitlab.com/community/contribute/
-🔎 Create a new GitLab issue: https://gitlab.com/gitlab-org/gitlab/-/issues/new",
+🔎 Create a new GitLab issue: https://gitlab.com/gitlab-org/gitlab/-/issues/new
+🚀 We like your curiosity! Help us improve GitLab by joining the team: https://about.gitlab.com/jobs/",
+ "padding-top: 0.5em; font-size: 2em;",
+ "padding-bottom: 0.5em;",
+ ],
+]
+`;
+
+exports[`~/lib/logger/hello logHello when on self managed console logs a friendly hello message without including the careers page 1`] = `
+Array [
+ Array [
+ "%cWelcome to GitLab!%c
+
+Does this page need fixes or improvements? Open an issue or contribute a merge request to help make GitLab more lovable. At GitLab, everyone can contribute!
+
+🤝 Contribute to GitLab: https://about.gitlab.com/community/contribute/
+🔎 Create a new GitLab issue: https://gitlab.com/gitlab-org/gitlab/-/issues/new
+",
"padding-top: 0.5em; font-size: 2em;",
"padding-bottom: 0.5em;",
],
diff --git a/spec/frontend/lib/logger/hello_spec.js b/spec/frontend/lib/logger/hello_spec.js
index 39abe0e0dd0..39c1b55313b 100644
--- a/spec/frontend/lib/logger/hello_spec.js
+++ b/spec/frontend/lib/logger/hello_spec.js
@@ -9,12 +9,32 @@ describe('~/lib/logger/hello', () => {
});
describe('logHello', () => {
- it('console logs a friendly hello message', () => {
- expect(consoleLogSpy).not.toHaveBeenCalled();
+ describe('when on dot_com', () => {
+ beforeEach(() => {
+ gon.dot_com = true;
+ });
- logHello();
+ it('console logs a friendly hello message including the careers page', () => {
+ expect(consoleLogSpy).not.toHaveBeenCalled();
- expect(consoleLogSpy.mock.calls).toMatchSnapshot();
+ logHello();
+
+ expect(consoleLogSpy.mock.calls).toMatchSnapshot();
+ });
+ });
+
+ describe('when on self managed', () => {
+ beforeEach(() => {
+ gon.dot_com = false;
+ });
+
+ it('console logs a friendly hello message without including the careers page', () => {
+ expect(consoleLogSpy).not.toHaveBeenCalled();
+
+ logHello();
+
+ expect(consoleLogSpy.mock.calls).toMatchSnapshot();
+ });
});
});
});
diff --git a/spec/frontend/lib/utils/color_utils_spec.js b/spec/frontend/lib/utils/color_utils_spec.js
index c6b88b2957c..87966cf9fba 100644
--- a/spec/frontend/lib/utils/color_utils_spec.js
+++ b/spec/frontend/lib/utils/color_utils_spec.js
@@ -1,4 +1,5 @@
import {
+ isValidColorExpression,
textColorForBackground,
hexToRgb,
validateHexColor,
@@ -72,4 +73,21 @@ describe('Color utils', () => {
},
);
});
+
+ describe('isValidColorExpression', () => {
+ it.each`
+ colorExpression | valid | desc
+ ${'#F00'} | ${true} | ${'valid'}
+ ${'rgba(0,0,0,0)'} | ${true} | ${'valid'}
+ ${'hsl(540,70%,50%)'} | ${true} | ${'valid'}
+ ${'red'} | ${true} | ${'valid'}
+ ${'F00'} | ${false} | ${'invalid'}
+ ${'F00'} | ${false} | ${'invalid'}
+ ${'gba(0,0,0,0)'} | ${false} | ${'invalid'}
+ ${'hls(540,70%,50%)'} | ${false} | ${'invalid'}
+ ${'hello'} | ${false} | ${'invalid'}
+ `('color expression $colorExpression is $desc', ({ colorExpression, valid }) => {
+ expect(isValidColorExpression(colorExpression)).toBe(valid);
+ });
+ });
});
diff --git a/spec/frontend/lib/utils/datetime/date_format_utility_spec.js b/spec/frontend/lib/utils/datetime/date_format_utility_spec.js
index 942ba56196e..1adc70450e8 100644
--- a/spec/frontend/lib/utils/datetime/date_format_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime/date_format_utility_spec.js
@@ -118,3 +118,18 @@ describe('date_format_utility.js', () => {
});
});
});
+
+describe('formatTimeAsSummary', () => {
+ it.each`
+ unit | value | result
+ ${'months'} | ${1.5} | ${'1.5M'}
+ ${'weeks'} | ${1.25} | ${'1.5w'}
+ ${'days'} | ${2} | ${'2d'}
+ ${'hours'} | ${10} | ${'10h'}
+ ${'minutes'} | ${20} | ${'20m'}
+ ${'seconds'} | ${10} | ${'<1m'}
+ ${'seconds'} | ${0} | ${'-'}
+ `('will format $value $unit to $result', ({ unit, value, result }) => {
+ expect(utils.formatTimeAsSummary({ [unit]: value })).toBe(result);
+ });
+});
diff --git a/spec/frontend/lib/utils/is_navigating_away_spec.js b/spec/frontend/lib/utils/is_navigating_away_spec.js
new file mode 100644
index 00000000000..e1230fe96bf
--- /dev/null
+++ b/spec/frontend/lib/utils/is_navigating_away_spec.js
@@ -0,0 +1,23 @@
+import { isNavigatingAway, setNavigatingForTestsOnly } from '~/lib/utils/is_navigating_away';
+
+describe('isNavigatingAway', () => {
+ beforeEach(() => {
+ // Make sure each test starts with the same state
+ setNavigatingForTestsOnly(false);
+ });
+
+ it.each([false, true])('it returns the navigation flag with value %s', (flag) => {
+ setNavigatingForTestsOnly(flag);
+ expect(isNavigatingAway()).toEqual(flag);
+ });
+
+ describe('when the browser starts navigating away', () => {
+ it('returns true', () => {
+ expect(isNavigatingAway()).toEqual(false);
+
+ window.dispatchEvent(new Event('beforeunload'));
+
+ expect(isNavigatingAway()).toEqual(true);
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/text_utility_spec.js b/spec/frontend/lib/utils/text_utility_spec.js
index 1f3659b5c76..9570d2a831c 100644
--- a/spec/frontend/lib/utils/text_utility_spec.js
+++ b/spec/frontend/lib/utils/text_utility_spec.js
@@ -363,4 +363,25 @@ describe('text_utility', () => {
expect(textUtils.insertFinalNewline(input, '\r\n')).toBe(output);
});
});
+
+ describe('escapeShellString', () => {
+ it.each`
+ character | input | output
+ ${'"'} | ${'";echo "you_shouldnt_run_this'} | ${'\'";echo "you_shouldnt_run_this\''}
+ ${'$'} | ${'$IFS'} | ${"'$IFS'"}
+ ${'\\'} | ${'evil-branch-name\\'} | ${"'evil-branch-name\\'"}
+ ${'!'} | ${'!event'} | ${"'!event'"}
+ `(
+ 'should not escape the $character character but wrap in single-quotes',
+ ({ input, output }) => {
+ expect(textUtils.escapeShellString(input)).toBe(output);
+ },
+ );
+
+ it("should escape the ' character and wrap in single-quotes", () => {
+ expect(textUtils.escapeShellString("fix-'bug-behavior'")).toBe(
+ "'fix-'\\''bug-behavior'\\'''",
+ );
+ });
+ });
});
diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js
index 6f186ba3227..18b68d91e01 100644
--- a/spec/frontend/lib/utils/url_utility_spec.js
+++ b/spec/frontend/lib/utils/url_utility_spec.js
@@ -1004,4 +1004,39 @@ describe('URL utility', () => {
expect(urlUtils.isSameOriginUrl(url)).toBe(expected);
});
});
+
+ describe('constructWebIDEPath', () => {
+ let originalGl;
+ const projectIDEPath = '/foo/bar';
+ const sourceProj = 'my_-fancy-proj/boo';
+ const targetProj = 'boo/another-fancy-proj';
+ const mrIid = '7';
+
+ beforeEach(() => {
+ originalGl = window.gl;
+ window.gl = { webIDEPath: projectIDEPath };
+ });
+
+ afterEach(() => {
+ window.gl = originalGl;
+ });
+
+ it.each`
+ sourceProjectFullPath | targetProjectFullPath | iid | expectedPath
+ ${undefined} | ${undefined} | ${undefined} | ${projectIDEPath}
+ ${undefined} | ${undefined} | ${mrIid} | ${projectIDEPath}
+ ${undefined} | ${targetProj} | ${undefined} | ${projectIDEPath}
+ ${undefined} | ${targetProj} | ${mrIid} | ${projectIDEPath}
+ ${sourceProj} | ${undefined} | ${undefined} | ${projectIDEPath}
+ ${sourceProj} | ${targetProj} | ${undefined} | ${projectIDEPath}
+ ${sourceProj} | ${undefined} | ${mrIid} | ${`/-/ide/project/${sourceProj}/merge_requests/${mrIid}?target_project=`}
+ ${sourceProj} | ${sourceProj} | ${mrIid} | ${`/-/ide/project/${sourceProj}/merge_requests/${mrIid}?target_project=`}
+ ${sourceProj} | ${targetProj} | ${mrIid} | ${`/-/ide/project/${sourceProj}/merge_requests/${mrIid}?target_project=${encodeURIComponent(targetProj)}`}
+ `(
+ 'returns $expectedPath for "$sourceProjectFullPath + $targetProjectFullPath + $iid"',
+ ({ expectedPath, ...args } = {}) => {
+ expect(urlUtils.constructWebIDEPath(args)).toBe(expectedPath);
+ },
+ );
+ });
});
diff --git a/spec/frontend/members/components/action_buttons/remove_member_button_spec.js b/spec/frontend/members/components/action_buttons/remove_member_button_spec.js
index d8453d453e7..7eb0ea37fe6 100644
--- a/spec/frontend/members/components/action_buttons/remove_member_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/remove_member_button_spec.js
@@ -45,7 +45,7 @@ describe('RemoveMemberButton', () => {
title: 'Remove member',
isAccessRequest: true,
isInvite: true,
- oncallSchedules: { name: 'user', schedules: [] },
+ userDeletionObstacles: { name: 'user', obstacles: [] },
...propsData,
},
directives: {
diff --git a/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
index 0aa3780f030..10e451376c8 100644
--- a/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
+++ b/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
@@ -2,6 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import LeaveButton from '~/members/components/action_buttons/leave_button.vue';
import RemoveMemberButton from '~/members/components/action_buttons/remove_member_button.vue';
import UserActionButtons from '~/members/components/action_buttons/user_action_buttons.vue';
+import { parseUserDeletionObstacles } from '~/vue_shared/components/user_deletion_obstacles/utils';
import { member, orphanedMember } from '../../mock_data';
describe('UserActionButtons', () => {
@@ -45,9 +46,9 @@ describe('UserActionButtons', () => {
isAccessRequest: false,
isInvite: false,
icon: 'remove',
- oncallSchedules: {
+ userDeletionObstacles: {
name: member.user.name,
- schedules: member.user.oncallSchedules,
+ obstacles: parseUserDeletionObstacles(member.user),
},
});
});
diff --git a/spec/frontend/members/components/modals/leave_modal_spec.js b/spec/frontend/members/components/modals/leave_modal_spec.js
index 1dc913e5c78..f755f08dbf2 100644
--- a/spec/frontend/members/components/modals/leave_modal_spec.js
+++ b/spec/frontend/members/components/modals/leave_modal_spec.js
@@ -6,7 +6,8 @@ import { nextTick } from 'vue';
import Vuex from 'vuex';
import LeaveModal from '~/members/components/modals/leave_modal.vue';
import { LEAVE_MODAL_ID, MEMBER_TYPES } from '~/members/constants';
-import OncallSchedulesList from '~/vue_shared/components/oncall_schedules_list.vue';
+import UserDeletionObstaclesList from '~/vue_shared/components/user_deletion_obstacles/user_deletion_obstacles_list.vue';
+import { parseUserDeletionObstacles } from '~/vue_shared/components/user_deletion_obstacles/utils';
import { member } from '../../mock_data';
jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
@@ -51,7 +52,7 @@ describe('LeaveModal', () => {
const findModal = () => wrapper.findComponent(GlModal);
const findForm = () => findModal().findComponent(GlForm);
- const findOncallSchedulesList = () => findModal().findComponent(OncallSchedulesList);
+ const findUserDeletionObstaclesList = () => findModal().findComponent(UserDeletionObstaclesList);
const getByText = (text, options) =>
createWrapper(within(findModal().element).getByText(text, options));
@@ -89,25 +90,27 @@ describe('LeaveModal', () => {
);
});
- describe('On-call schedules list', () => {
- it("displays oncall schedules list when member's user is part of on-call schedules ", () => {
- const schedulesList = findOncallSchedulesList();
- expect(schedulesList.exists()).toBe(true);
- expect(schedulesList.props()).toMatchObject({
+ describe('User deletion obstacles list', () => {
+ it("displays obstacles list when member's user is part of on-call management", () => {
+ const obstaclesList = findUserDeletionObstaclesList();
+ expect(obstaclesList.exists()).toBe(true);
+ expect(obstaclesList.props()).toMatchObject({
isCurrentUser: true,
- schedules: member.user.oncallSchedules,
+ obstacles: parseUserDeletionObstacles(member.user),
});
});
- it("does NOT display oncall schedules list when member's user is NOT a part of on-call schedules ", async () => {
+ it("does NOT display obstacles list when member's user is NOT a part of on-call management", async () => {
wrapper.destroy();
- const memberWithoutOncallSchedules = cloneDeep(member);
- delete memberWithoutOncallSchedules.user.oncallSchedules;
- createComponent({ member: memberWithoutOncallSchedules });
+ const memberWithoutOncall = cloneDeep(member);
+ delete memberWithoutOncall.user.oncallSchedules;
+ delete memberWithoutOncall.user.escalationPolicies;
+
+ createComponent({ member: memberWithoutOncall });
await nextTick();
- expect(findOncallSchedulesList().exists()).toBe(false);
+ expect(findUserDeletionObstaclesList().exists()).toBe(false);
});
});
diff --git a/spec/frontend/members/components/modals/remove_member_modal_spec.js b/spec/frontend/members/components/modals/remove_member_modal_spec.js
index 1dc41582c12..1d39c4b3175 100644
--- a/spec/frontend/members/components/modals/remove_member_modal_spec.js
+++ b/spec/frontend/members/components/modals/remove_member_modal_spec.js
@@ -4,15 +4,19 @@ import Vue from 'vue';
import Vuex from 'vuex';
import RemoveMemberModal from '~/members/components/modals/remove_member_modal.vue';
import { MEMBER_TYPES } from '~/members/constants';
-import OncallSchedulesList from '~/vue_shared/components/oncall_schedules_list.vue';
+import { OBSTACLE_TYPES } from '~/vue_shared/components/user_deletion_obstacles/constants';
+import UserDeletionObstaclesList from '~/vue_shared/components/user_deletion_obstacles/user_deletion_obstacles_list.vue';
Vue.use(Vuex);
describe('RemoveMemberModal', () => {
const memberPath = '/gitlab-org/gitlab-test/-/project_members/90';
- const mockSchedules = {
+ const mockObstacles = {
name: 'User1',
- schedules: [{ id: 1, name: 'Schedule 1' }],
+ obstacles: [
+ { name: 'Schedule 1', type: OBSTACLE_TYPES.oncallSchedules },
+ { name: 'Policy 1', type: OBSTACLE_TYPES.escalationPolicies },
+ ],
};
let wrapper;
@@ -44,18 +48,18 @@ describe('RemoveMemberModal', () => {
const findForm = () => wrapper.find({ ref: 'form' });
const findGlModal = () => wrapper.findComponent(GlModal);
- const findOnCallSchedulesList = () => wrapper.findComponent(OncallSchedulesList);
+ const findUserDeletionObstaclesList = () => wrapper.findComponent(UserDeletionObstaclesList);
afterEach(() => {
wrapper.destroy();
});
describe.each`
- state | memberType | isAccessRequest | isInvite | actionText | removeSubMembershipsCheckboxExpected | unassignIssuablesCheckboxExpected | message | onCallSchedules
- ${'removing a group member'} | ${'GroupMember'} | ${false} | ${false} | ${'Remove member'} | ${true} | ${true} | ${'Are you sure you want to remove Jane Doe from the Gitlab Org / Gitlab Test project?'} | ${{}}
- ${'removing a project member'} | ${'ProjectMember'} | ${false} | ${false} | ${'Remove member'} | ${false} | ${true} | ${'Are you sure you want to remove Jane Doe from the Gitlab Org / Gitlab Test project?'} | ${mockSchedules}
- ${'denying an access request'} | ${'ProjectMember'} | ${true} | ${false} | ${'Deny access request'} | ${false} | ${false} | ${"Are you sure you want to deny Jane Doe's request to join the Gitlab Org / Gitlab Test project?"} | ${{}}
- ${'revoking invite'} | ${'ProjectMember'} | ${false} | ${true} | ${'Revoke invite'} | ${false} | ${false} | ${'Are you sure you want to revoke the invitation for foo@bar.com to join the Gitlab Org / Gitlab Test project?'} | ${mockSchedules}
+ state | memberType | isAccessRequest | isInvite | actionText | removeSubMembershipsCheckboxExpected | unassignIssuablesCheckboxExpected | message | userDeletionObstacles | isPartOfOncall
+ ${'removing a group member'} | ${'GroupMember'} | ${false} | ${false} | ${'Remove member'} | ${true} | ${true} | ${'Are you sure you want to remove Jane Doe from the Gitlab Org / Gitlab Test project?'} | ${{}} | ${false}
+ ${'removing a project member'} | ${'ProjectMember'} | ${false} | ${false} | ${'Remove member'} | ${false} | ${true} | ${'Are you sure you want to remove Jane Doe from the Gitlab Org / Gitlab Test project?'} | ${mockObstacles} | ${true}
+ ${'denying an access request'} | ${'ProjectMember'} | ${true} | ${false} | ${'Deny access request'} | ${false} | ${false} | ${"Are you sure you want to deny Jane Doe's request to join the Gitlab Org / Gitlab Test project?"} | ${{}} | ${false}
+ ${'revoking invite'} | ${'ProjectMember'} | ${false} | ${true} | ${'Revoke invite'} | ${false} | ${false} | ${'Are you sure you want to revoke the invitation for foo@bar.com to join the Gitlab Org / Gitlab Test project?'} | ${mockObstacles} | ${false}
`(
'when $state',
({
@@ -66,7 +70,8 @@ describe('RemoveMemberModal', () => {
message,
removeSubMembershipsCheckboxExpected,
unassignIssuablesCheckboxExpected,
- onCallSchedules,
+ userDeletionObstacles,
+ isPartOfOncall,
}) => {
beforeEach(() => {
createComponent({
@@ -75,12 +80,10 @@ describe('RemoveMemberModal', () => {
message,
memberPath,
memberType,
- onCallSchedules,
+ userDeletionObstacles,
});
});
- const isPartOfOncallSchedules = Boolean(isAccessRequest && onCallSchedules.schedules?.length);
-
it(`has the title ${actionText}`, () => {
expect(findGlModal().attributes('title')).toBe(actionText);
});
@@ -109,8 +112,8 @@ describe('RemoveMemberModal', () => {
);
});
- it(`shows ${isPartOfOncallSchedules ? 'all' : 'no'} related on-call schedules`, () => {
- expect(findOnCallSchedulesList().exists()).toBe(isPartOfOncallSchedules);
+ it(`shows ${isPartOfOncall ? 'all' : 'no'} related on-call schedules or policies`, () => {
+ expect(findUserDeletionObstaclesList().exists()).toBe(isPartOfOncall);
});
it('submits the form when the modal is submitted', () => {
diff --git a/spec/frontend/members/components/table/expires_at_spec.js b/spec/frontend/members/components/table/expires_at_spec.js
deleted file mode 100644
index 2b8e6ab8f2a..00000000000
--- a/spec/frontend/members/components/table/expires_at_spec.js
+++ /dev/null
@@ -1,86 +0,0 @@
-import { within } from '@testing-library/dom';
-import { mount, createWrapper } from '@vue/test-utils';
-import { useFakeDate } from 'helpers/fake_date';
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import ExpiresAt from '~/members/components/table/expires_at.vue';
-
-describe('ExpiresAt', () => {
- // March 15th, 2020
- useFakeDate(2020, 2, 15);
-
- let wrapper;
-
- const createComponent = (propsData) => {
- wrapper = mount(ExpiresAt, {
- propsData,
- directives: {
- GlTooltip: createMockDirective(),
- },
- });
- };
-
- const getByText = (text, options) =>
- createWrapper(within(wrapper.element).getByText(text, options));
-
- const getTooltipDirective = (elementWrapper) => getBinding(elementWrapper.element, 'gl-tooltip');
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('when no expiration date is set', () => {
- it('displays "No expiration set"', () => {
- createComponent({ date: null });
-
- expect(getByText('No expiration set').exists()).toBe(true);
- });
- });
-
- describe('when expiration date is in the past', () => {
- let expiredText;
-
- beforeEach(() => {
- createComponent({ date: '2019-03-15T00:00:00.000' });
-
- expiredText = getByText('Expired');
- });
-
- it('displays "Expired"', () => {
- expect(expiredText.exists()).toBe(true);
- expect(expiredText.classes()).toContain('gl-text-red-500');
- });
-
- it('displays tooltip with formatted date', () => {
- const tooltipDirective = getTooltipDirective(expiredText);
-
- expect(tooltipDirective).not.toBeUndefined();
- expect(expiredText.attributes('title')).toBe('Mar 15, 2019 12:00am UTC');
- });
- });
-
- describe('when expiration date is in the future', () => {
- it.each`
- date | expected | warningColor
- ${'2020-03-23T00:00:00.000'} | ${'in 8 days'} | ${false}
- ${'2020-03-20T00:00:00.000'} | ${'in 5 days'} | ${true}
- ${'2020-03-16T00:00:00.000'} | ${'in 1 day'} | ${true}
- ${'2020-03-15T05:00:00.000'} | ${'in about 5 hours'} | ${true}
- ${'2020-03-15T01:00:00.000'} | ${'in about 1 hour'} | ${true}
- ${'2020-03-15T00:30:00.000'} | ${'in 30 minutes'} | ${true}
- ${'2020-03-15T00:01:15.000'} | ${'in 1 minute'} | ${true}
- ${'2020-03-15T00:00:15.000'} | ${'in less than a minute'} | ${true}
- `('displays "$expected"', ({ date, expected, warningColor }) => {
- createComponent({ date });
-
- const expiredText = getByText(expected);
-
- expect(expiredText.exists()).toBe(true);
-
- if (warningColor) {
- expect(expiredText.classes()).toContain('gl-text-orange-500');
- } else {
- expect(expiredText.classes()).not.toContain('gl-text-orange-500');
- }
- });
- });
-});
diff --git a/spec/frontend/members/components/table/members_table_spec.js b/spec/frontend/members/components/table/members_table_spec.js
index 6885da53b26..580e5edd652 100644
--- a/spec/frontend/members/components/table/members_table_spec.js
+++ b/spec/frontend/members/components/table/members_table_spec.js
@@ -1,22 +1,24 @@
import { GlBadge, GlPagination, GlTable } from '@gitlab/ui';
-import {
- getByText as getByTextHelper,
- getByTestId as getByTestIdHelper,
- within,
-} from '@testing-library/dom';
-import { mount, createLocalVue, createWrapper } from '@vue/test-utils';
+import { createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import setWindowLocation from 'helpers/set_window_location_helper';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
import CreatedAt from '~/members/components/table/created_at.vue';
import ExpirationDatepicker from '~/members/components/table/expiration_datepicker.vue';
-import ExpiresAt from '~/members/components/table/expires_at.vue';
import MemberActionButtons from '~/members/components/table/member_action_buttons.vue';
import MemberAvatar from '~/members/components/table/member_avatar.vue';
import MemberSource from '~/members/components/table/member_source.vue';
import MembersTable from '~/members/components/table/members_table.vue';
import RoleDropdown from '~/members/components/table/role_dropdown.vue';
-import { MEMBER_TYPES, TAB_QUERY_PARAM_VALUES } from '~/members/constants';
+import {
+ MEMBER_TYPES,
+ MEMBER_STATE_CREATED,
+ MEMBER_STATE_AWAITING,
+ MEMBER_STATE_ACTIVE,
+ USER_STATE_BLOCKED_PENDING_APPROVAL,
+ BADGE_LABELS_PENDING_OWNER_APPROVAL,
+ TAB_QUERY_PARAM_VALUES,
+} from '~/members/constants';
import * as initUserPopovers from '~/user_popovers';
import {
member as memberMock,
@@ -53,7 +55,7 @@ describe('MembersTable', () => {
};
const createComponent = (state, provide = {}) => {
- wrapper = mount(MembersTable, {
+ wrapper = mountExtended(MembersTable, {
localVue,
propsData: {
tabQueryParamValue: TAB_QUERY_PARAM_VALUES.invite,
@@ -68,7 +70,6 @@ describe('MembersTable', () => {
stubs: [
'member-avatar',
'member-source',
- 'expires-at',
'created-at',
'member-action-buttons',
'role-dropdown',
@@ -81,17 +82,11 @@ describe('MembersTable', () => {
const url = 'https://localhost/foo-bar/-/project_members?tab=invited';
- const getByText = (text, options) =>
- createWrapper(getByTextHelper(wrapper.element, text, options));
-
- const getByTestId = (id, options) =>
- createWrapper(getByTestIdHelper(wrapper.element, id, options));
-
const findTable = () => wrapper.find(GlTable);
const findTableCellByMemberId = (tableCellLabel, memberId) =>
- getByTestId(`members-table-row-${memberId}`).find(
- `[data-label="${tableCellLabel}"][role="cell"]`,
- );
+ wrapper
+ .findByTestId(`members-table-row-${memberId}`)
+ .find(`[data-label="${tableCellLabel}"][role="cell"]`);
const findPagination = () => extendedWrapper(wrapper.find(GlPagination));
@@ -103,7 +98,6 @@ describe('MembersTable', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
describe('fields', () => {
@@ -119,7 +113,6 @@ describe('MembersTable', () => {
${'granted'} | ${'Access granted'} | ${memberMock} | ${CreatedAt}
${'invited'} | ${'Invited'} | ${invite} | ${CreatedAt}
${'requested'} | ${'Requested'} | ${accessRequest} | ${CreatedAt}
- ${'expires'} | ${'Access expires'} | ${memberMock} | ${ExpiresAt}
${'maxRole'} | ${'Max role'} | ${memberCanUpdate} | ${RoleDropdown}
${'expiration'} | ${'Expiration'} | ${memberMock} | ${ExpirationDatepicker}
`('renders the $label field', ({ field, label, member, expectedComponent }) => {
@@ -128,7 +121,7 @@ describe('MembersTable', () => {
tableFields: [field],
});
- expect(getByText(label, { selector: '[role="columnheader"]' }).exists()).toBe(true);
+ expect(wrapper.findByText(label, { selector: '[role="columnheader"]' }).exists()).toBe(true);
if (expectedComponent) {
expect(
@@ -137,11 +130,50 @@ describe('MembersTable', () => {
}
});
+ describe('Invited column', () => {
+ describe.each`
+ state | userState | expectedBadgeLabel
+ ${MEMBER_STATE_CREATED} | ${null} | ${''}
+ ${MEMBER_STATE_CREATED} | ${USER_STATE_BLOCKED_PENDING_APPROVAL} | ${BADGE_LABELS_PENDING_OWNER_APPROVAL}
+ ${MEMBER_STATE_AWAITING} | ${''} | ${''}
+ ${MEMBER_STATE_AWAITING} | ${USER_STATE_BLOCKED_PENDING_APPROVAL} | ${BADGE_LABELS_PENDING_OWNER_APPROVAL}
+ ${MEMBER_STATE_AWAITING} | ${'something_else'} | ${BADGE_LABELS_PENDING_OWNER_APPROVAL}
+ ${MEMBER_STATE_ACTIVE} | ${null} | ${''}
+ ${MEMBER_STATE_ACTIVE} | ${'something_else'} | ${''}
+ `('Invited Badge', ({ state, userState, expectedBadgeLabel }) => {
+ it(`${
+ expectedBadgeLabel ? 'shows' : 'hides'
+ } invited badge if user status: '${userState}' and member state: '${state}'`, () => {
+ createComponent({
+ members: [
+ {
+ ...invite,
+ state,
+ invite: {
+ ...invite.invite,
+ userState,
+ },
+ },
+ ],
+ tableFields: ['invited'],
+ });
+
+ const invitedTab = wrapper.findByTestId('invited-badge');
+
+ if (expectedBadgeLabel) {
+ expect(invitedTab.text()).toBe(expectedBadgeLabel);
+ } else {
+ expect(invitedTab.exists()).toBe(false);
+ }
+ });
+ });
+ });
+
describe('"Actions" field', () => {
it('renders "Actions" field for screen readers', () => {
createComponent({ members: [memberCanUpdate], tableFields: ['actions'] });
- const actionField = getByTestId('col-actions');
+ const actionField = wrapper.findByTestId('col-actions');
expect(actionField.exists()).toBe(true);
expect(actionField.classes('gl-sr-only')).toBe(true);
@@ -154,7 +186,7 @@ describe('MembersTable', () => {
it('does not render the "Actions" field', () => {
createComponent({ tableFields: ['actions'] }, { currentUserId: null });
- expect(within(wrapper.element).queryByTestId('col-actions')).toBe(null);
+ expect(wrapper.findByTestId('col-actions').exists()).toBe(false);
});
});
@@ -177,7 +209,7 @@ describe('MembersTable', () => {
it('renders the "Actions" field', () => {
createComponent({ members, tableFields: ['actions'] });
- expect(getByTestId('col-actions').exists()).toBe(true);
+ expect(wrapper.findByTestId('col-actions').exists()).toBe(true);
expect(findTableCellByMemberId('Actions', members[0].id).classes()).toStrictEqual([
'col-actions',
@@ -199,7 +231,7 @@ describe('MembersTable', () => {
it('does not render the "Actions" field', () => {
createComponent({ members, tableFields: ['actions'] });
- expect(within(wrapper.element).queryByTestId('col-actions')).toBe(null);
+ expect(wrapper.findByTestId('col-actions').exists()).toBe(false);
});
});
});
@@ -209,7 +241,7 @@ describe('MembersTable', () => {
it('displays a "No members found" message', () => {
createComponent();
- expect(getByText('No members found').exists()).toBe(true);
+ expect(wrapper.findByText('No members found').exists()).toBe(true);
});
});
diff --git a/spec/frontend/members/mock_data.js b/spec/frontend/members/mock_data.js
index eb9f905fea2..f42ee295511 100644
--- a/spec/frontend/members/mock_data.js
+++ b/spec/frontend/members/mock_data.js
@@ -1,4 +1,4 @@
-import { MEMBER_TYPES } from '~/members/constants';
+import { MEMBER_TYPES, MEMBER_STATE_CREATED } from '~/members/constants';
export const member = {
requestedAt: null,
@@ -14,6 +14,7 @@ export const member = {
webUrl: 'https://gitlab.com/groups/foo-bar',
},
type: 'GroupMember',
+ state: MEMBER_STATE_CREATED,
user: {
id: 123,
name: 'Administrator',
@@ -23,6 +24,7 @@ export const member = {
blocked: false,
twoFactorEnabled: false,
oncallSchedules: [{ name: 'schedule 1' }],
+ escalationPolicies: [{ name: 'policy 1' }],
},
id: 238,
createdAt: '2020-07-17T16:22:46.923Z',
@@ -63,12 +65,13 @@ export const modalData = {
memberPath: '/groups/foo-bar/-/group_members/1',
memberType: 'GroupMember',
message: 'Are you sure you want to remove John Smith?',
- oncallSchedules: { name: 'user', schedules: [] },
+ userDeletionObstacles: { name: 'user', obstacles: [] },
};
const { user, ...memberNoUser } = member;
export const invite = {
...memberNoUser,
+ state: MEMBER_STATE_CREATED,
invite: {
email: 'jewel@hudsonwalter.biz',
avatarUrl: 'https://www.gravatar.com/avatar/cbab7510da7eec2f60f638261b05436d?s=80&d=identicon',
diff --git a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
index 05538dbaeee..47b6c463377 100644
--- a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
@@ -37,6 +37,7 @@ exports[`Dashboard template matches the default snapshot 1`] = `
category="primary"
class="flex-grow-1"
clearalltext="Clear all"
+ clearalltextclass="gl-px-5"
data-qa-selector="environments_dropdown"
headertext=""
hideheaderborder="true"
@@ -44,7 +45,6 @@ exports[`Dashboard template matches the default snapshot 1`] = `
highlighteditemstitleclass="gl-px-5"
id="monitor-environments-dropdown"
menu-class="monitor-environment-dropdown-menu"
- showhighlighteditemstitle="true"
size="medium"
text="production"
toggleclass="dropdown-menu-toggle"
diff --git a/spec/frontend/monitoring/fixture_data.js b/spec/frontend/monitoring/fixture_data.js
index d20a111c701..6a19815883a 100644
--- a/spec/frontend/monitoring/fixture_data.js
+++ b/spec/frontend/monitoring/fixture_data.js
@@ -1,3 +1,4 @@
+import fixture from 'test_fixtures/metrics_dashboard/environment_metrics_dashboard.json';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { metricStates } from '~/monitoring/constants';
import { mapToDashboardViewModel } from '~/monitoring/stores/utils';
@@ -5,10 +6,7 @@ import { stateAndPropsFromDataset } from '~/monitoring/utils';
import { metricsResult } from './mock_data';
-// Use globally available `getJSONFixture` so this file can be imported by both karma and jest specs
-export const metricsDashboardResponse = getJSONFixture(
- 'metrics_dashboard/environment_metrics_dashboard.json',
-);
+export const metricsDashboardResponse = fixture;
export const metricsDashboardPayload = metricsDashboardResponse.dashboard;
diff --git a/spec/frontend/namespace_select_spec.js b/spec/frontend/namespace_select_spec.js
deleted file mode 100644
index a38508dd601..00000000000
--- a/spec/frontend/namespace_select_spec.js
+++ /dev/null
@@ -1,65 +0,0 @@
-import initDeprecatedJQueryDropdown from '~/deprecated_jquery_dropdown';
-import NamespaceSelect from '~/namespace_select';
-
-jest.mock('~/deprecated_jquery_dropdown');
-
-describe('NamespaceSelect', () => {
- it('initializes deprecatedJQueryDropdown', () => {
- const dropdown = document.createElement('div');
-
- // eslint-disable-next-line no-new
- new NamespaceSelect({ dropdown });
-
- expect(initDeprecatedJQueryDropdown).toHaveBeenCalled();
- });
-
- describe('as input', () => {
- let deprecatedJQueryDropdownOptions;
-
- beforeEach(() => {
- const dropdown = document.createElement('div');
- // eslint-disable-next-line no-new
- new NamespaceSelect({ dropdown });
- [[, deprecatedJQueryDropdownOptions]] = initDeprecatedJQueryDropdown.mock.calls;
- });
-
- it('prevents click events', () => {
- const dummyEvent = new Event('dummy');
- jest.spyOn(dummyEvent, 'preventDefault').mockImplementation(() => {});
-
- // expect(foo).toContain('test');
- deprecatedJQueryDropdownOptions.clicked({ e: dummyEvent });
-
- expect(dummyEvent.preventDefault).toHaveBeenCalled();
- });
- });
-
- describe('as filter', () => {
- let deprecatedJQueryDropdownOptions;
-
- beforeEach(() => {
- const dropdown = document.createElement('div');
- dropdown.dataset.isFilter = 'true';
- // eslint-disable-next-line no-new
- new NamespaceSelect({ dropdown });
- [[, deprecatedJQueryDropdownOptions]] = initDeprecatedJQueryDropdown.mock.calls;
- });
-
- it('does not prevent click events', () => {
- const dummyEvent = new Event('dummy');
- jest.spyOn(dummyEvent, 'preventDefault').mockImplementation(() => {});
-
- deprecatedJQueryDropdownOptions.clicked({ e: dummyEvent });
-
- expect(dummyEvent.preventDefault).not.toHaveBeenCalled();
- });
-
- it('sets URL of dropdown items', () => {
- const dummyNamespace = { id: 'eal' };
-
- const itemUrl = deprecatedJQueryDropdownOptions.url(dummyNamespace);
-
- expect(itemUrl).toContain(`namespace_id=${dummyNamespace.id}`);
- });
- });
-});
diff --git a/spec/frontend/notebook/cells/code_spec.js b/spec/frontend/notebook/cells/code_spec.js
index e14767f2594..669bdc2f89a 100644
--- a/spec/frontend/notebook/cells/code_spec.js
+++ b/spec/frontend/notebook/cells/code_spec.js
@@ -1,14 +1,17 @@
import Vue from 'vue';
+import fixture from 'test_fixtures/blob/notebook/basic.json';
import CodeComponent from '~/notebook/cells/code.vue';
const Component = Vue.extend(CodeComponent);
describe('Code component', () => {
let vm;
+
let json;
beforeEach(() => {
- json = getJSONFixture('blob/notebook/basic.json');
+ // Clone fixture as it could be modified by tests
+ json = JSON.parse(JSON.stringify(fixture));
});
const setupComponent = (cell) => {
diff --git a/spec/frontend/notebook/cells/markdown_spec.js b/spec/frontend/notebook/cells/markdown_spec.js
index 707efa21528..36b1e91f15f 100644
--- a/spec/frontend/notebook/cells/markdown_spec.js
+++ b/spec/frontend/notebook/cells/markdown_spec.js
@@ -1,6 +1,9 @@
import { mount } from '@vue/test-utils';
import katex from 'katex';
import Vue from 'vue';
+import markdownTableJson from 'test_fixtures/blob/notebook/markdown-table.json';
+import basicJson from 'test_fixtures/blob/notebook/basic.json';
+import mathJson from 'test_fixtures/blob/notebook/math.json';
import MarkdownComponent from '~/notebook/cells/markdown.vue';
const Component = Vue.extend(MarkdownComponent);
@@ -35,7 +38,7 @@ describe('Markdown component', () => {
let json;
beforeEach(() => {
- json = getJSONFixture('blob/notebook/basic.json');
+ json = basicJson;
// eslint-disable-next-line prefer-destructuring
cell = json.cells[1];
@@ -104,7 +107,7 @@ describe('Markdown component', () => {
describe('tables', () => {
beforeEach(() => {
- json = getJSONFixture('blob/notebook/markdown-table.json');
+ json = markdownTableJson;
});
it('renders images and text', () => {
@@ -135,7 +138,7 @@ describe('Markdown component', () => {
describe('katex', () => {
beforeEach(() => {
- json = getJSONFixture('blob/notebook/math.json');
+ json = mathJson;
});
it('renders multi-line katex', async () => {
diff --git a/spec/frontend/notebook/cells/output/index_spec.js b/spec/frontend/notebook/cells/output/index_spec.js
index 2985abf0f4f..7ece73d375c 100644
--- a/spec/frontend/notebook/cells/output/index_spec.js
+++ b/spec/frontend/notebook/cells/output/index_spec.js
@@ -1,11 +1,11 @@
import Vue from 'vue';
+import json from 'test_fixtures/blob/notebook/basic.json';
import CodeComponent from '~/notebook/cells/output/index.vue';
const Component = Vue.extend(CodeComponent);
describe('Output component', () => {
let vm;
- let json;
const createComponent = (output) => {
vm = new Component({
@@ -17,11 +17,6 @@ describe('Output component', () => {
vm.$mount();
};
- beforeEach(() => {
- // This is the output after rendering a jupyter notebook
- json = getJSONFixture('blob/notebook/basic.json');
- });
-
describe('text output', () => {
beforeEach((done) => {
const textType = json.cells[2];
diff --git a/spec/frontend/notebook/index_spec.js b/spec/frontend/notebook/index_spec.js
index 4d0dacaf37e..cd531d628b3 100644
--- a/spec/frontend/notebook/index_spec.js
+++ b/spec/frontend/notebook/index_spec.js
@@ -1,18 +1,13 @@
import { mount } from '@vue/test-utils';
import Vue from 'vue';
+import json from 'test_fixtures/blob/notebook/basic.json';
+import jsonWithWorksheet from 'test_fixtures/blob/notebook/worksheets.json';
import Notebook from '~/notebook/index.vue';
const Component = Vue.extend(Notebook);
describe('Notebook component', () => {
let vm;
- let json;
- let jsonWithWorksheet;
-
- beforeEach(() => {
- json = getJSONFixture('blob/notebook/basic.json');
- jsonWithWorksheet = getJSONFixture('blob/notebook/worksheets.json');
- });
function buildComponent(notebook) {
return mount(Component, {
diff --git a/spec/frontend/notes/components/comment_type_dropdown_spec.js b/spec/frontend/notes/components/comment_type_dropdown_spec.js
index 5e1cb813369..8ac6144e5c8 100644
--- a/spec/frontend/notes/components/comment_type_dropdown_spec.js
+++ b/spec/frontend/notes/components/comment_type_dropdown_spec.js
@@ -47,8 +47,18 @@ describe('CommentTypeDropdown component', () => {
it('Should emit `change` event when clicking on an alternate dropdown option', () => {
mountComponent({ props: { noteType: constants.DISCUSSION } });
- findCommentDropdownOption().vm.$emit('click');
- findDiscussionDropdownOption().vm.$emit('click');
+ const event = {
+ type: 'click',
+ stopPropagation: jest.fn(),
+ preventDefault: jest.fn(),
+ };
+
+ findCommentDropdownOption().vm.$emit('click', event);
+ findDiscussionDropdownOption().vm.$emit('click', event);
+
+ // ensure the native events don't trigger anything
+ expect(event.stopPropagation).toHaveBeenCalledTimes(2);
+ expect(event.preventDefault).toHaveBeenCalledTimes(2);
expect(wrapper.emitted('change')[0]).toEqual([constants.COMMENT]);
expect(wrapper.emitted('change').length).toEqual(1);
diff --git a/spec/frontend/notes/components/diff_with_note_spec.js b/spec/frontend/notes/components/diff_with_note_spec.js
index e997fc4da50..c352265654b 100644
--- a/spec/frontend/notes/components/diff_with_note_spec.js
+++ b/spec/frontend/notes/components/diff_with_note_spec.js
@@ -1,10 +1,9 @@
import { shallowMount } from '@vue/test-utils';
+import discussionFixture from 'test_fixtures/merge_requests/diff_discussion.json';
+import imageDiscussionFixture from 'test_fixtures/merge_requests/image_diff_discussion.json';
import { createStore } from '~/mr_notes/stores';
import DiffWithNote from '~/notes/components/diff_with_note.vue';
-const discussionFixture = 'merge_requests/diff_discussion.json';
-const imageDiscussionFixture = 'merge_requests/image_diff_discussion.json';
-
describe('diff_with_note', () => {
let store;
let wrapper;
@@ -35,7 +34,7 @@ describe('diff_with_note', () => {
describe('text diff', () => {
beforeEach(() => {
- const diffDiscussion = getJSONFixture(discussionFixture)[0];
+ const diffDiscussion = discussionFixture[0];
wrapper = shallowMount(DiffWithNote, {
propsData: {
@@ -75,7 +74,7 @@ describe('diff_with_note', () => {
describe('image diff', () => {
beforeEach(() => {
- const imageDiscussion = getJSONFixture(imageDiscussionFixture)[0];
+ const imageDiscussion = imageDiscussionFixture[0];
wrapper = shallowMount(DiffWithNote, {
propsData: { discussion: imageDiscussion, diffFile: {} },
store,
diff --git a/spec/frontend/notes/components/note_form_spec.js b/spec/frontend/notes/components/note_form_spec.js
index 92137d3190f..abc888cd245 100644
--- a/spec/frontend/notes/components/note_form_spec.js
+++ b/spec/frontend/notes/components/note_form_spec.js
@@ -150,6 +150,16 @@ describe('issue_note_form component', () => {
expect(handleFormUpdate.length).toBe(1);
});
+
+ it('should disable textarea when ctrl+enter is pressed', async () => {
+ textarea.trigger('keydown.enter', { ctrlKey: true });
+
+ expect(textarea.attributes('disabled')).toBeUndefined();
+
+ await nextTick();
+
+ expect(textarea.attributes('disabled')).toBe('disabled');
+ });
});
});
diff --git a/spec/frontend/notes/components/noteable_discussion_spec.js b/spec/frontend/notes/components/noteable_discussion_spec.js
index a364a524e7b..727ef02dcbb 100644
--- a/spec/frontend/notes/components/noteable_discussion_spec.js
+++ b/spec/frontend/notes/components/noteable_discussion_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import discussionWithTwoUnresolvedNotes from 'test_fixtures/merge_requests/resolved_diff_discussion.json';
import { trimText } from 'helpers/text_helper';
import mockDiffFile from 'jest/diffs/mock_data/diff_file';
import DiscussionNotes from '~/notes/components/discussion_notes.vue';
@@ -17,8 +18,6 @@ import {
userDataMock,
} from '../mock_data';
-const discussionWithTwoUnresolvedNotes = 'merge_requests/resolved_diff_discussion.json';
-
describe('noteable_discussion component', () => {
let store;
let wrapper;
@@ -119,7 +118,7 @@ describe('noteable_discussion component', () => {
describe('for resolved thread', () => {
beforeEach(() => {
- const discussion = getJSONFixture(discussionWithTwoUnresolvedNotes)[0];
+ const discussion = discussionWithTwoUnresolvedNotes[0];
wrapper.setProps({ discussion });
});
@@ -133,7 +132,7 @@ describe('noteable_discussion component', () => {
describe('for unresolved thread', () => {
beforeEach(() => {
const discussion = {
- ...getJSONFixture(discussionWithTwoUnresolvedNotes)[0],
+ ...discussionWithTwoUnresolvedNotes[0],
expanded: true,
};
discussion.resolved = false;
diff --git a/spec/frontend/notes/components/noteable_note_spec.js b/spec/frontend/notes/components/noteable_note_spec.js
index 467a8bec21b..038aff3be04 100644
--- a/spec/frontend/notes/components/noteable_note_spec.js
+++ b/spec/frontend/notes/components/noteable_note_spec.js
@@ -189,6 +189,27 @@ describe('issue_note', () => {
createWrapper();
});
+ describe('avatar sizes in diffs', () => {
+ const line = {
+ line_code: 'abc_1_1',
+ type: null,
+ old_line: '1',
+ new_line: '1',
+ };
+
+ it('should render 24px avatars', async () => {
+ wrapper.setProps({
+ note: { ...note },
+ discussionRoot: true,
+ line,
+ });
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.findComponent(UserAvatarLink).props('imgSize')).toBe(24);
+ });
+ });
+
it('should render user information', () => {
const { author } = note;
const avatar = wrapper.findComponent(UserAvatarLink);
diff --git a/spec/frontend/notes/stores/getters_spec.js b/spec/frontend/notes/stores/getters_spec.js
index 3adb5da020e..9a11fdba508 100644
--- a/spec/frontend/notes/stores/getters_spec.js
+++ b/spec/frontend/notes/stores/getters_spec.js
@@ -1,3 +1,4 @@
+import discussionWithTwoUnresolvedNotes from 'test_fixtures/merge_requests/resolved_diff_discussion.json';
import { DESC, ASC } from '~/notes/constants';
import * as getters from '~/notes/stores/getters';
import {
@@ -17,8 +18,6 @@ import {
draftDiffDiscussion,
} from '../mock_data';
-const discussionWithTwoUnresolvedNotes = 'merge_requests/resolved_diff_discussion.json';
-
// Helper function to ensure that we're using the same schema across tests.
const createDiscussionNeighborParams = (discussionId, diffOrder, step) => ({
discussionId,
@@ -123,7 +122,7 @@ describe('Getters Notes Store', () => {
describe('resolvedDiscussionsById', () => {
it('ignores unresolved system notes', () => {
- const [discussion] = getJSONFixture(discussionWithTwoUnresolvedNotes);
+ const [discussion] = discussionWithTwoUnresolvedNotes;
discussion.notes[0].resolved = true;
discussion.notes[1].resolved = false;
state.discussions.push(discussion);
diff --git a/spec/frontend/oauth_remember_me_spec.js b/spec/frontend/oauth_remember_me_spec.js
index 70bda1d9f9e..3187cbf6547 100644
--- a/spec/frontend/oauth_remember_me_spec.js
+++ b/spec/frontend/oauth_remember_me_spec.js
@@ -3,7 +3,7 @@ import OAuthRememberMe from '~/pages/sessions/new/oauth_remember_me';
describe('OAuthRememberMe', () => {
const findFormAction = (selector) => {
- return $(`#oauth-container .oauth-login${selector}`).parent('form').attr('action');
+ return $(`#oauth-container .js-oauth-login${selector}`).parent('form').attr('action');
};
beforeEach(() => {
diff --git a/spec/frontend/packages/details/components/__snapshots__/conan_installation_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/conan_installation_spec.js.snap
deleted file mode 100644
index a3423e3f4d7..00000000000
--- a/spec/frontend/packages/details/components/__snapshots__/conan_installation_spec.js.snap
+++ /dev/null
@@ -1,36 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`ConanInstallation renders all the messages 1`] = `
-<div>
- <installation-title-stub
- options="[object Object]"
- packagetype="conan"
- />
-
- <code-instruction-stub
- copytext="Copy Conan Command"
- instruction="foo/command"
- label="Conan Command"
- trackingaction="copy_conan_command"
- trackinglabel="code_instruction"
- />
-
- <h3
- class="gl-font-lg"
- >
- Registry setup
- </h3>
-
- <code-instruction-stub
- copytext="Copy Conan Setup Command"
- instruction="foo/setup"
- label="Add Conan Remote"
- trackingaction="copy_conan_setup_command"
- trackinglabel="code_instruction"
- />
-
- <gl-sprintf-stub
- message="For more information on the Conan registry, %{linkStart}see the documentation%{linkEnd}."
- />
-</div>
-`;
diff --git a/spec/frontend/packages/details/components/__snapshots__/dependency_row_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/dependency_row_spec.js.snap
deleted file mode 100644
index 39469bf4fd0..00000000000
--- a/spec/frontend/packages/details/components/__snapshots__/dependency_row_spec.js.snap
+++ /dev/null
@@ -1,34 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`DependencyRow renders full dependency 1`] = `
-<div
- class="gl-responsive-table-row"
->
- <div
- class="table-section section-50"
- >
- <strong
- class="gl-text-body"
- >
- Test.Dependency
- </strong>
-
- <span
- data-testid="target-framework"
- >
- (.NETStandard2.0)
- </span>
- </div>
-
- <div
- class="table-section section-50 gl-display-flex gl-md-justify-content-end"
- data-testid="version-pattern"
- >
- <span
- class="gl-text-body"
- >
- 2.3.7
- </span>
- </div>
-</div>
-`;
diff --git a/spec/frontend/packages/details/components/__snapshots__/maven_installation_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/maven_installation_spec.js.snap
deleted file mode 100644
index 8a2793c0010..00000000000
--- a/spec/frontend/packages/details/components/__snapshots__/maven_installation_spec.js.snap
+++ /dev/null
@@ -1,112 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`MavenInstallation groovy renders all the messages 1`] = `
-<div>
- <installation-title-stub
- options="[object Object],[object Object],[object Object]"
- packagetype="maven"
- />
-
- <code-instruction-stub
- class="gl-mb-5"
- copytext="Copy Gradle Groovy DSL install command"
- instruction="foo/gradle/groovy/install"
- label="Gradle Groovy DSL install command"
- trackingaction="copy_gradle_install_command"
- trackinglabel="code_instruction"
- />
-
- <code-instruction-stub
- copytext="Copy add Gradle Groovy DSL repository command"
- instruction="foo/gradle/groovy/add/source"
- label="Add Gradle Groovy DSL repository command"
- multiline="true"
- trackingaction="copy_gradle_add_to_source_command"
- trackinglabel="code_instruction"
- />
-</div>
-`;
-
-exports[`MavenInstallation kotlin renders all the messages 1`] = `
-<div>
- <installation-title-stub
- options="[object Object],[object Object],[object Object]"
- packagetype="maven"
- />
-
- <code-instruction-stub
- class="gl-mb-5"
- copytext="Copy Gradle Kotlin DSL install command"
- instruction="foo/gradle/kotlin/install"
- label="Gradle Kotlin DSL install command"
- trackingaction="copy_kotlin_install_command"
- trackinglabel="code_instruction"
- />
-
- <code-instruction-stub
- copytext="Copy add Gradle Kotlin DSL repository command"
- instruction="foo/gradle/kotlin/add/source"
- label="Add Gradle Kotlin DSL repository command"
- multiline="true"
- trackingaction="copy_kotlin_add_to_source_command"
- trackinglabel="code_instruction"
- />
-</div>
-`;
-
-exports[`MavenInstallation maven renders all the messages 1`] = `
-<div>
- <installation-title-stub
- options="[object Object],[object Object],[object Object]"
- packagetype="maven"
- />
-
- <p>
- <gl-sprintf-stub
- message="Copy and paste this inside your %{codeStart}pom.xml%{codeEnd} %{codeStart}dependencies%{codeEnd} block."
- />
- </p>
-
- <code-instruction-stub
- copytext="Copy Maven XML"
- instruction="foo/xml"
- label=""
- multiline="true"
- trackingaction="copy_maven_xml"
- trackinglabel="code_instruction"
- />
-
- <code-instruction-stub
- copytext="Copy Maven command"
- instruction="foo/command"
- label="Maven Command"
- trackingaction="copy_maven_command"
- trackinglabel="code_instruction"
- />
-
- <h3
- class="gl-font-lg"
- >
- Registry setup
- </h3>
-
- <p>
- <gl-sprintf-stub
- message="If you haven't already done so, you will need to add the below to your %{codeStart}pom.xml%{codeEnd} file."
- />
- </p>
-
- <code-instruction-stub
- copytext="Copy Maven registry XML"
- instruction="foo/setup"
- label=""
- multiline="true"
- trackingaction="copy_maven_setup_xml"
- trackinglabel="code_instruction"
- />
-
- <gl-sprintf-stub
- message="For more information on the Maven registry, %{linkStart}see the documentation%{linkEnd}."
- />
-</div>
-`;
diff --git a/spec/frontend/packages/details/components/__snapshots__/npm_installation_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/npm_installation_spec.js.snap
deleted file mode 100644
index 015c7b94dde..00000000000
--- a/spec/frontend/packages/details/components/__snapshots__/npm_installation_spec.js.snap
+++ /dev/null
@@ -1,36 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`NpmInstallation renders all the messages 1`] = `
-<div>
- <installation-title-stub
- options="[object Object],[object Object]"
- packagetype="npm"
- />
-
- <code-instruction-stub
- copytext="Copy npm command"
- instruction="npm i @Test/package"
- label=""
- trackingaction="copy_npm_install_command"
- trackinglabel="code_instruction"
- />
-
- <h3
- class="gl-font-lg"
- >
- Registry setup
- </h3>
-
- <code-instruction-stub
- copytext="Copy npm setup command"
- instruction="echo @Test:registry=undefined/ >> .npmrc"
- label=""
- trackingaction="copy_npm_setup_command"
- trackinglabel="code_instruction"
- />
-
- <gl-sprintf-stub
- message="You may also need to setup authentication using an auth token. %{linkStart}See the documentation%{linkEnd} to find out more."
- />
-</div>
-`;
diff --git a/spec/frontend/packages/details/components/__snapshots__/nuget_installation_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/nuget_installation_spec.js.snap
deleted file mode 100644
index 04532743952..00000000000
--- a/spec/frontend/packages/details/components/__snapshots__/nuget_installation_spec.js.snap
+++ /dev/null
@@ -1,36 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`NugetInstallation renders all the messages 1`] = `
-<div>
- <installation-title-stub
- options="[object Object]"
- packagetype="nuget"
- />
-
- <code-instruction-stub
- copytext="Copy NuGet Command"
- instruction="foo/command"
- label="NuGet Command"
- trackingaction="copy_nuget_install_command"
- trackinglabel="code_instruction"
- />
-
- <h3
- class="gl-font-lg"
- >
- Registry setup
- </h3>
-
- <code-instruction-stub
- copytext="Copy NuGet Setup Command"
- instruction="foo/setup"
- label="Add NuGet Source"
- trackingaction="copy_nuget_setup_command"
- trackinglabel="code_instruction"
- />
-
- <gl-sprintf-stub
- message="For more information on the NuGet registry, %{linkStart}see the documentation%{linkEnd}."
- />
-</div>
-`;
diff --git a/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap
deleted file mode 100644
index 318cea98b92..00000000000
--- a/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap
+++ /dev/null
@@ -1,168 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`PackageTitle renders with tags 1`] = `
-<div
- class="gl-display-flex gl-flex-direction-column"
- data-qa-selector="package_title"
->
- <div
- class="gl-display-flex gl-justify-content-space-between gl-py-3"
- >
- <div
- class="gl-flex-direction-column gl-flex-grow-1"
- >
- <div
- class="gl-display-flex"
- >
- <!---->
-
- <div
- class="gl-display-flex gl-flex-direction-column"
- >
- <h1
- class="gl-font-size-h1 gl-mt-3 gl-mb-2"
- data-testid="title"
- >
- Test package
- </h1>
-
- <div
- class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-1"
- >
- <gl-icon-stub
- class="gl-mr-3"
- name="eye"
- size="16"
- />
-
- <gl-sprintf-stub
- message="v%{version} published %{timeAgo}"
- />
- </div>
- </div>
- </div>
-
- <div
- class="gl-display-flex gl-flex-wrap gl-align-items-center gl-mt-3"
- >
- <div
- class="gl-display-flex gl-align-items-center gl-mr-5"
- >
- <metadata-item-stub
- data-testid="package-type"
- icon="package"
- link=""
- size="s"
- text="maven"
- texttooltip=""
- />
- </div>
- <div
- class="gl-display-flex gl-align-items-center gl-mr-5"
- >
- <metadata-item-stub
- data-testid="package-size"
- icon="disk"
- link=""
- size="s"
- text="300 bytes"
- texttooltip=""
- />
- </div>
- <div
- class="gl-display-flex gl-align-items-center gl-mr-5"
- >
- <package-tags-stub
- hidelabel="true"
- tagdisplaylimit="2"
- tags="[object Object],[object Object],[object Object],[object Object]"
- />
- </div>
- </div>
- </div>
-
- <!---->
- </div>
-
- <p />
-</div>
-`;
-
-exports[`PackageTitle renders without tags 1`] = `
-<div
- class="gl-display-flex gl-flex-direction-column"
- data-qa-selector="package_title"
->
- <div
- class="gl-display-flex gl-justify-content-space-between gl-py-3"
- >
- <div
- class="gl-flex-direction-column gl-flex-grow-1"
- >
- <div
- class="gl-display-flex"
- >
- <!---->
-
- <div
- class="gl-display-flex gl-flex-direction-column"
- >
- <h1
- class="gl-font-size-h1 gl-mt-3 gl-mb-2"
- data-testid="title"
- >
- Test package
- </h1>
-
- <div
- class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-1"
- >
- <gl-icon-stub
- class="gl-mr-3"
- name="eye"
- size="16"
- />
-
- <gl-sprintf-stub
- message="v%{version} published %{timeAgo}"
- />
- </div>
- </div>
- </div>
-
- <div
- class="gl-display-flex gl-flex-wrap gl-align-items-center gl-mt-3"
- >
- <div
- class="gl-display-flex gl-align-items-center gl-mr-5"
- >
- <metadata-item-stub
- data-testid="package-type"
- icon="package"
- link=""
- size="s"
- text="maven"
- texttooltip=""
- />
- </div>
- <div
- class="gl-display-flex gl-align-items-center gl-mr-5"
- >
- <metadata-item-stub
- data-testid="package-size"
- icon="disk"
- link=""
- size="s"
- text="300 bytes"
- texttooltip=""
- />
- </div>
- </div>
- </div>
-
- <!---->
- </div>
-
- <p />
-</div>
-`;
diff --git a/spec/frontend/packages/details/components/__snapshots__/pypi_installation_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/pypi_installation_spec.js.snap
deleted file mode 100644
index d5bb825d8d1..00000000000
--- a/spec/frontend/packages/details/components/__snapshots__/pypi_installation_spec.js.snap
+++ /dev/null
@@ -1,45 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`PypiInstallation renders all the messages 1`] = `
-<div>
- <installation-title-stub
- options="[object Object]"
- packagetype="pypi"
- />
-
- <code-instruction-stub
- copytext="Copy Pip command"
- data-testid="pip-command"
- instruction="pip install"
- label="Pip Command"
- trackingaction="copy_pip_install_command"
- trackinglabel="code_instruction"
- />
-
- <h3
- class="gl-font-lg"
- >
- Registry setup
- </h3>
-
- <p>
- <gl-sprintf-stub
- message="If you haven't already done so, you will need to add the below to your %{codeStart}.pypirc%{codeEnd} file."
- />
- </p>
-
- <code-instruction-stub
- copytext="Copy .pypirc content"
- data-testid="pypi-setup-content"
- instruction="python setup"
- label=""
- multiline="true"
- trackingaction="copy_pypi_setup_command"
- trackinglabel="code_instruction"
- />
-
- <gl-sprintf-stub
- message="For more information on the PyPi registry, %{linkStart}see the documentation%{linkEnd}."
- />
-</div>
-`;
diff --git a/spec/frontend/packages/details/components/additional_metadata_spec.js b/spec/frontend/packages/details/components/additional_metadata_spec.js
deleted file mode 100644
index b339aa84348..00000000000
--- a/spec/frontend/packages/details/components/additional_metadata_spec.js
+++ /dev/null
@@ -1,119 +0,0 @@
-import { GlLink, GlSprintf } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import component from '~/packages/details/components/additional_metadata.vue';
-import DetailsRow from '~/vue_shared/components/registry/details_row.vue';
-
-import { mavenPackage, conanPackage, nugetPackage, npmPackage } from '../../mock_data';
-
-describe('Package Additional Metadata', () => {
- let wrapper;
- const defaultProps = {
- packageEntity: { ...mavenPackage },
- };
-
- const mountComponent = (props) => {
- wrapper = shallowMount(component, {
- propsData: { ...defaultProps, ...props },
- stubs: {
- DetailsRow,
- GlSprintf,
- },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- const findTitle = () => wrapper.find('[data-testid="title"]');
- const findMainArea = () => wrapper.find('[data-testid="main"]');
- const findNugetSource = () => wrapper.find('[data-testid="nuget-source"]');
- const findNugetLicense = () => wrapper.find('[data-testid="nuget-license"]');
- const findConanRecipe = () => wrapper.find('[data-testid="conan-recipe"]');
- const findMavenApp = () => wrapper.find('[data-testid="maven-app"]');
- const findMavenGroup = () => wrapper.find('[data-testid="maven-group"]');
- const findElementLink = (container) => container.find(GlLink);
-
- it('has the correct title', () => {
- mountComponent();
-
- const title = findTitle();
-
- expect(title.exists()).toBe(true);
- expect(title.text()).toBe('Additional Metadata');
- });
-
- describe.each`
- packageEntity | visible | metadata
- ${mavenPackage} | ${true} | ${'maven_metadatum'}
- ${conanPackage} | ${true} | ${'conan_metadatum'}
- ${nugetPackage} | ${true} | ${'nuget_metadatum'}
- ${npmPackage} | ${false} | ${null}
- `('Component visibility', ({ packageEntity, visible, metadata }) => {
- it(`Is ${visible} that the component markup is visible when the package is ${packageEntity.package_type}`, () => {
- mountComponent({ packageEntity });
-
- expect(findTitle().exists()).toBe(visible);
- expect(findMainArea().exists()).toBe(visible);
- });
-
- it(`The component is hidden if ${metadata} is missing`, () => {
- mountComponent({ packageEntity: { ...packageEntity, [metadata]: null } });
-
- expect(findTitle().exists()).toBe(false);
- expect(findMainArea().exists()).toBe(false);
- });
- });
-
- describe('nuget metadata', () => {
- beforeEach(() => {
- mountComponent({ packageEntity: nugetPackage });
- });
-
- it.each`
- name | finderFunction | text | link | icon
- ${'source'} | ${findNugetSource} | ${'Source project located at project-foo-url'} | ${'project_url'} | ${'project'}
- ${'license'} | ${findNugetLicense} | ${'License information located at license-foo-url'} | ${'license_url'} | ${'license'}
- `('$name element', ({ finderFunction, text, link, icon }) => {
- const element = finderFunction();
- expect(element.exists()).toBe(true);
- expect(element.text()).toBe(text);
- expect(element.props('icon')).toBe(icon);
- expect(findElementLink(element).attributes('href')).toBe(nugetPackage.nuget_metadatum[link]);
- });
- });
-
- describe('conan metadata', () => {
- beforeEach(() => {
- mountComponent({ packageEntity: conanPackage });
- });
-
- it.each`
- name | finderFunction | text | icon
- ${'recipe'} | ${findConanRecipe} | ${'Recipe: conan-package/1.0.0@conan+conan-package/stable'} | ${'information-o'}
- `('$name element', ({ finderFunction, text, icon }) => {
- const element = finderFunction();
- expect(element.exists()).toBe(true);
- expect(element.text()).toBe(text);
- expect(element.props('icon')).toBe(icon);
- });
- });
-
- describe('maven metadata', () => {
- beforeEach(() => {
- mountComponent();
- });
-
- it.each`
- name | finderFunction | text | icon
- ${'app'} | ${findMavenApp} | ${'App name: test-app'} | ${'information-o'}
- ${'group'} | ${findMavenGroup} | ${'App group: com.test.app'} | ${'information-o'}
- `('$name element', ({ finderFunction, text, icon }) => {
- const element = finderFunction();
- expect(element.exists()).toBe(true);
- expect(element.text()).toBe(text);
- expect(element.props('icon')).toBe(icon);
- });
- });
-});
diff --git a/spec/frontend/packages/details/components/composer_installation_spec.js b/spec/frontend/packages/details/components/composer_installation_spec.js
deleted file mode 100644
index 18d11c7dd57..00000000000
--- a/spec/frontend/packages/details/components/composer_installation_spec.js
+++ /dev/null
@@ -1,133 +0,0 @@
-import { GlSprintf, GlLink } from '@gitlab/ui';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import Vuex from 'vuex';
-import { registryUrl as composerHelpPath } from 'jest/packages/details/mock_data';
-import { composerPackage as packageEntity } from 'jest/packages/mock_data';
-import ComposerInstallation from '~/packages/details/components/composer_installation.vue';
-import InstallationTitle from '~/packages/details/components/installation_title.vue';
-
-import { TrackingActions } from '~/packages/details/constants';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('ComposerInstallation', () => {
- let wrapper;
- let store;
-
- const composerRegistryIncludeStr = 'foo/registry';
- const composerPackageIncludeStr = 'foo/package';
-
- const createStore = (groupExists = true) => {
- store = new Vuex.Store({
- state: { packageEntity, composerHelpPath },
- getters: {
- composerRegistryInclude: () => composerRegistryIncludeStr,
- composerPackageInclude: () => composerPackageIncludeStr,
- groupExists: () => groupExists,
- },
- });
- };
-
- const findRootNode = () => wrapper.find('[data-testid="root-node"]');
- const findRegistryInclude = () => wrapper.find('[data-testid="registry-include"]');
- const findPackageInclude = () => wrapper.find('[data-testid="package-include"]');
- const findHelpText = () => wrapper.find('[data-testid="help-text"]');
- const findHelpLink = () => wrapper.find(GlLink);
- const findInstallationTitle = () => wrapper.findComponent(InstallationTitle);
-
- function createComponent() {
- wrapper = shallowMount(ComposerInstallation, {
- localVue,
- store,
- stubs: {
- GlSprintf,
- },
- });
- }
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('install command switch', () => {
- it('has the installation title component', () => {
- createStore();
- createComponent();
-
- expect(findInstallationTitle().exists()).toBe(true);
- expect(findInstallationTitle().props()).toMatchObject({
- packageType: 'composer',
- options: [{ value: 'composer', label: 'Show Composer commands' }],
- });
- });
- });
-
- describe('registry include command', () => {
- beforeEach(() => {
- createStore();
- createComponent();
- });
-
- it('uses code_instructions', () => {
- const registryIncludeCommand = findRegistryInclude();
- expect(registryIncludeCommand.exists()).toBe(true);
- expect(registryIncludeCommand.props()).toMatchObject({
- instruction: composerRegistryIncludeStr,
- copyText: 'Copy registry include',
- trackingAction: TrackingActions.COPY_COMPOSER_REGISTRY_INCLUDE_COMMAND,
- });
- });
-
- it('has the correct title', () => {
- expect(findRegistryInclude().props('label')).toBe('Add composer registry');
- });
- });
-
- describe('package include command', () => {
- beforeEach(() => {
- createStore();
- createComponent();
- });
-
- it('uses code_instructions', () => {
- const registryIncludeCommand = findPackageInclude();
- expect(registryIncludeCommand.exists()).toBe(true);
- expect(registryIncludeCommand.props()).toMatchObject({
- instruction: composerPackageIncludeStr,
- copyText: 'Copy require package include',
- trackingAction: TrackingActions.COPY_COMPOSER_PACKAGE_INCLUDE_COMMAND,
- });
- });
-
- it('has the correct title', () => {
- expect(findPackageInclude().props('label')).toBe('Install package version');
- });
-
- it('has the correct help text', () => {
- expect(findHelpText().text()).toBe(
- 'For more information on Composer packages in GitLab, see the documentation.',
- );
- expect(findHelpLink().attributes()).toMatchObject({
- href: composerHelpPath,
- target: '_blank',
- });
- });
- });
-
- describe('root node', () => {
- it('is normally rendered', () => {
- createStore();
- createComponent();
-
- expect(findRootNode().exists()).toBe(true);
- });
-
- it('is not rendered when the group does not exist', () => {
- createStore(false);
- createComponent();
-
- expect(findRootNode().exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/packages/details/components/conan_installation_spec.js b/spec/frontend/packages/details/components/conan_installation_spec.js
deleted file mode 100644
index 78a7d265a21..00000000000
--- a/spec/frontend/packages/details/components/conan_installation_spec.js
+++ /dev/null
@@ -1,72 +0,0 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import Vuex from 'vuex';
-import ConanInstallation from '~/packages/details/components/conan_installation.vue';
-import InstallationTitle from '~/packages/details/components/installation_title.vue';
-import CodeInstructions from '~/vue_shared/components/registry/code_instruction.vue';
-import { conanPackage as packageEntity } from '../../mock_data';
-import { registryUrl as conanPath } from '../mock_data';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('ConanInstallation', () => {
- let wrapper;
-
- const conanInstallationCommandStr = 'foo/command';
- const conanSetupCommandStr = 'foo/setup';
-
- const store = new Vuex.Store({
- state: {
- packageEntity,
- conanPath,
- },
- getters: {
- conanInstallationCommand: () => conanInstallationCommandStr,
- conanSetupCommand: () => conanSetupCommandStr,
- },
- });
-
- const findCodeInstructions = () => wrapper.findAll(CodeInstructions);
- const findInstallationTitle = () => wrapper.findComponent(InstallationTitle);
-
- function createComponent() {
- wrapper = shallowMount(ConanInstallation, {
- localVue,
- store,
- });
- }
-
- beforeEach(() => {
- createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders all the messages', () => {
- expect(wrapper.element).toMatchSnapshot();
- });
-
- describe('install command switch', () => {
- it('has the installation title component', () => {
- expect(findInstallationTitle().exists()).toBe(true);
- expect(findInstallationTitle().props()).toMatchObject({
- packageType: 'conan',
- options: [{ value: 'conan', label: 'Show Conan commands' }],
- });
- });
- });
-
- describe('installation commands', () => {
- it('renders the correct command', () => {
- expect(findCodeInstructions().at(0).props('instruction')).toBe(conanInstallationCommandStr);
- });
- });
-
- describe('setup commands', () => {
- it('renders the correct command', () => {
- expect(findCodeInstructions().at(1).props('instruction')).toBe(conanSetupCommandStr);
- });
- });
-});
diff --git a/spec/frontend/packages/details/components/dependency_row_spec.js b/spec/frontend/packages/details/components/dependency_row_spec.js
deleted file mode 100644
index 7d3ee92908d..00000000000
--- a/spec/frontend/packages/details/components/dependency_row_spec.js
+++ /dev/null
@@ -1,62 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import DependencyRow from '~/packages/details/components/dependency_row.vue';
-import { dependencyLinks } from '../../mock_data';
-
-describe('DependencyRow', () => {
- let wrapper;
-
- const { withoutFramework, withoutVersion, fullLink } = dependencyLinks;
-
- function createComponent({ dependencyLink = fullLink } = {}) {
- wrapper = shallowMount(DependencyRow, {
- propsData: {
- dependency: dependencyLink,
- },
- });
- }
-
- const dependencyVersion = () => wrapper.find('[data-testid="version-pattern"]');
- const dependencyFramework = () => wrapper.find('[data-testid="target-framework"]');
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('renders', () => {
- it('full dependency', () => {
- createComponent();
-
- expect(wrapper.element).toMatchSnapshot();
- });
- });
-
- describe('version', () => {
- it('does not render any version information when not supplied', () => {
- createComponent({ dependencyLink: withoutVersion });
-
- expect(dependencyVersion().exists()).toBe(false);
- });
-
- it('does render version info when it exists', () => {
- createComponent();
-
- expect(dependencyVersion().exists()).toBe(true);
- expect(dependencyVersion().text()).toBe(fullLink.version_pattern);
- });
- });
-
- describe('target framework', () => {
- it('does not render any framework information when not supplied', () => {
- createComponent({ dependencyLink: withoutFramework });
-
- expect(dependencyFramework().exists()).toBe(false);
- });
-
- it('does render framework info when it exists', () => {
- createComponent();
-
- expect(dependencyFramework().exists()).toBe(true);
- expect(dependencyFramework().text()).toBe(`(${fullLink.target_framework})`);
- });
- });
-});
diff --git a/spec/frontend/packages/details/components/installation_title_spec.js b/spec/frontend/packages/details/components/installation_title_spec.js
deleted file mode 100644
index 14e990d3011..00000000000
--- a/spec/frontend/packages/details/components/installation_title_spec.js
+++ /dev/null
@@ -1,58 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-
-import InstallationTitle from '~/packages/details/components/installation_title.vue';
-import PersistedDropdownSelection from '~/vue_shared/components/registry/persisted_dropdown_selection.vue';
-
-describe('InstallationTitle', () => {
- let wrapper;
-
- const defaultProps = { packageType: 'foo', options: [{ value: 'foo', label: 'bar' }] };
-
- const findPersistedDropdownSelection = () => wrapper.findComponent(PersistedDropdownSelection);
- const findTitle = () => wrapper.find('h3');
-
- function createComponent({ props = {} } = {}) {
- wrapper = shallowMount(InstallationTitle, {
- propsData: {
- ...defaultProps,
- ...props,
- },
- });
- }
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('has a title', () => {
- createComponent();
-
- expect(findTitle().exists()).toBe(true);
- expect(findTitle().text()).toBe('Installation');
- });
-
- describe('persisted dropdown selection', () => {
- it('exists', () => {
- createComponent();
-
- expect(findPersistedDropdownSelection().exists()).toBe(true);
- });
-
- it('has the correct props', () => {
- createComponent();
-
- expect(findPersistedDropdownSelection().props()).toMatchObject({
- storageKey: 'package_foo_installation_instructions',
- options: defaultProps.options,
- });
- });
-
- it('on change event emits a change event', () => {
- createComponent();
-
- findPersistedDropdownSelection().vm.$emit('change', 'baz');
-
- expect(wrapper.emitted('change')).toEqual([['baz']]);
- });
- });
-});
diff --git a/spec/frontend/packages/details/components/installations_commands_spec.js b/spec/frontend/packages/details/components/installations_commands_spec.js
deleted file mode 100644
index 164f9f69741..00000000000
--- a/spec/frontend/packages/details/components/installations_commands_spec.js
+++ /dev/null
@@ -1,61 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import ComposerInstallation from '~/packages/details/components/composer_installation.vue';
-import ConanInstallation from '~/packages/details/components/conan_installation.vue';
-import InstallationCommands from '~/packages/details/components/installation_commands.vue';
-
-import MavenInstallation from '~/packages/details/components/maven_installation.vue';
-import NpmInstallation from '~/packages/details/components/npm_installation.vue';
-import NugetInstallation from '~/packages/details/components/nuget_installation.vue';
-import PypiInstallation from '~/packages/details/components/pypi_installation.vue';
-import TerraformInstallation from '~/packages_and_registries/infrastructure_registry/components/terraform_installation.vue';
-
-import {
- conanPackage,
- mavenPackage,
- npmPackage,
- nugetPackage,
- pypiPackage,
- composerPackage,
- terraformModule,
-} from '../../mock_data';
-
-describe('InstallationCommands', () => {
- let wrapper;
-
- function createComponent(propsData) {
- wrapper = shallowMount(InstallationCommands, {
- propsData,
- });
- }
-
- const npmInstallation = () => wrapper.find(NpmInstallation);
- const mavenInstallation = () => wrapper.find(MavenInstallation);
- const conanInstallation = () => wrapper.find(ConanInstallation);
- const nugetInstallation = () => wrapper.find(NugetInstallation);
- const pypiInstallation = () => wrapper.find(PypiInstallation);
- const composerInstallation = () => wrapper.find(ComposerInstallation);
- const terraformInstallation = () => wrapper.findComponent(TerraformInstallation);
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('installation instructions', () => {
- describe.each`
- packageEntity | selector
- ${conanPackage} | ${conanInstallation}
- ${mavenPackage} | ${mavenInstallation}
- ${npmPackage} | ${npmInstallation}
- ${nugetPackage} | ${nugetInstallation}
- ${pypiPackage} | ${pypiInstallation}
- ${composerPackage} | ${composerInstallation}
- ${terraformModule} | ${terraformInstallation}
- `('renders', ({ packageEntity, selector }) => {
- it(`${packageEntity.package_type} instructions exist`, () => {
- createComponent({ packageEntity });
-
- expect(selector()).toExist();
- });
- });
- });
-});
diff --git a/spec/frontend/packages/details/components/maven_installation_spec.js b/spec/frontend/packages/details/components/maven_installation_spec.js
deleted file mode 100644
index 4972fe70a3d..00000000000
--- a/spec/frontend/packages/details/components/maven_installation_spec.js
+++ /dev/null
@@ -1,184 +0,0 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import Vuex from 'vuex';
-import { registryUrl as mavenPath } from 'jest/packages/details/mock_data';
-import { mavenPackage as packageEntity } from 'jest/packages/mock_data';
-import InstallationTitle from '~/packages/details/components/installation_title.vue';
-import MavenInstallation from '~/packages/details/components/maven_installation.vue';
-import { TrackingActions } from '~/packages/details/constants';
-import CodeInstructions from '~/vue_shared/components/registry/code_instruction.vue';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('MavenInstallation', () => {
- let wrapper;
-
- const xmlCodeBlock = 'foo/xml';
- const mavenCommandStr = 'foo/command';
- const mavenSetupXml = 'foo/setup';
- const gradleGroovyInstallCommandText = 'foo/gradle/groovy/install';
- const gradleGroovyAddSourceCommandText = 'foo/gradle/groovy/add/source';
- const gradleKotlinInstallCommandText = 'foo/gradle/kotlin/install';
- const gradleKotlinAddSourceCommandText = 'foo/gradle/kotlin/add/source';
-
- const store = new Vuex.Store({
- state: {
- packageEntity,
- mavenPath,
- },
- getters: {
- mavenInstallationXml: () => xmlCodeBlock,
- mavenInstallationCommand: () => mavenCommandStr,
- mavenSetupXml: () => mavenSetupXml,
- gradleGroovyInstalCommand: () => gradleGroovyInstallCommandText,
- gradleGroovyAddSourceCommand: () => gradleGroovyAddSourceCommandText,
- gradleKotlinInstalCommand: () => gradleKotlinInstallCommandText,
- gradleKotlinAddSourceCommand: () => gradleKotlinAddSourceCommandText,
- },
- });
-
- const findCodeInstructions = () => wrapper.findAll(CodeInstructions);
- const findInstallationTitle = () => wrapper.findComponent(InstallationTitle);
-
- function createComponent({ data = {} } = {}) {
- wrapper = shallowMount(MavenInstallation, {
- localVue,
- store,
- data() {
- return data;
- },
- });
- }
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('install command switch', () => {
- it('has the installation title component', () => {
- createComponent();
-
- expect(findInstallationTitle().exists()).toBe(true);
- expect(findInstallationTitle().props()).toMatchObject({
- packageType: 'maven',
- options: [
- { value: 'maven', label: 'Maven XML' },
- { value: 'groovy', label: 'Gradle Groovy DSL' },
- { value: 'kotlin', label: 'Gradle Kotlin DSL' },
- ],
- });
- });
-
- it('on change event updates the instructions to show', async () => {
- createComponent();
-
- expect(findCodeInstructions().at(0).props('instruction')).toBe(xmlCodeBlock);
- findInstallationTitle().vm.$emit('change', 'groovy');
-
- await nextTick();
-
- expect(findCodeInstructions().at(0).props('instruction')).toBe(
- gradleGroovyInstallCommandText,
- );
- });
- });
-
- describe('maven', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders all the messages', () => {
- expect(wrapper.element).toMatchSnapshot();
- });
-
- describe('installation commands', () => {
- it('renders the correct xml block', () => {
- expect(findCodeInstructions().at(0).props()).toMatchObject({
- instruction: xmlCodeBlock,
- multiline: true,
- trackingAction: TrackingActions.COPY_MAVEN_XML,
- });
- });
-
- it('renders the correct maven command', () => {
- expect(findCodeInstructions().at(1).props()).toMatchObject({
- instruction: mavenCommandStr,
- multiline: false,
- trackingAction: TrackingActions.COPY_MAVEN_COMMAND,
- });
- });
- });
-
- describe('setup commands', () => {
- it('renders the correct xml block', () => {
- expect(findCodeInstructions().at(2).props()).toMatchObject({
- instruction: mavenSetupXml,
- multiline: true,
- trackingAction: TrackingActions.COPY_MAVEN_SETUP,
- });
- });
- });
- });
-
- describe('groovy', () => {
- beforeEach(() => {
- createComponent({ data: { instructionType: 'groovy' } });
- });
-
- it('renders all the messages', () => {
- expect(wrapper.element).toMatchSnapshot();
- });
-
- describe('installation commands', () => {
- it('renders the gradle install command', () => {
- expect(findCodeInstructions().at(0).props()).toMatchObject({
- instruction: gradleGroovyInstallCommandText,
- multiline: false,
- trackingAction: TrackingActions.COPY_GRADLE_INSTALL_COMMAND,
- });
- });
- });
-
- describe('setup commands', () => {
- it('renders the correct gradle command', () => {
- expect(findCodeInstructions().at(1).props()).toMatchObject({
- instruction: gradleGroovyAddSourceCommandText,
- multiline: true,
- trackingAction: TrackingActions.COPY_GRADLE_ADD_TO_SOURCE_COMMAND,
- });
- });
- });
- });
-
- describe('kotlin', () => {
- beforeEach(() => {
- createComponent({ data: { instructionType: 'kotlin' } });
- });
-
- it('renders all the messages', () => {
- expect(wrapper.element).toMatchSnapshot();
- });
-
- describe('installation commands', () => {
- it('renders the gradle install command', () => {
- expect(findCodeInstructions().at(0).props()).toMatchObject({
- instruction: gradleKotlinInstallCommandText,
- multiline: false,
- trackingAction: TrackingActions.COPY_KOTLIN_INSTALL_COMMAND,
- });
- });
- });
-
- describe('setup commands', () => {
- it('renders the correct gradle command', () => {
- expect(findCodeInstructions().at(1).props()).toMatchObject({
- instruction: gradleKotlinAddSourceCommandText,
- multiline: true,
- trackingAction: TrackingActions.COPY_KOTLIN_ADD_TO_SOURCE_COMMAND,
- });
- });
- });
- });
-});
diff --git a/spec/frontend/packages/details/components/npm_installation_spec.js b/spec/frontend/packages/details/components/npm_installation_spec.js
deleted file mode 100644
index 1c49110bdf8..00000000000
--- a/spec/frontend/packages/details/components/npm_installation_spec.js
+++ /dev/null
@@ -1,123 +0,0 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import Vuex from 'vuex';
-import { registryUrl as nugetPath } from 'jest/packages/details/mock_data';
-import { npmPackage as packageEntity } from 'jest/packages/mock_data';
-import InstallationTitle from '~/packages/details/components/installation_title.vue';
-import NpmInstallation from '~/packages/details/components/npm_installation.vue';
-import { TrackingActions } from '~/packages/details/constants';
-import { npmInstallationCommand, npmSetupCommand } from '~/packages/details/store/getters';
-import CodeInstructions from '~/vue_shared/components/registry/code_instruction.vue';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('NpmInstallation', () => {
- let wrapper;
-
- const npmInstallationCommandLabel = 'npm i @Test/package';
- const yarnInstallationCommandLabel = 'yarn add @Test/package';
-
- const findCodeInstructions = () => wrapper.findAll(CodeInstructions);
- const findInstallationTitle = () => wrapper.findComponent(InstallationTitle);
-
- function createComponent({ data = {} } = {}) {
- const store = new Vuex.Store({
- state: {
- packageEntity,
- nugetPath,
- },
- getters: {
- npmInstallationCommand,
- npmSetupCommand,
- },
- });
-
- wrapper = shallowMount(NpmInstallation, {
- localVue,
- store,
- data() {
- return data;
- },
- });
- }
-
- beforeEach(() => {
- createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders all the messages', () => {
- expect(wrapper.element).toMatchSnapshot();
- });
-
- describe('install command switch', () => {
- it('has the installation title component', () => {
- expect(findInstallationTitle().exists()).toBe(true);
- expect(findInstallationTitle().props()).toMatchObject({
- packageType: 'npm',
- options: [
- { value: 'npm', label: 'Show NPM commands' },
- { value: 'yarn', label: 'Show Yarn commands' },
- ],
- });
- });
-
- it('on change event updates the instructions to show', async () => {
- createComponent();
-
- expect(findCodeInstructions().at(0).props('instruction')).toBe(npmInstallationCommandLabel);
- findInstallationTitle().vm.$emit('change', 'yarn');
-
- await nextTick();
-
- expect(findCodeInstructions().at(0).props('instruction')).toBe(yarnInstallationCommandLabel);
- });
- });
-
- describe('npm', () => {
- beforeEach(() => {
- createComponent();
- });
- it('renders the correct installation command', () => {
- expect(findCodeInstructions().at(0).props()).toMatchObject({
- instruction: npmInstallationCommandLabel,
- multiline: false,
- trackingAction: TrackingActions.COPY_NPM_INSTALL_COMMAND,
- });
- });
-
- it('renders the correct setup command', () => {
- expect(findCodeInstructions().at(1).props()).toMatchObject({
- instruction: 'echo @Test:registry=undefined/ >> .npmrc',
- multiline: false,
- trackingAction: TrackingActions.COPY_NPM_SETUP_COMMAND,
- });
- });
- });
-
- describe('yarn', () => {
- beforeEach(() => {
- createComponent({ data: { instructionType: 'yarn' } });
- });
-
- it('renders the correct setup command', () => {
- expect(findCodeInstructions().at(0).props()).toMatchObject({
- instruction: yarnInstallationCommandLabel,
- multiline: false,
- trackingAction: TrackingActions.COPY_YARN_INSTALL_COMMAND,
- });
- });
-
- it('renders the correct registry command', () => {
- expect(findCodeInstructions().at(1).props()).toMatchObject({
- instruction: 'echo \\"@Test:registry\\" \\"undefined/\\" >> .yarnrc',
- multiline: false,
- trackingAction: TrackingActions.COPY_YARN_SETUP_COMMAND,
- });
- });
- });
-});
diff --git a/spec/frontend/packages/details/components/nuget_installation_spec.js b/spec/frontend/packages/details/components/nuget_installation_spec.js
deleted file mode 100644
index 8839a8f1108..00000000000
--- a/spec/frontend/packages/details/components/nuget_installation_spec.js
+++ /dev/null
@@ -1,79 +0,0 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import Vuex from 'vuex';
-import { registryUrl as nugetPath } from 'jest/packages/details/mock_data';
-import { nugetPackage as packageEntity } from 'jest/packages/mock_data';
-import InstallationTitle from '~/packages/details/components/installation_title.vue';
-import NugetInstallation from '~/packages/details/components/nuget_installation.vue';
-import { TrackingActions } from '~/packages/details/constants';
-import CodeInstructions from '~/vue_shared/components/registry/code_instruction.vue';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('NugetInstallation', () => {
- let wrapper;
-
- const nugetInstallationCommandStr = 'foo/command';
- const nugetSetupCommandStr = 'foo/setup';
-
- const store = new Vuex.Store({
- state: {
- packageEntity,
- nugetPath,
- },
- getters: {
- nugetInstallationCommand: () => nugetInstallationCommandStr,
- nugetSetupCommand: () => nugetSetupCommandStr,
- },
- });
-
- const findCodeInstructions = () => wrapper.findAll(CodeInstructions);
- const findInstallationTitle = () => wrapper.findComponent(InstallationTitle);
-
- function createComponent() {
- wrapper = shallowMount(NugetInstallation, {
- localVue,
- store,
- });
- }
-
- beforeEach(() => {
- createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders all the messages', () => {
- expect(wrapper.element).toMatchSnapshot();
- });
-
- describe('install command switch', () => {
- it('has the installation title component', () => {
- expect(findInstallationTitle().exists()).toBe(true);
- expect(findInstallationTitle().props()).toMatchObject({
- packageType: 'nuget',
- options: [{ value: 'nuget', label: 'Show Nuget commands' }],
- });
- });
- });
-
- describe('installation commands', () => {
- it('renders the correct command', () => {
- expect(findCodeInstructions().at(0).props()).toMatchObject({
- instruction: nugetInstallationCommandStr,
- trackingAction: TrackingActions.COPY_NUGET_INSTALL_COMMAND,
- });
- });
- });
-
- describe('setup commands', () => {
- it('renders the correct command', () => {
- expect(findCodeInstructions().at(1).props()).toMatchObject({
- instruction: nugetSetupCommandStr,
- trackingAction: TrackingActions.COPY_NUGET_SETUP_COMMAND,
- });
- });
- });
-});
diff --git a/spec/frontend/packages/details/components/package_title_spec.js b/spec/frontend/packages/details/components/package_title_spec.js
deleted file mode 100644
index 512cec85b40..00000000000
--- a/spec/frontend/packages/details/components/package_title_spec.js
+++ /dev/null
@@ -1,189 +0,0 @@
-import { GlBreakpointInstance } from '@gitlab/ui/dist/utils';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import Vuex from 'vuex';
-import PackageTitle from '~/packages/details/components/package_title.vue';
-import PackageTags from '~/packages/shared/components/package_tags.vue';
-import TitleArea from '~/vue_shared/components/registry/title_area.vue';
-import {
- conanPackage,
- mavenFiles,
- mavenPackage,
- mockTags,
- npmFiles,
- npmPackage,
- nugetPackage,
-} from '../../mock_data';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('PackageTitle', () => {
- let wrapper;
- let store;
-
- function createComponent({
- packageEntity = mavenPackage,
- packageFiles = mavenFiles,
- icon = null,
- } = {}) {
- store = new Vuex.Store({
- state: {
- packageEntity,
- packageFiles,
- },
- getters: {
- packageTypeDisplay: ({ packageEntity: { package_type: type } }) => type,
- packagePipeline: ({ packageEntity: { pipeline = null } }) => pipeline,
- packageIcon: () => icon,
- },
- });
-
- wrapper = shallowMount(PackageTitle, {
- localVue,
- store,
- stubs: {
- TitleArea,
- },
- });
- return wrapper.vm.$nextTick();
- }
-
- const findTitleArea = () => wrapper.find(TitleArea);
- const packageType = () => wrapper.find('[data-testid="package-type"]');
- const packageSize = () => wrapper.find('[data-testid="package-size"]');
- const pipelineProject = () => wrapper.find('[data-testid="pipeline-project"]');
- const packageRef = () => wrapper.find('[data-testid="package-ref"]');
- const packageTags = () => wrapper.find(PackageTags);
- const packageBadges = () => wrapper.findAll('[data-testid="tag-badge"]');
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('renders', () => {
- it('without tags', async () => {
- await createComponent();
-
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('with tags', async () => {
- await createComponent({ packageEntity: { ...mavenPackage, tags: mockTags } });
-
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('with tags on mobile', async () => {
- jest.spyOn(GlBreakpointInstance, 'isDesktop').mockReturnValue(false);
- await createComponent({ packageEntity: { ...mavenPackage, tags: mockTags } });
- await wrapper.vm.$nextTick();
-
- expect(packageBadges()).toHaveLength(mockTags.length);
- });
- });
-
- describe('package title', () => {
- it('is correctly bound', async () => {
- await createComponent();
-
- expect(findTitleArea().props('title')).toBe('Test package');
- });
- });
-
- describe('package icon', () => {
- const fakeSrc = 'a-fake-src';
-
- it('binds an icon when provided one from vuex', async () => {
- await createComponent({ icon: fakeSrc });
-
- expect(findTitleArea().props('avatar')).toBe(fakeSrc);
- });
-
- it('do not binds an icon when not provided one', async () => {
- await createComponent();
-
- expect(findTitleArea().props('avatar')).toBe(null);
- });
- });
-
- describe.each`
- packageEntity | text
- ${conanPackage} | ${'conan'}
- ${mavenPackage} | ${'maven'}
- ${npmPackage} | ${'npm'}
- ${nugetPackage} | ${'nuget'}
- `(`package type`, ({ packageEntity, text }) => {
- beforeEach(() => createComponent({ packageEntity }));
-
- it(`${packageEntity.package_type} should render from Vuex getters ${text}`, () => {
- expect(packageType().props()).toEqual(expect.objectContaining({ text, icon: 'package' }));
- });
- });
-
- describe('calculates the package size', () => {
- it('correctly calculates when there is only 1 file', async () => {
- await createComponent({ packageEntity: npmPackage, packageFiles: npmFiles });
-
- expect(packageSize().props()).toMatchObject({ text: '200 bytes', icon: 'disk' });
- });
-
- it('correctly calulates when there are multiple files', async () => {
- await createComponent();
-
- expect(packageSize().props('text')).toBe('300 bytes');
- });
- });
-
- describe('package tags', () => {
- it('displays the package-tags component when the package has tags', async () => {
- await createComponent({
- packageEntity: {
- ...npmPackage,
- tags: mockTags,
- },
- });
-
- expect(packageTags().exists()).toBe(true);
- });
-
- it('does not display the package-tags component when there are no tags', async () => {
- await createComponent();
-
- expect(packageTags().exists()).toBe(false);
- });
- });
-
- describe('package ref', () => {
- it('does not display the ref if missing', async () => {
- await createComponent();
-
- expect(packageRef().exists()).toBe(false);
- });
-
- it('correctly shows the package ref if there is one', async () => {
- await createComponent({ packageEntity: npmPackage });
- expect(packageRef().props()).toMatchObject({
- text: npmPackage.pipeline.ref,
- icon: 'branch',
- });
- });
- });
-
- describe('pipeline project', () => {
- it('does not display the project if missing', async () => {
- await createComponent();
-
- expect(pipelineProject().exists()).toBe(false);
- });
-
- it('correctly shows the pipeline project if there is one', async () => {
- await createComponent({ packageEntity: npmPackage });
-
- expect(pipelineProject().props()).toMatchObject({
- text: npmPackage.pipeline.project.name,
- icon: 'review-list',
- link: npmPackage.pipeline.project.web_url,
- });
- });
- });
-});
diff --git a/spec/frontend/packages/details/components/pypi_installation_spec.js b/spec/frontend/packages/details/components/pypi_installation_spec.js
deleted file mode 100644
index 2cec84282d9..00000000000
--- a/spec/frontend/packages/details/components/pypi_installation_spec.js
+++ /dev/null
@@ -1,72 +0,0 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import Vuex from 'vuex';
-import { pypiPackage as packageEntity } from 'jest/packages/mock_data';
-import InstallationTitle from '~/packages/details/components/installation_title.vue';
-import PypiInstallation from '~/packages/details/components/pypi_installation.vue';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('PypiInstallation', () => {
- let wrapper;
-
- const pipCommandStr = 'pip install';
- const pypiSetupStr = 'python setup';
-
- const store = new Vuex.Store({
- state: {
- packageEntity,
- pypiHelpPath: 'foo',
- },
- getters: {
- pypiPipCommand: () => pipCommandStr,
- pypiSetupCommand: () => pypiSetupStr,
- },
- });
-
- const pipCommand = () => wrapper.find('[data-testid="pip-command"]');
- const setupInstruction = () => wrapper.find('[data-testid="pypi-setup-content"]');
-
- const findInstallationTitle = () => wrapper.findComponent(InstallationTitle);
-
- function createComponent() {
- wrapper = shallowMount(PypiInstallation, {
- localVue,
- store,
- });
- }
-
- beforeEach(() => {
- createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('install command switch', () => {
- it('has the installation title component', () => {
- expect(findInstallationTitle().exists()).toBe(true);
- expect(findInstallationTitle().props()).toMatchObject({
- packageType: 'pypi',
- options: [{ value: 'pypi', label: 'Show PyPi commands' }],
- });
- });
- });
-
- it('renders all the messages', () => {
- expect(wrapper.element).toMatchSnapshot();
- });
-
- describe('installation commands', () => {
- it('renders the correct pip command', () => {
- expect(pipCommand().props('instruction')).toBe(pipCommandStr);
- });
- });
-
- describe('setup commands', () => {
- it('renders the correct setup block', () => {
- expect(setupInstruction().props('instruction')).toBe(pypiSetupStr);
- });
- });
-});
diff --git a/spec/frontend/packages/details/store/getters_spec.js b/spec/frontend/packages/details/store/getters_spec.js
deleted file mode 100644
index 8210511bf8f..00000000000
--- a/spec/frontend/packages/details/store/getters_spec.js
+++ /dev/null
@@ -1,295 +0,0 @@
-import { NpmManager } from '~/packages/details/constants';
-import {
- conanInstallationCommand,
- conanSetupCommand,
- packagePipeline,
- packageTypeDisplay,
- packageIcon,
- mavenInstallationXml,
- mavenInstallationCommand,
- mavenSetupXml,
- npmInstallationCommand,
- npmSetupCommand,
- nugetInstallationCommand,
- nugetSetupCommand,
- pypiPipCommand,
- pypiSetupCommand,
- composerRegistryInclude,
- composerPackageInclude,
- groupExists,
- gradleGroovyInstalCommand,
- gradleGroovyAddSourceCommand,
- gradleKotlinInstalCommand,
- gradleKotlinAddSourceCommand,
-} from '~/packages/details/store/getters';
-import {
- conanPackage,
- npmPackage,
- nugetPackage,
- mockPipelineInfo,
- mavenPackage as packageWithoutBuildInfo,
- pypiPackage,
- rubygemsPackage,
-} from '../../mock_data';
-import {
- generateMavenCommand,
- generateXmlCodeBlock,
- generateMavenSetupXml,
- registryUrl,
- pypiSetupCommandStr,
-} from '../mock_data';
-
-describe('Getters PackageDetails Store', () => {
- let state;
-
- const defaultState = {
- packageEntity: packageWithoutBuildInfo,
- conanPath: registryUrl,
- mavenPath: registryUrl,
- npmPath: registryUrl,
- nugetPath: registryUrl,
- pypiPath: registryUrl,
- };
-
- const setupState = (testState = {}) => {
- state = {
- ...defaultState,
- ...testState,
- };
- };
-
- const conanInstallationCommandStr = `conan install ${conanPackage.name} --remote=gitlab`;
- const conanSetupCommandStr = `conan remote add gitlab ${registryUrl}`;
-
- const mavenCommandStr = generateMavenCommand(packageWithoutBuildInfo.maven_metadatum);
- const mavenInstallationXmlBlock = generateXmlCodeBlock(packageWithoutBuildInfo.maven_metadatum);
- const mavenSetupXmlBlock = generateMavenSetupXml();
-
- const npmInstallStr = `npm i ${npmPackage.name}`;
- const npmSetupStr = `echo @Test:registry=${registryUrl}/ >> .npmrc`;
- const yarnInstallStr = `yarn add ${npmPackage.name}`;
- const yarnSetupStr = `echo \\"@Test:registry\\" \\"${registryUrl}/\\" >> .yarnrc`;
-
- const nugetInstallationCommandStr = `nuget install ${nugetPackage.name} -Source "GitLab"`;
- const nugetSetupCommandStr = `nuget source Add -Name "GitLab" -Source "${registryUrl}" -UserName <your_username> -Password <your_token>`;
-
- const pypiPipCommandStr = `pip install ${pypiPackage.name} --extra-index-url ${registryUrl}`;
- const composerRegistryIncludeStr =
- 'composer config repositories.gitlab.com/123 \'{"type": "composer", "url": "foo"}\'';
- const composerPackageIncludeStr = `composer req ${[packageWithoutBuildInfo.name]}:${
- packageWithoutBuildInfo.version
- }`;
-
- describe('packagePipeline', () => {
- it('should return the pipeline info when pipeline exists', () => {
- setupState({
- packageEntity: {
- ...npmPackage,
- pipeline: mockPipelineInfo,
- },
- });
-
- expect(packagePipeline(state)).toEqual(mockPipelineInfo);
- });
-
- it('should return null when build_info does not exist', () => {
- setupState();
-
- expect(packagePipeline(state)).toBe(null);
- });
- });
-
- describe('packageTypeDisplay', () => {
- describe.each`
- packageEntity | expectedResult
- ${conanPackage} | ${'Conan'}
- ${packageWithoutBuildInfo} | ${'Maven'}
- ${npmPackage} | ${'npm'}
- ${nugetPackage} | ${'NuGet'}
- ${pypiPackage} | ${'PyPI'}
- ${rubygemsPackage} | ${'RubyGems'}
- `(`package type`, ({ packageEntity, expectedResult }) => {
- beforeEach(() => setupState({ packageEntity }));
-
- it(`${packageEntity.package_type} should show as ${expectedResult}`, () => {
- expect(packageTypeDisplay(state)).toBe(expectedResult);
- });
- });
- });
-
- describe('packageIcon', () => {
- describe('nuget packages', () => {
- it('should return nuget package icon', () => {
- setupState({ packageEntity: nugetPackage });
-
- expect(packageIcon(state)).toBe(nugetPackage.nuget_metadatum.icon_url);
- });
-
- it('should return null when nuget package does not have an icon', () => {
- setupState({ packageEntity: { ...nugetPackage, nuget_metadatum: {} } });
-
- expect(packageIcon(state)).toBe(null);
- });
- });
-
- it('should not find icons for other package types', () => {
- setupState({ packageEntity: npmPackage });
-
- expect(packageIcon(state)).toBe(null);
- });
- });
-
- describe('conan string getters', () => {
- it('gets the correct conanInstallationCommand', () => {
- setupState({ packageEntity: conanPackage });
-
- expect(conanInstallationCommand(state)).toBe(conanInstallationCommandStr);
- });
-
- it('gets the correct conanSetupCommand', () => {
- setupState({ packageEntity: conanPackage });
-
- expect(conanSetupCommand(state)).toBe(conanSetupCommandStr);
- });
- });
-
- describe('maven string getters', () => {
- it('gets the correct mavenInstallationXml', () => {
- setupState();
-
- expect(mavenInstallationXml(state)).toBe(mavenInstallationXmlBlock);
- });
-
- it('gets the correct mavenInstallationCommand', () => {
- setupState();
-
- expect(mavenInstallationCommand(state)).toBe(mavenCommandStr);
- });
-
- it('gets the correct mavenSetupXml', () => {
- setupState();
-
- expect(mavenSetupXml(state)).toBe(mavenSetupXmlBlock);
- });
- });
-
- describe('npm string getters', () => {
- it('gets the correct npmInstallationCommand for npm', () => {
- setupState({ packageEntity: npmPackage });
-
- expect(npmInstallationCommand(state)(NpmManager.NPM)).toBe(npmInstallStr);
- });
-
- it('gets the correct npmSetupCommand for npm', () => {
- setupState({ packageEntity: npmPackage });
-
- expect(npmSetupCommand(state)(NpmManager.NPM)).toBe(npmSetupStr);
- });
-
- it('gets the correct npmInstallationCommand for Yarn', () => {
- setupState({ packageEntity: npmPackage });
-
- expect(npmInstallationCommand(state)(NpmManager.YARN)).toBe(yarnInstallStr);
- });
-
- it('gets the correct npmSetupCommand for Yarn', () => {
- setupState({ packageEntity: npmPackage });
-
- expect(npmSetupCommand(state)(NpmManager.YARN)).toBe(yarnSetupStr);
- });
- });
-
- describe('nuget string getters', () => {
- it('gets the correct nugetInstallationCommand', () => {
- setupState({ packageEntity: nugetPackage });
-
- expect(nugetInstallationCommand(state)).toBe(nugetInstallationCommandStr);
- });
-
- it('gets the correct nugetSetupCommand', () => {
- setupState({ packageEntity: nugetPackage });
-
- expect(nugetSetupCommand(state)).toBe(nugetSetupCommandStr);
- });
- });
-
- describe('pypi string getters', () => {
- it('gets the correct pypiPipCommand', () => {
- setupState({ packageEntity: pypiPackage });
-
- expect(pypiPipCommand(state)).toBe(pypiPipCommandStr);
- });
-
- it('gets the correct pypiSetupCommand', () => {
- setupState({ pypiSetupPath: 'foo' });
-
- expect(pypiSetupCommand(state)).toBe(pypiSetupCommandStr);
- });
- });
-
- describe('composer string getters', () => {
- it('gets the correct composerRegistryInclude command', () => {
- setupState({ composerPath: 'foo', composerConfigRepositoryName: 'gitlab.com/123' });
-
- expect(composerRegistryInclude(state)).toBe(composerRegistryIncludeStr);
- });
-
- it('gets the correct composerPackageInclude command', () => {
- setupState();
-
- expect(composerPackageInclude(state)).toBe(composerPackageIncludeStr);
- });
- });
-
- describe('gradle groovy string getters', () => {
- it('gets the correct gradleGroovyInstalCommand', () => {
- setupState();
-
- expect(gradleGroovyInstalCommand(state)).toMatchInlineSnapshot(
- `"implementation 'com.test.app:test-app:1.0-SNAPSHOT'"`,
- );
- });
-
- it('gets the correct gradleGroovyAddSourceCommand', () => {
- setupState();
-
- expect(gradleGroovyAddSourceCommand(state)).toMatchInlineSnapshot(`
- "maven {
- url 'foo/registry'
- }"
- `);
- });
- });
-
- describe('gradle kotlin string getters', () => {
- it('gets the correct gradleKotlinInstalCommand', () => {
- setupState();
-
- expect(gradleKotlinInstalCommand(state)).toMatchInlineSnapshot(
- `"implementation(\\"com.test.app:test-app:1.0-SNAPSHOT\\")"`,
- );
- });
-
- it('gets the correct gradleKotlinAddSourceCommand', () => {
- setupState();
-
- expect(gradleKotlinAddSourceCommand(state)).toMatchInlineSnapshot(
- `"maven(\\"foo/registry\\")"`,
- );
- });
- });
-
- describe('check if group', () => {
- it('is set', () => {
- setupState({ groupListUrl: '/groups/composer/-/packages' });
-
- expect(groupExists(state)).toBe(true);
- });
-
- it('is not set', () => {
- setupState({ groupListUrl: '' });
-
- expect(groupExists(state)).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/packages/shared/components/package_list_row_spec.js b/spec/frontend/packages/shared/components/package_list_row_spec.js
index bd15d48c4eb..5f2fc8ddfbd 100644
--- a/spec/frontend/packages/shared/components/package_list_row_spec.js
+++ b/spec/frontend/packages/shared/components/package_list_row_spec.js
@@ -1,5 +1,5 @@
import { GlLink } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import PackagesListRow from '~/packages/shared/components/package_list_row.vue';
@@ -19,14 +19,14 @@ describe('packages_list_row', () => {
const InfrastructureIconAndName = { name: 'InfrastructureIconAndName', template: '<div></div>' };
const PackageIconAndName = { name: 'PackageIconAndName', template: '<div></div>' };
- const findPackageTags = () => wrapper.find(PackageTags);
- const findPackagePath = () => wrapper.find(PackagePath);
- const findDeleteButton = () => wrapper.find('[data-testid="action-delete"]');
- const findPackageIconAndName = () => wrapper.find(PackageIconAndName);
+ const findPackageTags = () => wrapper.findComponent(PackageTags);
+ const findPackagePath = () => wrapper.findComponent(PackagePath);
+ const findDeleteButton = () => wrapper.findByTestId('action-delete');
+ const findPackageIconAndName = () => wrapper.findComponent(PackageIconAndName);
const findInfrastructureIconAndName = () => wrapper.findComponent(InfrastructureIconAndName);
const findListItem = () => wrapper.findComponent(ListItem);
const findPackageLink = () => wrapper.findComponent(GlLink);
- const findWarningIcon = () => wrapper.find('[data-testid="warning-icon"]');
+ const findWarningIcon = () => wrapper.findByTestId('warning-icon');
const mountComponent = ({
isGroup = false,
@@ -35,7 +35,7 @@ describe('packages_list_row', () => {
disableDelete = false,
provide,
} = {}) => {
- wrapper = shallowMount(PackagesListRow, {
+ wrapper = shallowMountExtended(PackagesListRow, {
store,
provide,
stubs: {
diff --git a/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js b/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js
new file mode 100644
index 00000000000..1f0252965b0
--- /dev/null
+++ b/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js
@@ -0,0 +1,173 @@
+import { GlFormInputGroup, GlFormGroup, GlSkeletonLoader, GlSprintf } from '@gitlab/ui';
+import { createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import DependencyProxyApp from '~/packages_and_registries/dependency_proxy/app.vue';
+import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+
+import getDependencyProxyDetailsQuery from '~/packages_and_registries/dependency_proxy/graphql/queries/get_dependency_proxy_details.query.graphql';
+
+import { proxyDetailsQuery, proxyData } from './mock_data';
+
+const localVue = createLocalVue();
+
+describe('DependencyProxyApp', () => {
+ let wrapper;
+ let apolloProvider;
+
+ const provideDefaults = {
+ groupPath: 'gitlab-org',
+ dependencyProxyAvailable: true,
+ };
+
+ function createComponent({
+ provide = provideDefaults,
+ resolver = jest.fn().mockResolvedValue(proxyDetailsQuery()),
+ } = {}) {
+ localVue.use(VueApollo);
+
+ const requestHandlers = [[getDependencyProxyDetailsQuery, resolver]];
+
+ apolloProvider = createMockApollo(requestHandlers);
+
+ wrapper = shallowMountExtended(DependencyProxyApp, {
+ localVue,
+ apolloProvider,
+ provide,
+ stubs: {
+ GlFormInputGroup,
+ GlFormGroup,
+ GlSprintf,
+ },
+ });
+ }
+
+ const findProxyNotAvailableAlert = () => wrapper.findByTestId('proxy-not-available');
+ const findProxyDisabledAlert = () => wrapper.findByTestId('proxy-disabled');
+ const findClipBoardButton = () => wrapper.findComponent(ClipboardButton);
+ const findFormGroup = () => wrapper.findComponent(GlFormGroup);
+ const findFormInputGroup = () => wrapper.findComponent(GlFormInputGroup);
+ const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
+ const findMainArea = () => wrapper.findByTestId('main-area');
+ const findProxyCountText = () => wrapper.findByTestId('proxy-count');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when the dependency proxy is not available', () => {
+ const createComponentArguments = {
+ provide: { ...provideDefaults, dependencyProxyAvailable: false },
+ };
+
+ it('renders an info alert', () => {
+ createComponent(createComponentArguments);
+
+ expect(findProxyNotAvailableAlert().text()).toBe(
+ DependencyProxyApp.i18n.proxyNotAvailableText,
+ );
+ });
+
+ it('does not render the main area', () => {
+ createComponent(createComponentArguments);
+
+ expect(findMainArea().exists()).toBe(false);
+ });
+
+ it('does not call the graphql endpoint', async () => {
+ const resolver = jest.fn().mockResolvedValue(proxyDetailsQuery());
+ createComponent({ ...createComponentArguments, resolver });
+
+ await waitForPromises();
+
+ expect(resolver).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when the dependency proxy is available', () => {
+ describe('when is loading', () => {
+ it('renders the skeleton loader', () => {
+ createComponent();
+
+ expect(findSkeletonLoader().exists()).toBe(true);
+ });
+
+ it('does not show the main section', () => {
+ createComponent();
+
+ expect(findMainArea().exists()).toBe(false);
+ });
+
+ it('does not render the info alert', () => {
+ createComponent();
+
+ expect(findProxyNotAvailableAlert().exists()).toBe(false);
+ });
+ });
+
+ describe('when the app is loaded', () => {
+ describe('when the dependency proxy is enabled', () => {
+ beforeEach(() => {
+ createComponent();
+ return waitForPromises();
+ });
+
+ it('does not render the info alert', () => {
+ expect(findProxyNotAvailableAlert().exists()).toBe(false);
+ });
+
+ it('renders the main area', () => {
+ expect(findMainArea().exists()).toBe(true);
+ });
+
+ it('renders a form group with a label', () => {
+ expect(findFormGroup().attributes('label')).toBe(
+ DependencyProxyApp.i18n.proxyImagePrefix,
+ );
+ });
+
+ it('renders a form input group', () => {
+ expect(findFormInputGroup().exists()).toBe(true);
+ expect(findFormInputGroup().props('value')).toBe(proxyData().dependencyProxyImagePrefix);
+ });
+
+ it('form input group has a clipboard button', () => {
+ expect(findClipBoardButton().exists()).toBe(true);
+ expect(findClipBoardButton().props()).toMatchObject({
+ text: proxyData().dependencyProxyImagePrefix,
+ title: DependencyProxyApp.i18n.copyImagePrefixText,
+ });
+ });
+
+ it('from group has a description with proxy count', () => {
+ expect(findProxyCountText().text()).toBe('Contains 2 blobs of images (1024 Bytes)');
+ });
+ });
+ describe('when the dependency proxy is disabled', () => {
+ beforeEach(() => {
+ createComponent({
+ resolver: jest
+ .fn()
+ .mockResolvedValue(proxyDetailsQuery({ extendSettings: { enabled: false } })),
+ });
+ return waitForPromises();
+ });
+
+ it('does not show the main area', () => {
+ expect(findMainArea().exists()).toBe(false);
+ });
+
+ it('does not show the loader', () => {
+ expect(findSkeletonLoader().exists()).toBe(false);
+ });
+
+ it('shows a proxy disabled alert', () => {
+ expect(findProxyDisabledAlert().text()).toBe(DependencyProxyApp.i18n.proxyDisabledText);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/dependency_proxy/mock_data.js b/spec/frontend/packages_and_registries/dependency_proxy/mock_data.js
new file mode 100644
index 00000000000..23d42e109f9
--- /dev/null
+++ b/spec/frontend/packages_and_registries/dependency_proxy/mock_data.js
@@ -0,0 +1,21 @@
+export const proxyData = () => ({
+ dependencyProxyBlobCount: 2,
+ dependencyProxyTotalSize: '1024 Bytes',
+ dependencyProxyImagePrefix: 'gdk.test:3000/private-group/dependency_proxy/containers',
+ dependencyProxySetting: { enabled: true, __typename: 'DependencyProxySetting' },
+});
+
+export const proxySettings = (extend = {}) => ({ enabled: true, ...extend });
+
+export const proxyDetailsQuery = ({ extendSettings = {} } = {}) => ({
+ data: {
+ group: {
+ ...proxyData(),
+ __typename: 'Group',
+ dependencyProxySetting: {
+ ...proxySettings(extendSettings),
+ __typename: 'DependencyProxySetting',
+ },
+ },
+ },
+});
diff --git a/spec/frontend/packages/details/components/__snapshots__/file_sha_spec.js.snap b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/file_sha_spec.js.snap
index 881d441e116..881d441e116 100644
--- a/spec/frontend/packages/details/components/__snapshots__/file_sha_spec.js.snap
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/file_sha_spec.js.snap
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/__snapshots__/terraform_installation_spec.js.snap b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/terraform_installation_spec.js.snap
index 03236737572..03236737572 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/__snapshots__/terraform_installation_spec.js.snap
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/terraform_installation_spec.js.snap
diff --git a/spec/frontend/packages/details/components/app_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/app_spec.js
index 377e7e05f09..c7c10cef504 100644
--- a/spec/frontend/packages/details/components/app_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/app_spec.js
@@ -5,28 +5,19 @@ import Vuex from 'vuex';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import stubChildren from 'helpers/stub_children';
-import AdditionalMetadata from '~/packages/details/components/additional_metadata.vue';
-import PackagesApp from '~/packages/details/components/app.vue';
-import DependencyRow from '~/packages/details/components/dependency_row.vue';
-import InstallationCommands from '~/packages/details/components/installation_commands.vue';
-import PackageFiles from '~/packages/details/components/package_files.vue';
-import PackageHistory from '~/packages/details/components/package_history.vue';
-import PackageTitle from '~/packages/details/components/package_title.vue';
-import * as getters from '~/packages/details/store/getters';
+import PackagesApp from '~/packages_and_registries/infrastructure_registry/details/components/app.vue';
+import PackageFiles from '~/packages_and_registries/infrastructure_registry/details/components/package_files.vue';
+import PackageHistory from '~/packages_and_registries/infrastructure_registry/details/components/package_history.vue';
+import * as getters from '~/packages_and_registries/infrastructure_registry/details/store/getters';
import PackageListRow from '~/packages/shared/components/package_list_row.vue';
import PackagesListLoader from '~/packages/shared/components/packages_list_loader.vue';
import { TrackingActions } from '~/packages/shared/constants';
import * as SharedUtils from '~/packages/shared/utils';
+import TerraformTitle from '~/packages_and_registries/infrastructure_registry/details/components/details_title.vue';
+import TerraformInstallation from '~/packages_and_registries/infrastructure_registry/details/components/terraform_installation.vue';
import Tracking from '~/tracking';
-import {
- composerPackage,
- conanPackage,
- mavenPackage,
- mavenFiles,
- npmPackage,
- nugetPackage,
-} from '../../mock_data';
+import { mavenPackage, mavenFiles, npmPackage } from 'jest/packages/mock_data';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -73,7 +64,7 @@ describe('PackagesApp', () => {
store,
stubs: {
...stubChildren(PackagesApp),
- PackageTitle: false,
+ TerraformTitle: false,
TitleArea: false,
GlButton: false,
GlModal: false,
@@ -84,23 +75,18 @@ describe('PackagesApp', () => {
});
}
- const packageTitle = () => wrapper.find(PackageTitle);
- const emptyState = () => wrapper.find(GlEmptyState);
+ const packageTitle = () => wrapper.findComponent(TerraformTitle);
+ const emptyState = () => wrapper.findComponent(GlEmptyState);
const deleteButton = () => wrapper.find('.js-delete-button');
const findDeleteModal = () => wrapper.find({ ref: 'deleteModal' });
const findDeleteFileModal = () => wrapper.find({ ref: 'deleteFileModal' });
const versionsTab = () => wrapper.find('.js-versions-tab > a');
- const packagesLoader = () => wrapper.find(PackagesListLoader);
- const packagesVersionRows = () => wrapper.findAll(PackageListRow);
+ const packagesLoader = () => wrapper.findComponent(PackagesListLoader);
+ const packagesVersionRows = () => wrapper.findAllComponents(PackageListRow);
const noVersionsMessage = () => wrapper.find('[data-testid="no-versions-message"]');
- const dependenciesTab = () => wrapper.find('.js-dependencies-tab > a');
- const dependenciesCountBadge = () => wrapper.find('[data-testid="dependencies-badge"]');
- const noDependenciesMessage = () => wrapper.find('[data-testid="no-dependencies-message"]');
- const dependencyRows = () => wrapper.findAll(DependencyRow);
- const findPackageHistory = () => wrapper.find(PackageHistory);
- const findAdditionalMetadata = () => wrapper.find(AdditionalMetadata);
- const findInstallationCommands = () => wrapper.find(InstallationCommands);
- const findPackageFiles = () => wrapper.find(PackageFiles);
+ const findPackageHistory = () => wrapper.findComponent(PackageHistory);
+ const findTerraformInstallation = () => wrapper.findComponent(TerraformInstallation);
+ const findPackageFiles = () => wrapper.findComponent(PackageFiles);
afterEach(() => {
wrapper.destroy();
@@ -129,21 +115,10 @@ describe('PackagesApp', () => {
expect(findPackageHistory().props('projectName')).toEqual(wrapper.vm.projectName);
});
- it('additional metadata has the right props', () => {
+ it('terraform installation exists', () => {
createComponent();
- expect(findAdditionalMetadata().exists()).toBe(true);
- expect(findAdditionalMetadata().props('packageEntity')).toEqual(wrapper.vm.packageEntity);
- });
-
- it('installation commands has the right props', () => {
- createComponent();
- expect(findInstallationCommands().exists()).toBe(true);
- expect(findInstallationCommands().props('packageEntity')).toEqual(wrapper.vm.packageEntity);
- });
- it('hides the files table if package type is COMPOSER', () => {
- createComponent({ packageEntity: composerPackage });
- expect(findPackageFiles().exists()).toBe(false);
+ expect(findTerraformInstallation().exists()).toBe(true);
});
describe('deleting packages', () => {
@@ -198,45 +173,6 @@ describe('PackagesApp', () => {
});
});
- describe('dependency links', () => {
- it('does not show the dependency links for a non nuget package', () => {
- createComponent();
-
- expect(dependenciesTab().exists()).toBe(false);
- });
-
- it('shows the dependencies tab with 0 count when a nuget package with no dependencies', () => {
- createComponent({
- packageEntity: {
- ...nugetPackage,
- dependency_links: [],
- },
- });
-
- return wrapper.vm.$nextTick(() => {
- const dependenciesBadge = dependenciesCountBadge();
-
- expect(dependenciesTab().exists()).toBe(true);
- expect(dependenciesBadge.exists()).toBe(true);
- expect(dependenciesBadge.text()).toBe('0');
- expect(noDependenciesMessage().exists()).toBe(true);
- });
- });
-
- it('renders the correct number of dependency rows for a nuget package', () => {
- createComponent({ packageEntity: nugetPackage });
-
- return wrapper.vm.$nextTick(() => {
- const dependenciesBadge = dependenciesCountBadge();
-
- expect(dependenciesTab().exists()).toBe(true);
- expect(dependenciesBadge.exists()).toBe(true);
- expect(dependenciesBadge.text()).toBe(nugetPackage.dependency_links.length.toString());
- expect(dependencyRows()).toHaveLength(nugetPackage.dependency_links.length);
- });
- });
- });
-
describe('tracking and delete', () => {
describe('delete package', () => {
const originalReferrer = document.referrer;
@@ -305,9 +241,9 @@ describe('PackagesApp', () => {
});
it('tracking category calls packageTypeToTrackCategory', () => {
- createComponent({ packageEntity: conanPackage });
+ createComponent({ packageEntity: npmPackage });
expect(wrapper.vm.tracking.category).toBe(category);
- expect(utilSpy).toHaveBeenCalledWith('conan');
+ expect(utilSpy).toHaveBeenCalledWith('npm');
});
it(`delete button on delete modal call event with ${TrackingActions.DELETE_PACKAGE}`, () => {
@@ -371,7 +307,7 @@ describe('PackagesApp', () => {
});
it(`file download link call event with ${TrackingActions.PULL_PACKAGE}`, () => {
- createComponent({ packageEntity: conanPackage });
+ createComponent({ packageEntity: npmPackage });
findPackageFiles().vm.$emit('download-file');
expect(eventSpy).toHaveBeenCalledWith(
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details_title_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js
index 87e0059344c..a012ec4ab05 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details_title_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js
@@ -1,7 +1,7 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { terraformModule, mavenFiles, npmPackage } from 'jest/packages/mock_data';
-import component from '~/packages_and_registries/infrastructure_registry/components/details_title.vue';
+import component from '~/packages_and_registries/infrastructure_registry/details/components/details_title.vue';
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
const localVue = createLocalVue();
diff --git a/spec/frontend/packages/details/components/file_sha_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/file_sha_spec.js
index 7bfcf78baab..9ce590bfb51 100644
--- a/spec/frontend/packages/details/components/file_sha_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/file_sha_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import FileSha from '~/packages/details/components/file_sha.vue';
+import FileSha from '~/packages_and_registries/infrastructure_registry/details/components/file_sha.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import DetailsRow from '~/vue_shared/components/registry/details_row.vue';
diff --git a/spec/frontend/packages/details/components/package_files_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js
index e8e5a24d3a3..0c5aa30223b 100644
--- a/spec/frontend/packages/details/components/package_files_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js
@@ -2,11 +2,11 @@ import { GlDropdown, GlButton } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue/';
import stubChildren from 'helpers/stub_children';
-import component from '~/packages/details/components/package_files.vue';
+import component from '~/packages_and_registries/infrastructure_registry/details/components/package_files.vue';
import FileIcon from '~/vue_shared/components/file_icon.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
-import { npmFiles, mavenFiles } from '../../mock_data';
+import { npmFiles, mavenFiles } from 'jest/packages/mock_data';
describe('Package Files', () => {
let wrapper;
diff --git a/spec/frontend/packages/details/components/package_history_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_history_spec.js
index 244805a9c82..4987af9f5b0 100644
--- a/spec/frontend/packages/details/components/package_history_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_history_spec.js
@@ -1,12 +1,12 @@
import { GlLink, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { stubComponent } from 'helpers/stub_component';
-import component from '~/packages/details/components/package_history.vue';
-import { HISTORY_PIPELINES_LIMIT } from '~/packages/details/constants';
+import component from '~/packages_and_registries/infrastructure_registry/details/components/package_history.vue';
+import { HISTORY_PIPELINES_LIMIT } from '~/packages_and_registries/shared/constants';
import HistoryItem from '~/vue_shared/components/registry/history_item.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
-import { mavenPackage, mockPipelineInfo } from '../../mock_data';
+import { mavenPackage, mockPipelineInfo } from 'jest/packages/mock_data';
describe('Package History', () => {
let wrapper;
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/terraform_installation_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/terraform_installation_spec.js
index ee1548ed5eb..c26784a4b75 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/terraform_installation_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/terraform_installation_spec.js
@@ -1,7 +1,7 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { terraformModule as packageEntity } from 'jest/packages/mock_data';
-import TerraformInstallation from '~/packages_and_registries/infrastructure_registry/components/terraform_installation.vue';
+import TerraformInstallation from '~/packages_and_registries/infrastructure_registry/details/components/terraform_installation.vue';
import CodeInstructions from '~/vue_shared/components/registry/code_instruction.vue';
const localVue = createLocalVue();
diff --git a/spec/frontend/packages/details/mock_data.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/mock_data.js
index d43abcedb2e..d43abcedb2e 100644
--- a/spec/frontend/packages/details/mock_data.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/mock_data.js
diff --git a/spec/frontend/packages/details/store/actions_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/actions_spec.js
index b16e50debc4..61fa69c2f7a 100644
--- a/spec/frontend/packages/details/store/actions_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/actions_spec.js
@@ -1,19 +1,19 @@
import testAction from 'helpers/vuex_action_helper';
import Api from '~/api';
import createFlash from '~/flash';
-import { FETCH_PACKAGE_VERSIONS_ERROR } from '~/packages/details/constants';
+import { FETCH_PACKAGE_VERSIONS_ERROR } from '~/packages_and_registries/infrastructure_registry/details/constants';
import {
fetchPackageVersions,
deletePackage,
deletePackageFile,
-} from '~/packages/details/store/actions';
-import * as types from '~/packages/details/store/mutation_types';
+} from '~/packages_and_registries/infrastructure_registry/details/store/actions';
+import * as types from '~/packages_and_registries/infrastructure_registry/details/store/mutation_types';
import {
DELETE_PACKAGE_ERROR_MESSAGE,
DELETE_PACKAGE_FILE_ERROR_MESSAGE,
DELETE_PACKAGE_FILE_SUCCESS_MESSAGE,
} from '~/packages/shared/constants';
-import { npmPackage as packageEntity } from '../../mock_data';
+import { npmPackage as packageEntity } from '../../../../../packages/mock_data';
jest.mock('~/flash.js');
jest.mock('~/api.js');
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/getters_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/getters_spec.js
new file mode 100644
index 00000000000..8740691a8ee
--- /dev/null
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/getters_spec.js
@@ -0,0 +1,40 @@
+import { packagePipeline } from '~/packages_and_registries/infrastructure_registry/details/store/getters';
+import {
+ npmPackage,
+ mockPipelineInfo,
+ mavenPackage as packageWithoutBuildInfo,
+} from 'jest/packages/mock_data';
+
+describe('Getters PackageDetails Store', () => {
+ let state;
+
+ const defaultState = {
+ packageEntity: packageWithoutBuildInfo,
+ };
+
+ const setupState = (testState = {}) => {
+ state = {
+ ...defaultState,
+ ...testState,
+ };
+ };
+
+ describe('packagePipeline', () => {
+ it('should return the pipeline info when pipeline exists', () => {
+ setupState({
+ packageEntity: {
+ ...npmPackage,
+ pipeline: mockPipelineInfo,
+ },
+ });
+
+ expect(packagePipeline(state)).toEqual(mockPipelineInfo);
+ });
+
+ it('should return null when build_info does not exist', () => {
+ setupState({ pipeline: undefined });
+
+ expect(packagePipeline(state)).toBe(null);
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/store/mutations_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/mutations_spec.js
index 296ed02d786..6efefea4a14 100644
--- a/spec/frontend/packages/details/store/mutations_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/mutations_spec.js
@@ -1,6 +1,6 @@
-import * as types from '~/packages/details/store/mutation_types';
-import mutations from '~/packages/details/store/mutations';
-import { npmPackage as packageEntity } from '../../mock_data';
+import * as types from '~/packages_and_registries/infrastructure_registry/details/store/mutation_types';
+import mutations from '~/packages_and_registries/infrastructure_registry/details/store/mutations';
+import { npmPackage as packageEntity } from 'jest/packages/mock_data';
describe('Mutations package details Store', () => {
let mockState;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/npm_installation_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/npm_installation_spec.js.snap
index 6a7f14dc33f..d5649e39561 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/npm_installation_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/npm_installation_spec.js.snap
@@ -21,6 +21,15 @@ exports[`NpmInstallation renders all the messages 1`] = `
Registry setup
</h3>
+ <gl-form-radio-group-stub
+ checked="instance"
+ disabledfield="disabled"
+ htmlfield="html"
+ options="[object Object],[object Object]"
+ textfield="text"
+ valuefield="value"
+ />
+
<code-instruction-stub
copytext="Copy npm setup command"
instruction="echo @gitlab-org:registry=npmPath/ >> .npmrc"
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/metadata/nuget_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/metadata/nuget_spec.js
index 279900edff2..f759fe7a81c 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/metadata/nuget_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/metadata/nuget_spec.js
@@ -9,9 +9,8 @@ import { PACKAGE_TYPE_NUGET } from '~/packages_and_registries/package_registry/c
import DetailsRow from '~/vue_shared/components/registry/details_row.vue';
-const nugetPackage = { packageType: PACKAGE_TYPE_NUGET, metadata: nugetMetadata() };
-
describe('Nuget Metadata', () => {
+ let nugetPackage = { packageType: PACKAGE_TYPE_NUGET, metadata: nugetMetadata() };
let wrapper;
const mountComponent = () => {
@@ -52,4 +51,30 @@ describe('Nuget Metadata', () => {
expect(element.props('icon')).toBe(icon);
expect(findElementLink(element).attributes('href')).toBe(nugetPackage.metadata[link]);
});
+
+ describe('without source', () => {
+ beforeAll(() => {
+ nugetPackage = {
+ packageType: PACKAGE_TYPE_NUGET,
+ metadata: { iconUrl: 'iconUrl', licenseUrl: 'licenseUrl' },
+ };
+ });
+
+ it('does not show additional metadata', () => {
+ expect(findNugetSource().exists()).toBe(false);
+ });
+ });
+
+ describe('without license', () => {
+ beforeAll(() => {
+ nugetPackage = {
+ packageType: PACKAGE_TYPE_NUGET,
+ metadata: { iconUrl: 'iconUrl', projectUrl: 'projectUrl' },
+ };
+ });
+
+ it('does not show additional metadata', () => {
+ expect(findNugetLicense().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/npm_installation_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/npm_installation_spec.js
index 083c6858ad0..b89410ede13 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/npm_installation_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/npm_installation_spec.js
@@ -1,3 +1,4 @@
+import { GlFormRadioGroup } from '@gitlab/ui';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -12,6 +13,8 @@ import {
PACKAGE_TYPE_NPM,
NPM_PACKAGE_MANAGER,
YARN_PACKAGE_MANAGER,
+ PROJECT_PACKAGE_ENDPOINT_TYPE,
+ INSTANCE_PACKAGE_ENDPOINT_TYPE,
} from '~/packages_and_registries/package_registry/constants';
import CodeInstructions from '~/vue_shared/components/registry/code_instruction.vue';
@@ -25,12 +28,14 @@ describe('NpmInstallation', () => {
const findCodeInstructions = () => wrapper.findAllComponents(CodeInstructions);
const findInstallationTitle = () => wrapper.findComponent(InstallationTitle);
+ const findEndPointTypeSector = () => wrapper.findComponent(GlFormRadioGroup);
function createComponent({ data = {} } = {}) {
wrapper = shallowMountExtended(NpmInstallation, {
provide: {
npmHelpPath: 'npmHelpPath',
npmPath: 'npmPath',
+ npmProjectPath: 'npmProjectPath',
},
propsData: {
packageEntity,
@@ -53,6 +58,19 @@ describe('NpmInstallation', () => {
expect(wrapper.element).toMatchSnapshot();
});
+ describe('endpoint type selector', () => {
+ it('has the endpoint type selector', () => {
+ expect(findEndPointTypeSector().exists()).toBe(true);
+ expect(findEndPointTypeSector().vm.$attrs.checked).toBe(INSTANCE_PACKAGE_ENDPOINT_TYPE);
+ expect(findEndPointTypeSector().props()).toMatchObject({
+ options: [
+ { value: INSTANCE_PACKAGE_ENDPOINT_TYPE, text: 'Instance-level' },
+ { value: PROJECT_PACKAGE_ENDPOINT_TYPE, text: 'Project-level' },
+ ],
+ });
+ });
+ });
+
describe('install command switch', () => {
it('has the installation title component', () => {
expect(findInstallationTitle().exists()).toBe(true);
@@ -96,6 +114,28 @@ describe('NpmInstallation', () => {
trackingAction: TRACKING_ACTION_COPY_NPM_SETUP_COMMAND,
});
});
+
+ it('renders the correct setup command for different endpoint types', async () => {
+ findEndPointTypeSector().vm.$emit('change', PROJECT_PACKAGE_ENDPOINT_TYPE);
+
+ await nextTick();
+
+ expect(findCodeInstructions().at(1).props()).toMatchObject({
+ instruction: `echo @gitlab-org:registry=npmProjectPath/ >> .npmrc`,
+ multiline: false,
+ trackingAction: TRACKING_ACTION_COPY_NPM_SETUP_COMMAND,
+ });
+
+ findEndPointTypeSector().vm.$emit('change', INSTANCE_PACKAGE_ENDPOINT_TYPE);
+
+ await nextTick();
+
+ expect(findCodeInstructions().at(1).props()).toMatchObject({
+ instruction: `echo @gitlab-org:registry=npmPath/ >> .npmrc`,
+ multiline: false,
+ trackingAction: TRACKING_ACTION_COPY_NPM_SETUP_COMMAND,
+ });
+ });
});
describe('yarn', () => {
@@ -118,5 +158,27 @@ describe('NpmInstallation', () => {
trackingAction: TRACKING_ACTION_COPY_YARN_SETUP_COMMAND,
});
});
+
+ it('renders the correct setup command for different endpoint types', async () => {
+ findEndPointTypeSector().vm.$emit('change', PROJECT_PACKAGE_ENDPOINT_TYPE);
+
+ await nextTick();
+
+ expect(findCodeInstructions().at(1).props()).toMatchObject({
+ instruction: `echo \\"@gitlab-org:registry\\" \\"npmProjectPath/\\" >> .yarnrc`,
+ multiline: false,
+ trackingAction: TRACKING_ACTION_COPY_YARN_SETUP_COMMAND,
+ });
+
+ findEndPointTypeSector().vm.$emit('change', INSTANCE_PACKAGE_ENDPOINT_TYPE);
+
+ await nextTick();
+
+ expect(findCodeInstructions().at(1).props()).toMatchObject({
+ instruction: 'echo \\"@gitlab-org:registry\\" \\"npmPath/\\" >> .yarnrc',
+ multiline: false,
+ trackingAction: TRACKING_ACTION_COPY_YARN_SETUP_COMMAND,
+ });
+ });
});
});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js
index b69008f04f0..57b8be40a7c 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js
@@ -5,7 +5,7 @@ import {
packageData,
packagePipelines,
} from 'jest/packages_and_registries/package_registry/mock_data';
-import { HISTORY_PIPELINES_LIMIT } from '~/packages/details/constants';
+import { HISTORY_PIPELINES_LIMIT } from '~/packages_and_registries/shared/constants';
import component from '~/packages_and_registries/package_registry/components/details/package_history.vue';
import HistoryItem from '~/vue_shared/components/registry/history_item.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/app_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/app_spec.js.snap
new file mode 100644
index 00000000000..1b556be5873
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/app_spec.js.snap
@@ -0,0 +1,12 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`PackagesListApp renders 1`] = `
+<div>
+ <package-title-stub
+ count="2"
+ helpurl="packageHelpUrl"
+ />
+
+ <package-search-stub />
+</div>
+`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
new file mode 100644
index 00000000000..2f2be797251
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
@@ -0,0 +1,122 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`packages_list_row renders 1`] = `
+<div
+ class="gl-display-flex gl-flex-direction-column gl-border-b-solid gl-border-t-solid gl-border-t-1 gl-border-b-1 gl-border-t-transparent gl-border-b-gray-100"
+ data-qa-selector="package_row"
+>
+ <div
+ class="gl-display-flex gl-align-items-center gl-py-3 gl-px-5"
+ >
+ <!---->
+
+ <div
+ class="gl-display-flex gl-xs-flex-direction-column gl-justify-content-space-between gl-align-items-stretch gl-flex-grow-1"
+ >
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-xs-mb-3 gl-min-w-0 gl-flex-grow-1"
+ >
+ <div
+ class="gl-display-flex gl-align-items-center gl-text-body gl-font-weight-bold gl-min-h-6 gl-min-w-0"
+ >
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-3 gl-min-w-0"
+ >
+ <gl-link-stub
+ class="gl-text-body gl-min-w-0"
+ data-qa-selector="package_link"
+ href="http://gdk.test:3000/gitlab-org/gitlab-test/-/packages/111"
+ >
+ <gl-truncate-stub
+ position="end"
+ text="@gitlab-org/package-15"
+ />
+ </gl-link-stub>
+
+ <!---->
+
+ <!---->
+ </div>
+
+ <!---->
+ </div>
+
+ <div
+ class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-min-h-6 gl-min-w-0 gl-flex-grow-1"
+ >
+ <div
+ class="gl-display-flex"
+ data-testid="left-secondary-infos"
+ >
+ <span>
+ 1.0.0
+ </span>
+
+ <!---->
+
+ <package-icon-and-name-stub>
+
+ npm
+
+ </package-icon-and-name-stub>
+
+ <!---->
+ </div>
+ </div>
+ </div>
+
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-sm-align-items-flex-end gl-justify-content-space-between gl-text-gray-500 gl-flex-shrink-0"
+ >
+ <div
+ class="gl-display-flex gl-align-items-center gl-sm-text-body gl-sm-font-weight-bold gl-min-h-6"
+ >
+ <publish-method-stub />
+ </div>
+
+ <div
+ class="gl-display-flex gl-align-items-center gl-min-h-6"
+ >
+ <span>
+ Created
+ <timeago-tooltip-stub
+ cssclass=""
+ time="2020-08-17T14:23:32Z"
+ tooltipplacement="top"
+ />
+ </span>
+ </div>
+ </div>
+ </div>
+
+ <div
+ class="gl-w-9 gl-display-none gl-sm-display-flex gl-justify-content-end gl-pr-1"
+ >
+ <gl-button-stub
+ aria-label="Remove package"
+ buttontextclasses=""
+ category="secondary"
+ data-testid="action-delete"
+ icon="remove"
+ size="medium"
+ title="Remove package"
+ variant="danger"
+ />
+ </div>
+ </div>
+
+ <div
+ class="gl-display-flex"
+ >
+ <div
+ class="gl-w-7"
+ />
+
+ <!---->
+
+ <div
+ class="gl-w-9"
+ />
+ </div>
+</div>
+`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/packages_list_app_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/packages_list_app_spec.js.snap
deleted file mode 100644
index dbebdeeb452..00000000000
--- a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/packages_list_app_spec.js.snap
+++ /dev/null
@@ -1,68 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`packages_list_app renders 1`] = `
-<div>
- <div
- help-url="foo"
- />
-
- <div />
-
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
- >
- <div
- class="svg-250 svg-content"
- >
- <img
- alt=""
- class="gl-max-w-full"
- role="img"
- src="helpSvg"
- />
- </div>
- </div>
-
- <div
- class="col-12"
- >
- <div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
- >
- <h1
- class="h4"
- >
- There are no packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
- >
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div
- class="gl-display-flex gl-flex-wrap gl-justify-content-center"
- >
- <!---->
-
- <!---->
- </div>
- </div>
- </div>
- </section>
- </div>
-</div>
-`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/publish_method_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/publish_method_spec.js.snap
new file mode 100644
index 00000000000..919dbe25ffe
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/publish_method_spec.js.snap
@@ -0,0 +1,42 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`publish_method renders 1`] = `
+<div
+ class="gl-display-flex gl-align-items-center"
+>
+ <gl-icon-stub
+ class="gl-mr-2"
+ name="git-merge"
+ size="16"
+ />
+
+ <span
+ class="gl-mr-2"
+ data-testid="pipeline-ref"
+ >
+ master
+ </span>
+
+ <gl-icon-stub
+ class="gl-mr-2"
+ name="commit"
+ size="16"
+ />
+
+ <gl-link-stub
+ class="gl-mr-2"
+ data-testid="pipeline-sha"
+ href="/namespace14/project14/-/commit/b83d6e391c22777fca1ed3012fce84f633d7fed0"
+ >
+ b83d6e39
+ </gl-link-stub>
+
+ <clipboard-button-stub
+ category="tertiary"
+ size="small"
+ text="b83d6e391c22777fca1ed3012fce84f633d7fed0"
+ title="Copy commit SHA"
+ tooltipplacement="top"
+ />
+</div>
+`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/app_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/app_spec.js
new file mode 100644
index 00000000000..3958cdf21bb
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/app_spec.js
@@ -0,0 +1,154 @@
+import { GlEmptyState, GlSprintf, GlLink } from '@gitlab/ui';
+import { createLocalVue } from '@vue/test-utils';
+
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import PackageListApp from '~/packages_and_registries/package_registry/components/list/app.vue';
+import PackageTitle from '~/packages_and_registries/package_registry/components/list/package_title.vue';
+import PackageSearch from '~/packages_and_registries/package_registry/components/list/package_search.vue';
+
+import {
+ PROJECT_RESOURCE_TYPE,
+ GROUP_RESOURCE_TYPE,
+ LIST_QUERY_DEBOUNCE_TIME,
+} from '~/packages_and_registries/package_registry/constants';
+
+import getPackagesQuery from '~/packages_and_registries/package_registry/graphql/queries/get_packages.query.graphql';
+
+import { packagesListQuery } from '../../mock_data';
+
+jest.mock('~/lib/utils/common_utils');
+jest.mock('~/flash');
+
+const localVue = createLocalVue();
+
+describe('PackagesListApp', () => {
+ let wrapper;
+ let apolloProvider;
+
+ const defaultProvide = {
+ packageHelpUrl: 'packageHelpUrl',
+ emptyListIllustration: 'emptyListIllustration',
+ emptyListHelpUrl: 'emptyListHelpUrl',
+ isGroupPage: true,
+ fullPath: 'gitlab-org',
+ };
+
+ const PackageList = {
+ name: 'package-list',
+ template: '<div><slot name="empty-state"></slot></div>',
+ };
+ const GlLoadingIcon = { name: 'gl-loading-icon', template: '<div>loading</div>' };
+
+ const findPackageTitle = () => wrapper.findComponent(PackageTitle);
+ const findSearch = () => wrapper.findComponent(PackageSearch);
+
+ const mountComponent = ({
+ resolver = jest.fn().mockResolvedValue(packagesListQuery()),
+ provide = defaultProvide,
+ } = {}) => {
+ localVue.use(VueApollo);
+
+ const requestHandlers = [[getPackagesQuery, resolver]];
+ apolloProvider = createMockApollo(requestHandlers);
+
+ wrapper = shallowMountExtended(PackageListApp, {
+ localVue,
+ apolloProvider,
+ provide,
+ stubs: {
+ GlEmptyState,
+ GlLoadingIcon,
+ PackageList,
+ GlSprintf,
+ GlLink,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const waitForDebouncedApollo = () => {
+ jest.advanceTimersByTime(LIST_QUERY_DEBOUNCE_TIME);
+ return waitForPromises();
+ };
+
+ it('renders', async () => {
+ mountComponent();
+
+ await waitForDebouncedApollo();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('has a package title', async () => {
+ mountComponent();
+
+ await waitForDebouncedApollo();
+
+ expect(findPackageTitle().exists()).toBe(true);
+ expect(findPackageTitle().props('count')).toBe(2);
+ });
+
+ describe('search component', () => {
+ it('exists', () => {
+ mountComponent();
+
+ expect(findSearch().exists()).toBe(true);
+ });
+
+ it('on update triggers a new query with updated values', async () => {
+ const resolver = jest.fn().mockResolvedValue(packagesListQuery());
+ mountComponent({ resolver });
+
+ const payload = {
+ sort: 'VERSION_DESC',
+ filters: { packageName: 'foo', packageType: 'CONAN' },
+ };
+
+ findSearch().vm.$emit('update', payload);
+
+ await waitForDebouncedApollo();
+ jest.advanceTimersByTime(LIST_QUERY_DEBOUNCE_TIME);
+
+ expect(resolver).toHaveBeenCalledWith(
+ expect.objectContaining({
+ groupSort: payload.sort,
+ ...payload.filters,
+ }),
+ );
+ });
+ });
+
+ describe.each`
+ type | sortType
+ ${PROJECT_RESOURCE_TYPE} | ${'sort'}
+ ${GROUP_RESOURCE_TYPE} | ${'groupSort'}
+ `('$type query', ({ type, sortType }) => {
+ let provide;
+ let resolver;
+
+ const isGroupPage = type === GROUP_RESOURCE_TYPE;
+
+ beforeEach(() => {
+ provide = { ...defaultProvide, isGroupPage };
+ resolver = jest.fn().mockResolvedValue(packagesListQuery(type));
+ mountComponent({ provide, resolver });
+ return waitForDebouncedApollo();
+ });
+
+ it('succeeds', () => {
+ expect(findPackageTitle().props('count')).toBe(2);
+ });
+
+ it('calls the resolver with the right parameters', () => {
+ expect(resolver).toHaveBeenCalledWith(
+ expect.objectContaining({ isGroupPage, [sortType]: '' }),
+ );
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js
new file mode 100644
index 00000000000..a276db104d7
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js
@@ -0,0 +1,156 @@
+import { GlLink, GlSprintf } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+
+import PackagesListRow from '~/packages_and_registries/package_registry/components/list/package_list_row.vue';
+import PackagePath from '~/packages/shared/components/package_path.vue';
+import PackageTags from '~/packages/shared/components/package_tags.vue';
+import PackageIconAndName from '~/packages/shared/components/package_icon_and_name.vue';
+import { PACKAGE_ERROR_STATUS } from '~/packages_and_registries/package_registry/constants';
+
+import ListItem from '~/vue_shared/components/registry/list_item.vue';
+import { packageData, packagePipelines, packageProject, packageTags } from '../../mock_data';
+
+describe('packages_list_row', () => {
+ let wrapper;
+
+ const defaultProvide = {
+ isGroupPage: false,
+ };
+
+ const packageWithoutTags = { ...packageData(), project: packageProject() };
+ const packageWithTags = { ...packageWithoutTags, tags: { nodes: packageTags() } };
+
+ const findPackageTags = () => wrapper.find(PackageTags);
+ const findPackagePath = () => wrapper.find(PackagePath);
+ const findDeleteButton = () => wrapper.findByTestId('action-delete');
+ const findPackageIconAndName = () => wrapper.find(PackageIconAndName);
+ const findListItem = () => wrapper.findComponent(ListItem);
+ const findPackageLink = () => wrapper.findComponent(GlLink);
+ const findWarningIcon = () => wrapper.findByTestId('warning-icon');
+ const findLeftSecondaryInfos = () => wrapper.findByTestId('left-secondary-infos');
+
+ const mountComponent = ({
+ packageEntity = packageWithoutTags,
+ provide = defaultProvide,
+ } = {}) => {
+ wrapper = shallowMountExtended(PackagesListRow, {
+ provide,
+ stubs: {
+ ListItem,
+ GlSprintf,
+ },
+ propsData: {
+ packageEntity,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders', () => {
+ mountComponent();
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('tags', () => {
+ it('renders package tags when a package has tags', () => {
+ mountComponent({ packageEntity: packageWithTags });
+
+ expect(findPackageTags().exists()).toBe(true);
+ });
+
+ it('does not render when there are no tags', () => {
+ mountComponent();
+
+ expect(findPackageTags().exists()).toBe(false);
+ });
+ });
+
+ describe('when it is group', () => {
+ it('has a package path component', () => {
+ mountComponent({ provide: { isGroupPage: true } });
+
+ expect(findPackagePath().exists()).toBe(true);
+ expect(findPackagePath().props()).toMatchObject({ path: 'gitlab-org/gitlab-test' });
+ });
+ });
+
+ describe('delete button', () => {
+ it('exists and has the correct props', () => {
+ mountComponent({ packageEntity: packageWithoutTags });
+
+ expect(findDeleteButton().exists()).toBe(true);
+ expect(findDeleteButton().attributes()).toMatchObject({
+ icon: 'remove',
+ category: 'secondary',
+ variant: 'danger',
+ title: 'Remove package',
+ });
+ });
+
+ it('emits the packageToDelete event when the delete button is clicked', async () => {
+ mountComponent({ packageEntity: packageWithoutTags });
+
+ findDeleteButton().vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.emitted('packageToDelete')).toBeTruthy();
+ expect(wrapper.emitted('packageToDelete')[0]).toEqual([packageWithoutTags]);
+ });
+ });
+
+ describe(`when the package is in ${PACKAGE_ERROR_STATUS} status`, () => {
+ beforeEach(() => {
+ mountComponent({ packageEntity: { ...packageWithoutTags, status: PACKAGE_ERROR_STATUS } });
+ });
+
+ it('list item has a disabled prop', () => {
+ expect(findListItem().props('disabled')).toBe(true);
+ });
+
+ it('details link is disabled', () => {
+ expect(findPackageLink().attributes('disabled')).toBe('true');
+ });
+
+ it('has a warning icon', () => {
+ const icon = findWarningIcon();
+ const tooltip = getBinding(icon.element, 'gl-tooltip');
+ expect(icon.props('icon')).toBe('warning');
+ expect(tooltip.value).toMatchObject({
+ title: 'Invalid Package: failed metadata extraction',
+ });
+ });
+
+ it('delete button does not exist', () => {
+ expect(findDeleteButton().exists()).toBe(false);
+ });
+ });
+
+ describe('secondary left info', () => {
+ it('has the package version', () => {
+ mountComponent();
+
+ expect(findLeftSecondaryInfos().text()).toContain(packageWithoutTags.version);
+ });
+
+ it('if the pipeline exists show the author message', () => {
+ mountComponent({
+ packageEntity: { ...packageWithoutTags, pipelines: { nodes: packagePipelines() } },
+ });
+
+ expect(findLeftSecondaryInfos().text()).toContain('published by Administrator');
+ });
+
+ it('has icon and name component', () => {
+ mountComponent();
+
+ expect(findPackageIconAndName().text()).toBe(packageWithoutTags.packageType.toLowerCase());
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_app_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_app_spec.js
deleted file mode 100644
index 6c871a34d50..00000000000
--- a/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_app_spec.js
+++ /dev/null
@@ -1,273 +0,0 @@
-import { GlEmptyState, GlSprintf, GlLink } from '@gitlab/ui';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import Vuex from 'vuex';
-import setWindowLocation from 'helpers/set_window_location_helper';
-import createFlash from '~/flash';
-import * as commonUtils from '~/lib/utils/common_utils';
-import { DELETE_PACKAGE_SUCCESS_MESSAGE } from '~/packages/list/constants';
-import { SHOW_DELETE_SUCCESS_ALERT } from '~/packages/shared/constants';
-import PackageListApp from '~/packages_and_registries/package_registry/components/list/packages_list_app.vue';
-import { FILTERED_SEARCH_TERM } from '~/packages_and_registries/shared/constants';
-import * as packageUtils from '~/packages_and_registries/shared/utils';
-
-jest.mock('~/lib/utils/common_utils');
-jest.mock('~/flash');
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('packages_list_app', () => {
- let wrapper;
- let store;
-
- const PackageList = {
- name: 'package-list',
- template: '<div><slot name="empty-state"></slot></div>',
- };
- const GlLoadingIcon = { name: 'gl-loading-icon', template: '<div>loading</div>' };
-
- // we need to manually stub dynamic imported components because shallowMount is not able to stub them automatically. See: https://github.com/vuejs/vue-test-utils/issues/1279
- const PackageSearch = { name: 'PackageSearch', template: '<div></div>' };
- const PackageTitle = { name: 'PackageTitle', template: '<div></div>' };
- const InfrastructureTitle = { name: 'InfrastructureTitle', template: '<div></div>' };
- const InfrastructureSearch = { name: 'InfrastructureSearch', template: '<div></div>' };
-
- const emptyListHelpUrl = 'helpUrl';
- const findEmptyState = () => wrapper.find(GlEmptyState);
- const findListComponent = () => wrapper.find(PackageList);
- const findPackageSearch = () => wrapper.find(PackageSearch);
- const findPackageTitle = () => wrapper.find(PackageTitle);
- const findInfrastructureTitle = () => wrapper.find(InfrastructureTitle);
- const findInfrastructureSearch = () => wrapper.find(InfrastructureSearch);
-
- const createStore = (filter = []) => {
- store = new Vuex.Store({
- state: {
- isLoading: false,
- config: {
- resourceId: 'project_id',
- emptyListIllustration: 'helpSvg',
- emptyListHelpUrl,
- packageHelpUrl: 'foo',
- },
- filter,
- },
- });
- store.dispatch = jest.fn();
- };
-
- const mountComponent = (provide) => {
- wrapper = shallowMount(PackageListApp, {
- localVue,
- store,
- stubs: {
- GlEmptyState,
- GlLoadingIcon,
- PackageList,
- GlSprintf,
- GlLink,
- PackageSearch,
- PackageTitle,
- InfrastructureTitle,
- InfrastructureSearch,
- },
- provide,
- });
- };
-
- beforeEach(() => {
- createStore();
- jest.spyOn(packageUtils, 'getQueryParams').mockReturnValue({});
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders', () => {
- mountComponent();
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('call requestPackagesList on page:changed', () => {
- mountComponent();
- store.dispatch.mockClear();
-
- const list = findListComponent();
- list.vm.$emit('page:changed', 1);
- expect(store.dispatch).toHaveBeenCalledWith('requestPackagesList', { page: 1 });
- });
-
- it('call requestDeletePackage on package:delete', () => {
- mountComponent();
-
- const list = findListComponent();
- list.vm.$emit('package:delete', 'foo');
- expect(store.dispatch).toHaveBeenCalledWith('requestDeletePackage', 'foo');
- });
-
- it('does call requestPackagesList only one time on render', () => {
- mountComponent();
-
- expect(store.dispatch).toHaveBeenCalledTimes(3);
- expect(store.dispatch).toHaveBeenNthCalledWith(1, 'setSorting', expect.any(Object));
- expect(store.dispatch).toHaveBeenNthCalledWith(2, 'setFilter', expect.any(Array));
- expect(store.dispatch).toHaveBeenNthCalledWith(3, 'requestPackagesList');
- });
-
- describe('url query string handling', () => {
- const defaultQueryParamsMock = {
- search: [1, 2],
- type: 'npm',
- sort: 'asc',
- orderBy: 'created',
- };
-
- it('calls setSorting with the query string based sorting', () => {
- jest.spyOn(packageUtils, 'getQueryParams').mockReturnValue(defaultQueryParamsMock);
-
- mountComponent();
-
- expect(store.dispatch).toHaveBeenNthCalledWith(1, 'setSorting', {
- orderBy: defaultQueryParamsMock.orderBy,
- sort: defaultQueryParamsMock.sort,
- });
- });
-
- it('calls setFilter with the query string based filters', () => {
- jest.spyOn(packageUtils, 'getQueryParams').mockReturnValue(defaultQueryParamsMock);
-
- mountComponent();
-
- expect(store.dispatch).toHaveBeenNthCalledWith(2, 'setFilter', [
- { type: 'type', value: { data: defaultQueryParamsMock.type } },
- { type: FILTERED_SEARCH_TERM, value: { data: defaultQueryParamsMock.search[0] } },
- { type: FILTERED_SEARCH_TERM, value: { data: defaultQueryParamsMock.search[1] } },
- ]);
- });
-
- it('calls setSorting and setFilters with the results of extractFilterAndSorting', () => {
- jest
- .spyOn(packageUtils, 'extractFilterAndSorting')
- .mockReturnValue({ filters: ['foo'], sorting: { sort: 'desc' } });
-
- mountComponent();
-
- expect(store.dispatch).toHaveBeenNthCalledWith(1, 'setSorting', { sort: 'desc' });
- expect(store.dispatch).toHaveBeenNthCalledWith(2, 'setFilter', ['foo']);
- });
- });
-
- describe('empty state', () => {
- it('generate the correct empty list link', () => {
- mountComponent();
-
- const link = findListComponent().find(GlLink);
-
- expect(link.attributes('href')).toBe(emptyListHelpUrl);
- expect(link.text()).toBe('publish and share your packages');
- });
-
- it('includes the right content on the default tab', () => {
- mountComponent();
-
- const heading = findEmptyState().find('h1');
-
- expect(heading.text()).toBe('There are no packages yet');
- });
- });
-
- describe('filter without results', () => {
- beforeEach(() => {
- createStore([{ type: 'something' }]);
- mountComponent();
- });
-
- it('should show specific empty message', () => {
- expect(findEmptyState().text()).toContain('Sorry, your filter produced no results');
- expect(findEmptyState().text()).toContain(
- 'To widen your search, change or remove the filters above',
- );
- });
- });
-
- describe('Package Search', () => {
- it('exists', () => {
- mountComponent();
-
- expect(findPackageSearch().exists()).toBe(true);
- });
-
- it('on update fetches data from the store', () => {
- mountComponent();
- store.dispatch.mockClear();
-
- findPackageSearch().vm.$emit('update');
-
- expect(store.dispatch).toHaveBeenCalledWith('requestPackagesList');
- });
- });
-
- describe('Infrastructure config', () => {
- it('defaults to package registry components', () => {
- mountComponent();
-
- expect(findPackageSearch().exists()).toBe(true);
- expect(findPackageTitle().exists()).toBe(true);
-
- expect(findInfrastructureTitle().exists()).toBe(false);
- expect(findInfrastructureSearch().exists()).toBe(false);
- });
-
- it('mount different component based on the provided values', () => {
- mountComponent({
- titleComponent: 'InfrastructureTitle',
- searchComponent: 'InfrastructureSearch',
- });
-
- expect(findPackageSearch().exists()).toBe(false);
- expect(findPackageTitle().exists()).toBe(false);
-
- expect(findInfrastructureTitle().exists()).toBe(true);
- expect(findInfrastructureSearch().exists()).toBe(true);
- });
- });
-
- describe('delete alert handling', () => {
- const originalLocation = window.location.href;
- const search = `?${SHOW_DELETE_SUCCESS_ALERT}=true`;
-
- beforeEach(() => {
- createStore();
- jest.spyOn(commonUtils, 'historyReplaceState').mockImplementation(() => {});
- setWindowLocation(search);
- });
-
- afterEach(() => {
- setWindowLocation(originalLocation);
- });
-
- it(`creates a flash if the query string contains ${SHOW_DELETE_SUCCESS_ALERT}`, () => {
- mountComponent();
-
- expect(createFlash).toHaveBeenCalledWith({
- message: DELETE_PACKAGE_SUCCESS_MESSAGE,
- type: 'notice',
- });
- });
-
- it('calls historyReplaceState with a clean url', () => {
- mountComponent();
-
- expect(commonUtils.historyReplaceState).toHaveBeenCalledWith(originalLocation);
- });
-
- it(`does nothing if the query string does not contain ${SHOW_DELETE_SUCCESS_ALERT}`, () => {
- setWindowLocation('?');
- mountComponent();
-
- expect(createFlash).not.toHaveBeenCalled();
- expect(commonUtils.historyReplaceState).not.toHaveBeenCalled();
- });
- });
-});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js
index 42bc9fa3a9e..e65b2a6f320 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js
@@ -1,79 +1,79 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import Vuex from 'vuex';
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { sortableFields } from '~/packages/list/utils';
import component from '~/packages_and_registries/package_registry/components/list/package_search.vue';
import PackageTypeToken from '~/packages_and_registries/package_registry/components/list/tokens/package_type_token.vue';
import RegistrySearch from '~/vue_shared/components/registry/registry_search.vue';
import UrlSync from '~/vue_shared/components/url_sync.vue';
+import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
+import { getQueryParams, extractFilterAndSorting } from '~/packages_and_registries/shared/utils';
-const localVue = createLocalVue();
-localVue.use(Vuex);
+jest.mock('~/packages_and_registries/shared/utils');
+
+useMockLocationHelper();
describe('Package Search', () => {
let wrapper;
- let store;
+
+ const defaultQueryParamsMock = {
+ filters: ['foo'],
+ sorting: { sort: 'desc' },
+ };
const findRegistrySearch = () => wrapper.findComponent(RegistrySearch);
const findUrlSync = () => wrapper.findComponent(UrlSync);
- const createStore = (isGroupPage) => {
- const state = {
- config: {
- isGroupPage,
- },
- sorting: {
- orderBy: 'version',
- sort: 'desc',
- },
- filter: [],
- };
- store = new Vuex.Store({
- state,
- });
- store.dispatch = jest.fn();
- };
-
const mountComponent = (isGroupPage = false) => {
- createStore(isGroupPage);
-
- wrapper = shallowMount(component, {
- localVue,
- store,
+ wrapper = shallowMountExtended(component, {
+ provide() {
+ return {
+ isGroupPage,
+ };
+ },
stubs: {
UrlSync,
},
});
};
+ beforeEach(() => {
+ extractFilterAndSorting.mockReturnValue(defaultQueryParamsMock);
+ });
+
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
- it('has a registry search component', () => {
+ it('has a registry search component', async () => {
mountComponent();
+ await nextTick();
+
expect(findRegistrySearch().exists()).toBe(true);
- expect(findRegistrySearch().props()).toMatchObject({
- filter: store.state.filter,
- sorting: store.state.sorting,
- tokens: expect.arrayContaining([
- expect.objectContaining({ token: PackageTypeToken, type: 'type', icon: 'package' }),
- ]),
- sortableFields: sortableFields(),
- });
+ });
+
+ it('registry search is mounted after mount', async () => {
+ mountComponent();
+
+ expect(findRegistrySearch().exists()).toBe(false);
+ });
+
+ it('has a UrlSync component', () => {
+ mountComponent();
+
+ expect(findUrlSync().exists()).toBe(true);
});
it.each`
isGroupPage | page
${false} | ${'project'}
${true} | ${'group'}
- `('in a $page page binds the right props', ({ isGroupPage }) => {
+ `('in a $page page binds the right props', async ({ isGroupPage }) => {
mountComponent(isGroupPage);
+ await nextTick();
+
expect(findRegistrySearch().props()).toMatchObject({
- filter: store.state.filter,
- sorting: store.state.sorting,
tokens: expect.arrayContaining([
expect.objectContaining({ token: PackageTypeToken, type: 'type', icon: 'package' }),
]),
@@ -81,48 +81,85 @@ describe('Package Search', () => {
});
});
- it('on sorting:changed emits update event and calls vuex setSorting', () => {
+ it('on sorting:changed emits update event and update internal sort', async () => {
const payload = { sort: 'foo' };
mountComponent();
+ await nextTick();
+
findRegistrySearch().vm.$emit('sorting:changed', payload);
- expect(store.dispatch).toHaveBeenCalledWith('setSorting', payload);
- expect(wrapper.emitted('update')).toEqual([[]]);
+ await nextTick();
+
+ expect(findRegistrySearch().props('sorting')).toEqual({ sort: 'foo', orderBy: 'name' });
+
+ // there is always a first call on mounted that emits up default values
+ expect(wrapper.emitted('update')[1]).toEqual([
+ {
+ filters: {
+ packageName: '',
+ packageType: undefined,
+ },
+ sort: 'NAME_FOO',
+ },
+ ]);
});
- it('on filter:changed calls vuex setFilter', () => {
+ it('on filter:changed updates the filters', async () => {
const payload = ['foo'];
mountComponent();
+ await nextTick();
+
findRegistrySearch().vm.$emit('filter:changed', payload);
- expect(store.dispatch).toHaveBeenCalledWith('setFilter', payload);
+ await nextTick();
+
+ expect(findRegistrySearch().props('filter')).toEqual(['foo']);
});
- it('on filter:submit emits update event', () => {
+ it('on filter:submit emits update event', async () => {
mountComponent();
- findRegistrySearch().vm.$emit('filter:submit');
-
- expect(wrapper.emitted('update')).toEqual([[]]);
- });
+ await nextTick();
- it('has a UrlSync component', () => {
- mountComponent();
+ findRegistrySearch().vm.$emit('filter:submit');
- expect(findUrlSync().exists()).toBe(true);
+ expect(wrapper.emitted('update')[1]).toEqual([
+ {
+ filters: {
+ packageName: '',
+ packageType: undefined,
+ },
+ sort: 'NAME_DESC',
+ },
+ ]);
});
- it('on query:changed calls updateQuery from UrlSync', () => {
+ it('on query:changed calls updateQuery from UrlSync', async () => {
jest.spyOn(UrlSync.methods, 'updateQuery').mockImplementation(() => {});
mountComponent();
+ await nextTick();
+
findRegistrySearch().vm.$emit('query:changed');
expect(UrlSync.methods.updateQuery).toHaveBeenCalled();
});
+
+ it('sets the component sorting and filtering based on the querystring', async () => {
+ mountComponent();
+
+ await nextTick();
+
+ expect(getQueryParams).toHaveBeenCalled();
+
+ expect(findRegistrySearch().props()).toMatchObject({
+ filter: defaultQueryParamsMock.filters,
+ sorting: defaultQueryParamsMock.sorting,
+ });
+ });
});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/publish_method_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/publish_method_spec.js
new file mode 100644
index 00000000000..fcbd7cc6a50
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/publish_method_spec.js
@@ -0,0 +1,47 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import PublishMethod from '~/packages_and_registries/package_registry/components/list/publish_method.vue';
+import { packagePipelines } from '../../mock_data';
+
+const [pipelineData] = packagePipelines();
+
+describe('publish_method', () => {
+ let wrapper;
+
+ const findPipelineRef = () => wrapper.findByTestId('pipeline-ref');
+ const findPipelineSha = () => wrapper.findByTestId('pipeline-sha');
+ const findManualPublish = () => wrapper.findByTestId('manually-published');
+
+ const mountComponent = (pipeline = pipelineData) => {
+ wrapper = shallowMountExtended(PublishMethod, {
+ propsData: {
+ pipeline,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders', () => {
+ mountComponent();
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('pipeline information', () => {
+ it('displays branch and commit when pipeline info exists', () => {
+ mountComponent();
+
+ expect(findPipelineRef().exists()).toBe(true);
+ expect(findPipelineSha().exists()).toBe(true);
+ });
+
+ it('does not show any pipeline details when no information exists', () => {
+ mountComponent(null);
+
+ expect(findPipelineRef().exists()).toBe(false);
+ expect(findPipelineSha().exists()).toBe(false);
+ expect(findManualPublish().text()).toBe(PublishMethod.i18n.MANUALLY_PUBLISHED);
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/package_registry/mock_data.js b/spec/frontend/packages_and_registries/package_registry/mock_data.js
index 9438a2d2d72..70fc096fa44 100644
--- a/spec/frontend/packages_and_registries/package_registry/mock_data.js
+++ b/spec/frontend/packages_and_registries/package_registry/mock_data.js
@@ -86,6 +86,12 @@ export const dependencyLinks = () => [
},
];
+export const packageProject = () => ({
+ fullPath: 'gitlab-org/gitlab-test',
+ webUrl: 'http://gdk.test:3000/gitlab-org/gitlab-test',
+ __typename: 'Project',
+});
+
export const packageVersions = () => [
{
createdAt: '2021-08-10T09:33:54Z',
@@ -249,3 +255,31 @@ export const packageDestroyFileMutationError = () => ({
},
],
});
+
+export const packagesListQuery = (type = 'group') => ({
+ data: {
+ [type]: {
+ packages: {
+ count: 2,
+ nodes: [
+ {
+ ...packageData(),
+ project: packageProject(),
+ tags: { nodes: packageTags() },
+ pipelines: {
+ nodes: packagePipelines(),
+ },
+ },
+ {
+ ...packageData(),
+ project: packageProject(),
+ tags: { nodes: [] },
+ pipelines: { nodes: [] },
+ },
+ ],
+ __typename: 'PackageConnection',
+ },
+ __typename: 'Group',
+ },
+ },
+});
diff --git a/spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js b/spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js
new file mode 100644
index 00000000000..d3a970e86eb
--- /dev/null
+++ b/spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js
@@ -0,0 +1,189 @@
+import { GlSprintf, GlLink, GlToggle } from '@gitlab/ui';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import component from '~/packages_and_registries/settings/group/components/dependency_proxy_settings.vue';
+import {
+ DEPENDENCY_PROXY_HEADER,
+ DEPENDENCY_PROXY_SETTINGS_DESCRIPTION,
+ DEPENDENCY_PROXY_DOCS_PATH,
+} from '~/packages_and_registries/settings/group/constants';
+
+import updateDependencyProxySettings from '~/packages_and_registries/settings/group/graphql/mutations/update_dependency_proxy_settings.mutation.graphql';
+import getGroupPackagesSettingsQuery from '~/packages_and_registries/settings/group/graphql/queries/get_group_packages_settings.query.graphql';
+import SettingsBlock from '~/vue_shared/components/settings/settings_block.vue';
+import { updateGroupDependencyProxySettingsOptimisticResponse } from '~/packages_and_registries/settings/group/graphql/utils/optimistic_responses';
+import {
+ dependencyProxySettings,
+ dependencyProxySettingMutationMock,
+ groupPackageSettingsMock,
+ dependencyProxySettingMutationErrorMock,
+} from '../mock_data';
+
+jest.mock('~/flash');
+jest.mock('~/packages_and_registries/settings/group/graphql/utils/optimistic_responses');
+
+const localVue = createLocalVue();
+
+describe('DependencyProxySettings', () => {
+ let wrapper;
+ let apolloProvider;
+
+ const defaultProvide = {
+ defaultExpanded: false,
+ groupPath: 'foo_group_path',
+ };
+
+ localVue.use(VueApollo);
+
+ const mountComponent = ({
+ provide = defaultProvide,
+ mutationResolver = jest.fn().mockResolvedValue(dependencyProxySettingMutationMock()),
+ isLoading = false,
+ } = {}) => {
+ const requestHandlers = [[updateDependencyProxySettings, mutationResolver]];
+
+ apolloProvider = createMockApollo(requestHandlers);
+
+ wrapper = shallowMount(component, {
+ localVue,
+ apolloProvider,
+ provide,
+ propsData: {
+ dependencyProxySettings: dependencyProxySettings(),
+ isLoading,
+ },
+ stubs: {
+ GlSprintf,
+ SettingsBlock,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findSettingsBlock = () => wrapper.findComponent(SettingsBlock);
+  const findDescription = () => wrapper.find('[data-testid="description"]');
+ const findLink = () => wrapper.findComponent(GlLink);
+ const findToggle = () => wrapper.findComponent(GlToggle);
+
+ const fillApolloCache = () => {
+ apolloProvider.defaultClient.cache.writeQuery({
+ query: getGroupPackagesSettingsQuery,
+ variables: {
+ fullPath: defaultProvide.groupPath,
+ },
+ ...groupPackageSettingsMock,
+ });
+ };
+
+ const emitSettingsUpdate = (value = false) => {
+ findToggle().vm.$emit('change', value);
+ };
+
+ it('renders a settings block', () => {
+ mountComponent();
+
+ expect(findSettingsBlock().exists()).toBe(true);
+ });
+
+ it('passes the correct props to settings block', () => {
+ mountComponent();
+
+ expect(findSettingsBlock().props('defaultExpanded')).toBe(false);
+ });
+
+ it('has the correct header text', () => {
+ mountComponent();
+
+ expect(wrapper.text()).toContain(DEPENDENCY_PROXY_HEADER);
+ });
+
+ it('has the correct description text', () => {
+ mountComponent();
+
+ expect(findDescription().text()).toMatchInterpolatedText(DEPENDENCY_PROXY_SETTINGS_DESCRIPTION);
+ });
+
+ it('has the correct link', () => {
+ mountComponent();
+
+ expect(findLink().attributes()).toMatchObject({
+ href: DEPENDENCY_PROXY_DOCS_PATH,
+ });
+ expect(findLink().text()).toBe('Learn more');
+ });
+
+ describe('settings update', () => {
+ describe('success state', () => {
+ it('emits a success event', async () => {
+ mountComponent();
+
+ fillApolloCache();
+ emitSettingsUpdate();
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('success')).toEqual([[]]);
+ });
+
+ it('has an optimistic response', () => {
+ mountComponent();
+
+ fillApolloCache();
+
+ expect(findToggle().props('value')).toBe(true);
+
+ emitSettingsUpdate();
+
+ expect(updateGroupDependencyProxySettingsOptimisticResponse).toHaveBeenCalledWith({
+ enabled: false,
+ });
+ });
+ });
+
+ describe('errors', () => {
+ it('mutation payload with root level errors', async () => {
+ const mutationResolver = jest
+ .fn()
+ .mockResolvedValue(dependencyProxySettingMutationErrorMock);
+ mountComponent({ mutationResolver });
+
+ fillApolloCache();
+
+ emitSettingsUpdate();
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[]]);
+ });
+
+ it.each`
+ type | mutationResolver
+ ${'local'} | ${jest.fn().mockResolvedValue(dependencyProxySettingMutationMock({ errors: ['foo'] }))}
+ ${'network'} | ${jest.fn().mockRejectedValue()}
+ `('mutation payload with $type error', async ({ mutationResolver }) => {
+ mountComponent({ mutationResolver });
+
+ fillApolloCache();
+ emitSettingsUpdate();
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[]]);
+ });
+ });
+ });
+
+ describe('when isLoading is true', () => {
+ it('disables enable toggle', () => {
+ mountComponent({ isLoading: true });
+
+ expect(findToggle().props('disabled')).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/settings/group/components/duplicates_settings_spec.js b/spec/frontend/packages_and_registries/settings/group/components/duplicates_settings_spec.js
index 0bbb1ce3436..79c2f811c08 100644
--- a/spec/frontend/packages_and_registries/settings/group/components/duplicates_settings_spec.js
+++ b/spec/frontend/packages_and_registries/settings/group/components/duplicates_settings_spec.js
@@ -143,4 +143,18 @@ describe('Duplicates Settings', () => {
expect(findInputGroup().exists()).toBe(false);
});
});
+
+ describe('loading', () => {
+ beforeEach(() => {
+ mountComponent({ ...defaultProps, loading: true });
+ });
+
+ it('disables the enable toggle', () => {
+ expect(findToggle().props('disabled')).toBe(true);
+ });
+
+ it('disables the form input', () => {
+ expect(findInput().attributes('disabled')).toBe('true');
+ });
+ });
});
diff --git a/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js b/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
index f2877a1f2a5..e4d62bc6a6e 100644
--- a/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
+++ b/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
@@ -1,28 +1,16 @@
-import { GlSprintf, GlLink, GlAlert } from '@gitlab/ui';
+import { GlAlert } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
+import { nextTick } from 'vue';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import DuplicatesSettings from '~/packages_and_registries/settings/group/components/duplicates_settings.vue';
-import GenericSettings from '~/packages_and_registries/settings/group/components/generic_settings.vue';
+import PackagesSettings from '~/packages_and_registries/settings/group/components/packages_settings.vue';
+import DependencyProxySettings from '~/packages_and_registries/settings/group/components/dependency_proxy_settings.vue';
+
import component from '~/packages_and_registries/settings/group/components/group_settings_app.vue';
-import MavenSettings from '~/packages_and_registries/settings/group/components/maven_settings.vue';
-import {
- PACKAGE_SETTINGS_HEADER,
- PACKAGE_SETTINGS_DESCRIPTION,
- PACKAGES_DOCS_PATH,
- ERROR_UPDATING_SETTINGS,
- SUCCESS_UPDATING_SETTINGS,
-} from '~/packages_and_registries/settings/group/constants';
-import updateNamespacePackageSettings from '~/packages_and_registries/settings/group/graphql/mutations/update_group_packages_settings.mutation.graphql';
import getGroupPackagesSettingsQuery from '~/packages_and_registries/settings/group/graphql/queries/get_group_packages_settings.query.graphql';
-import SettingsBlock from '~/vue_shared/components/settings/settings_block.vue';
-import {
- groupPackageSettingsMock,
- groupPackageSettingsMutationMock,
- groupPackageSettingsMutationErrorMock,
-} from '../mock_data';
+import { groupPackageSettingsMock, packageSettings, dependencyProxySettings } from '../mock_data';
jest.mock('~/flash');
@@ -36,20 +24,16 @@ describe('Group Settings App', () => {
const defaultProvide = {
defaultExpanded: false,
groupPath: 'foo_group_path',
+ dependencyProxyAvailable: true,
};
const mountComponent = ({
- provide = defaultProvide,
resolver = jest.fn().mockResolvedValue(groupPackageSettingsMock),
- mutationResolver = jest.fn().mockResolvedValue(groupPackageSettingsMutationMock()),
- data = {},
+ provide = defaultProvide,
} = {}) => {
localVue.use(VueApollo);
- const requestHandlers = [
- [getGroupPackagesSettingsQuery, resolver],
- [updateNamespacePackageSettings, mutationResolver],
- ];
+ const requestHandlers = [[getGroupPackagesSettingsQuery, resolver]];
apolloProvider = createMockApollo(requestHandlers);
@@ -57,17 +41,6 @@ describe('Group Settings App', () => {
localVue,
apolloProvider,
provide,
- data() {
- return {
- ...data,
- };
- },
- stubs: {
- GlSprintf,
- SettingsBlock,
- MavenSettings,
- GenericSettings,
- },
mocks: {
$toast: {
show,
@@ -84,274 +57,89 @@ describe('Group Settings App', () => {
wrapper.destroy();
});
- const findSettingsBlock = () => wrapper.findComponent(SettingsBlock);
- const findDescription = () => wrapper.find('[data-testid="description"');
- const findLink = () => wrapper.findComponent(GlLink);
const findAlert = () => wrapper.findComponent(GlAlert);
- const findMavenSettings = () => wrapper.findComponent(MavenSettings);
- const findMavenDuplicatedSettings = () => findMavenSettings().findComponent(DuplicatesSettings);
- const findGenericSettings = () => wrapper.findComponent(GenericSettings);
- const findGenericDuplicatedSettings = () =>
- findGenericSettings().findComponent(DuplicatesSettings);
+ const findPackageSettings = () => wrapper.findComponent(PackagesSettings);
+ const findDependencyProxySettings = () => wrapper.findComponent(DependencyProxySettings);
const waitForApolloQueryAndRender = async () => {
await waitForPromises();
- await wrapper.vm.$nextTick();
- };
-
- const emitSettingsUpdate = (override) => {
- findMavenDuplicatedSettings().vm.$emit('update', {
- mavenDuplicateExceptionRegex: ')',
- ...override,
- });
+ await nextTick();
};
- it('renders a settings block', () => {
- mountComponent();
-
- expect(findSettingsBlock().exists()).toBe(true);
- });
-
- it('passes the correct props to settings block', () => {
- mountComponent();
-
- expect(findSettingsBlock().props('defaultExpanded')).toBe(false);
- });
-
- it('has the correct header text', () => {
- mountComponent();
-
- expect(wrapper.text()).toContain(PACKAGE_SETTINGS_HEADER);
- });
-
- it('has the correct description text', () => {
- mountComponent();
-
- expect(findDescription().text()).toMatchInterpolatedText(PACKAGE_SETTINGS_DESCRIPTION);
- });
-
- it('has the correct link', () => {
- mountComponent();
-
- expect(findLink().attributes()).toMatchObject({
- href: PACKAGES_DOCS_PATH,
- target: '_blank',
- });
- expect(findLink().text()).toBe('Learn more.');
- });
-
- it('calls the graphql API with the proper variables', () => {
- const resolver = jest.fn().mockResolvedValue(groupPackageSettingsMock);
- mountComponent({ resolver });
-
- expect(resolver).toHaveBeenCalledWith({
- fullPath: defaultProvide.groupPath,
- });
- });
-
- describe('maven settings', () => {
- it('exists', () => {
+ describe.each`
+ finder | entityProp | entityValue | successMessage | errorMessage
+ ${findPackageSettings} | ${'packageSettings'} | ${packageSettings()} | ${'Settings saved successfully'} | ${'An error occurred while saving the settings'}
+ ${findDependencyProxySettings} | ${'dependencyProxySettings'} | ${dependencyProxySettings()} | ${'Setting saved successfully'} | ${'An error occurred while saving the setting'}
+ `('settings blocks', ({ finder, entityProp, entityValue, successMessage, errorMessage }) => {
+ beforeEach(() => {
mountComponent();
-
- expect(findMavenSettings().exists()).toBe(true);
+ return waitForApolloQueryAndRender();
});
- it('assigns duplication allowness and exception props', async () => {
- mountComponent();
-
- expect(findMavenDuplicatedSettings().props('loading')).toBe(true);
-
- await waitForApolloQueryAndRender();
-
- const {
- mavenDuplicatesAllowed,
- mavenDuplicateExceptionRegex,
- } = groupPackageSettingsMock.data.group.packageSettings;
-
- expect(findMavenDuplicatedSettings().props()).toMatchObject({
- duplicatesAllowed: mavenDuplicatesAllowed,
- duplicateExceptionRegex: mavenDuplicateExceptionRegex,
- duplicateExceptionRegexError: '',
- loading: false,
- });
+ it('renders the settings block', () => {
+ expect(finder().exists()).toBe(true);
});
- it('on update event calls the mutation', async () => {
- const mutationResolver = jest.fn().mockResolvedValue(groupPackageSettingsMutationMock());
- mountComponent({ mutationResolver });
-
- await waitForApolloQueryAndRender();
-
- emitSettingsUpdate();
-
- expect(mutationResolver).toHaveBeenCalledWith({
- input: { mavenDuplicateExceptionRegex: ')', namespacePath: 'foo_group_path' },
+ it('binds the correct props', () => {
+ expect(finder().props()).toMatchObject({
+ isLoading: false,
+ [entityProp]: entityValue,
});
});
- });
-
- describe('generic settings', () => {
- it('exists', () => {
- mountComponent();
-
- expect(findGenericSettings().exists()).toBe(true);
- });
-
- it('assigns duplication allowness and exception props', async () => {
- mountComponent();
-
- expect(findGenericDuplicatedSettings().props('loading')).toBe(true);
-
- await waitForApolloQueryAndRender();
-
- const {
- genericDuplicatesAllowed,
- genericDuplicateExceptionRegex,
- } = groupPackageSettingsMock.data.group.packageSettings;
- expect(findGenericDuplicatedSettings().props()).toMatchObject({
- duplicatesAllowed: genericDuplicatesAllowed,
- duplicateExceptionRegex: genericDuplicateExceptionRegex,
- duplicateExceptionRegexError: '',
- loading: false,
+ describe('success event', () => {
+ it('shows a success toast', () => {
+ finder().vm.$emit('success');
+ expect(show).toHaveBeenCalledWith(successMessage);
});
- });
- it('on update event calls the mutation', async () => {
- const mutationResolver = jest.fn().mockResolvedValue(groupPackageSettingsMutationMock());
- mountComponent({ mutationResolver });
+ it('hides the error alert', async () => {
+ finder().vm.$emit('error');
+ await nextTick();
- await waitForApolloQueryAndRender();
+ expect(findAlert().exists()).toBe(true);
- findMavenDuplicatedSettings().vm.$emit('update', {
- genericDuplicateExceptionRegex: ')',
- });
+ finder().vm.$emit('success');
+ await nextTick();
- expect(mutationResolver).toHaveBeenCalledWith({
- input: { genericDuplicateExceptionRegex: ')', namespacePath: 'foo_group_path' },
+ expect(findAlert().exists()).toBe(false);
});
});
- });
-
- describe('settings update', () => {
- describe('success state', () => {
- it('shows a success alert', async () => {
- mountComponent();
-
- await waitForApolloQueryAndRender();
-
- emitSettingsUpdate();
-
- await waitForPromises();
-
- expect(show).toHaveBeenCalledWith(SUCCESS_UPDATING_SETTINGS);
- });
-
- it('has an optimistic response', async () => {
- const mavenDuplicateExceptionRegex = 'latest[main]something';
- mountComponent();
- await waitForApolloQueryAndRender();
-
- expect(findMavenDuplicatedSettings().props('duplicateExceptionRegex')).toBe('');
-
- emitSettingsUpdate({ mavenDuplicateExceptionRegex });
-
- // wait for apollo to update the model with the optimistic response
- await wrapper.vm.$nextTick();
-
- expect(findMavenDuplicatedSettings().props('duplicateExceptionRegex')).toBe(
- mavenDuplicateExceptionRegex,
- );
-
- // wait for the call to resolve
- await waitForPromises();
-
- expect(findMavenDuplicatedSettings().props('duplicateExceptionRegex')).toBe(
- mavenDuplicateExceptionRegex,
- );
+ describe('error event', () => {
+ beforeEach(() => {
+ finder().vm.$emit('error');
+ return nextTick();
});
- });
- describe('errors', () => {
- const verifyAlert = () => {
+ it('shows an alert', () => {
expect(findAlert().exists()).toBe(true);
- expect(findAlert().text()).toBe(ERROR_UPDATING_SETTINGS);
- expect(findAlert().props('variant')).toBe('warning');
- };
-
- it('mutation payload with root level errors', async () => {
- // note this is a complex test that covers all the path around errors that are shown in the form
- // it's one single it case, due to the expensive preparation and execution
- const mutationResolver = jest.fn().mockResolvedValue(groupPackageSettingsMutationErrorMock);
- mountComponent({ mutationResolver });
-
- await waitForApolloQueryAndRender();
-
- emitSettingsUpdate();
-
- await waitForApolloQueryAndRender();
-
- // errors are bound to the component
- expect(findMavenDuplicatedSettings().props('duplicateExceptionRegexError')).toBe(
- groupPackageSettingsMutationErrorMock.errors[0].extensions.problems[0].message,
- );
-
- // general error message is shown
-
- verifyAlert();
-
- emitSettingsUpdate();
-
- await wrapper.vm.$nextTick();
-
- // errors are reset on mutation call
- expect(findMavenDuplicatedSettings().props('duplicateExceptionRegexError')).toBe('');
});
- it.each`
- type | mutationResolver
- ${'local'} | ${jest.fn().mockResolvedValue(groupPackageSettingsMutationMock({ errors: ['foo'] }))}
- ${'network'} | ${jest.fn().mockRejectedValue()}
- `('mutation payload with $type error', async ({ mutationResolver }) => {
- mountComponent({ mutationResolver });
-
- await waitForApolloQueryAndRender();
-
- emitSettingsUpdate();
-
- await waitForPromises();
-
- verifyAlert();
+ it('alert has the right text', () => {
+ expect(findAlert().text()).toBe(errorMessage);
});
- it('a successful request dismisses the alert', async () => {
- mountComponent({ data: { alertMessage: 'foo' } });
-
- await waitForApolloQueryAndRender();
-
+ it('dismissing the alert removes it', async () => {
expect(findAlert().exists()).toBe(true);
- emitSettingsUpdate();
+ findAlert().vm.$emit('dismiss');
- await waitForPromises();
+ await nextTick();
expect(findAlert().exists()).toBe(false);
});
+ });
+ });
- it('dismiss event from alert dismiss it from the page', async () => {
- mountComponent({ data: { alertMessage: 'foo' } });
-
- await waitForApolloQueryAndRender();
-
- expect(findAlert().exists()).toBe(true);
-
- findAlert().vm.$emit('dismiss');
-
- await wrapper.vm.$nextTick();
+ describe('when the dependency proxy is not available', () => {
+ beforeEach(() => {
+ mountComponent({ provide: { ...defaultProvide, dependencyProxyAvailable: false } });
+ return waitForApolloQueryAndRender();
+ });
- expect(findAlert().exists()).toBe(false);
- });
+ it('the setting block is hidden', () => {
+ expect(findDependencyProxySettings().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/packages_and_registries/settings/group/components/package_settings_spec.js b/spec/frontend/packages_and_registries/settings/group/components/package_settings_spec.js
new file mode 100644
index 00000000000..693af21e24a
--- /dev/null
+++ b/spec/frontend/packages_and_registries/settings/group/components/package_settings_spec.js
@@ -0,0 +1,277 @@
+import { GlSprintf, GlLink } from '@gitlab/ui';
+import { createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import DuplicatesSettings from '~/packages_and_registries/settings/group/components/duplicates_settings.vue';
+import GenericSettings from '~/packages_and_registries/settings/group/components/generic_settings.vue';
+import component from '~/packages_and_registries/settings/group/components/packages_settings.vue';
+import MavenSettings from '~/packages_and_registries/settings/group/components/maven_settings.vue';
+import {
+ PACKAGE_SETTINGS_HEADER,
+ PACKAGE_SETTINGS_DESCRIPTION,
+ PACKAGES_DOCS_PATH,
+} from '~/packages_and_registries/settings/group/constants';
+
+import updateNamespacePackageSettings from '~/packages_and_registries/settings/group/graphql/mutations/update_group_packages_settings.mutation.graphql';
+import getGroupPackagesSettingsQuery from '~/packages_and_registries/settings/group/graphql/queries/get_group_packages_settings.query.graphql';
+import SettingsBlock from '~/vue_shared/components/settings/settings_block.vue';
+import { updateGroupPackagesSettingsOptimisticResponse } from '~/packages_and_registries/settings/group/graphql/utils/optimistic_responses';
+import {
+ packageSettings,
+ groupPackageSettingsMock,
+ groupPackageSettingsMutationMock,
+ groupPackageSettingsMutationErrorMock,
+} from '../mock_data';
+
+jest.mock('~/flash');
+jest.mock('~/packages_and_registries/settings/group/graphql/utils/optimistic_responses');
+
+const localVue = createLocalVue();
+
+describe('Packages Settings', () => {
+ let wrapper;
+ let apolloProvider;
+
+ const defaultProvide = {
+ defaultExpanded: false,
+ groupPath: 'foo_group_path',
+ };
+
+ const mountComponent = ({
+ mutationResolver = jest.fn().mockResolvedValue(groupPackageSettingsMutationMock()),
+ } = {}) => {
+ localVue.use(VueApollo);
+
+ const requestHandlers = [[updateNamespacePackageSettings, mutationResolver]];
+
+ apolloProvider = createMockApollo(requestHandlers);
+
+ wrapper = shallowMountExtended(component, {
+ localVue,
+ apolloProvider,
+ provide: defaultProvide,
+ propsData: {
+ packageSettings: packageSettings(),
+ },
+ stubs: {
+ GlSprintf,
+ SettingsBlock,
+ MavenSettings,
+ GenericSettings,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findSettingsBlock = () => wrapper.findComponent(SettingsBlock);
+ const findDescription = () => wrapper.findByTestId('description');
+ const findLink = () => wrapper.findComponent(GlLink);
+ const findMavenSettings = () => wrapper.findComponent(MavenSettings);
+ const findMavenDuplicatedSettings = () => findMavenSettings().findComponent(DuplicatesSettings);
+ const findGenericSettings = () => wrapper.findComponent(GenericSettings);
+ const findGenericDuplicatedSettings = () =>
+ findGenericSettings().findComponent(DuplicatesSettings);
+
+ const fillApolloCache = () => {
+ apolloProvider.defaultClient.cache.writeQuery({
+ query: getGroupPackagesSettingsQuery,
+ variables: {
+ fullPath: defaultProvide.groupPath,
+ },
+ ...groupPackageSettingsMock,
+ });
+ };
+
+ const emitMavenSettingsUpdate = (override) => {
+ findMavenDuplicatedSettings().vm.$emit('update', {
+ mavenDuplicateExceptionRegex: ')',
+ ...override,
+ });
+ };
+
+ it('renders a settings block', () => {
+ mountComponent();
+
+ expect(findSettingsBlock().exists()).toBe(true);
+ });
+
+ it('passes the correct props to settings block', () => {
+ mountComponent();
+
+ expect(findSettingsBlock().props('defaultExpanded')).toBe(false);
+ });
+
+ it('has the correct header text', () => {
+ mountComponent();
+
+ expect(wrapper.text()).toContain(PACKAGE_SETTINGS_HEADER);
+ });
+
+ it('has the correct description text', () => {
+ mountComponent();
+
+ expect(findDescription().text()).toMatchInterpolatedText(PACKAGE_SETTINGS_DESCRIPTION);
+ });
+
+ it('has the correct link', () => {
+ mountComponent();
+
+ expect(findLink().attributes()).toMatchObject({
+ href: PACKAGES_DOCS_PATH,
+ target: '_blank',
+ });
+ expect(findLink().text()).toBe('Learn more.');
+ });
+
+ describe('maven settings', () => {
+ it('exists', () => {
+ mountComponent();
+
+ expect(findMavenSettings().exists()).toBe(true);
+ });
+
+ it('assigns duplication allowance and exception props', async () => {
+ mountComponent();
+
+ const { mavenDuplicatesAllowed, mavenDuplicateExceptionRegex } = packageSettings();
+
+ expect(findMavenDuplicatedSettings().props()).toMatchObject({
+ duplicatesAllowed: mavenDuplicatesAllowed,
+ duplicateExceptionRegex: mavenDuplicateExceptionRegex,
+ duplicateExceptionRegexError: '',
+ loading: false,
+ });
+ });
+
+ it('on update event calls the mutation', () => {
+ const mutationResolver = jest.fn().mockResolvedValue(groupPackageSettingsMutationMock());
+ mountComponent({ mutationResolver });
+
+ fillApolloCache();
+
+ emitMavenSettingsUpdate();
+
+ expect(mutationResolver).toHaveBeenCalledWith({
+ input: { mavenDuplicateExceptionRegex: ')', namespacePath: 'foo_group_path' },
+ });
+ });
+ });
+
+ describe('generic settings', () => {
+ it('exists', () => {
+ mountComponent();
+
+ expect(findGenericSettings().exists()).toBe(true);
+ });
+
+ it('assigns duplication allowance and exception props', async () => {
+ mountComponent();
+
+ const { genericDuplicatesAllowed, genericDuplicateExceptionRegex } = packageSettings();
+
+ expect(findGenericDuplicatedSettings().props()).toMatchObject({
+ duplicatesAllowed: genericDuplicatesAllowed,
+ duplicateExceptionRegex: genericDuplicateExceptionRegex,
+ duplicateExceptionRegexError: '',
+ loading: false,
+ });
+ });
+
+ it('on update event calls the mutation', async () => {
+ const mutationResolver = jest.fn().mockResolvedValue(groupPackageSettingsMutationMock());
+ mountComponent({ mutationResolver });
+
+ fillApolloCache();
+
+ findMavenDuplicatedSettings().vm.$emit('update', {
+ genericDuplicateExceptionRegex: ')',
+ });
+
+ expect(mutationResolver).toHaveBeenCalledWith({
+ input: { genericDuplicateExceptionRegex: ')', namespacePath: 'foo_group_path' },
+ });
+ });
+ });
+
+ describe('settings update', () => {
+ describe('success state', () => {
+ it('emits a success event', async () => {
+ mountComponent();
+
+ fillApolloCache();
+ emitMavenSettingsUpdate();
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('success')).toEqual([[]]);
+ });
+
+ it('has an optimistic response', () => {
+ const mavenDuplicateExceptionRegex = 'latest[main]something';
+ mountComponent();
+
+ fillApolloCache();
+
+ expect(findMavenDuplicatedSettings().props('duplicateExceptionRegex')).toBe('');
+
+ emitMavenSettingsUpdate({ mavenDuplicateExceptionRegex });
+
+ expect(updateGroupPackagesSettingsOptimisticResponse).toHaveBeenCalledWith({
+ ...packageSettings(),
+ mavenDuplicateExceptionRegex,
+ });
+ });
+ });
+
+ describe('errors', () => {
+ it('mutation payload with root level errors', async () => {
+ // note this is a complex test that covers all the paths around errors that are shown in the form
+ // it's one single it case, due to the expensive preparation and execution
+ const mutationResolver = jest.fn().mockResolvedValue(groupPackageSettingsMutationErrorMock);
+ mountComponent({ mutationResolver });
+
+ fillApolloCache();
+
+ emitMavenSettingsUpdate();
+
+ await waitForPromises();
+
+ // errors are bound to the component
+ expect(findMavenDuplicatedSettings().props('duplicateExceptionRegexError')).toBe(
+ groupPackageSettingsMutationErrorMock.errors[0].extensions.problems[0].message,
+ );
+
+ // general error message is shown
+
+ expect(wrapper.emitted('error')).toEqual([[]]);
+
+ emitMavenSettingsUpdate();
+
+ await wrapper.vm.$nextTick();
+
+ // errors are reset on mutation call
+ expect(findMavenDuplicatedSettings().props('duplicateExceptionRegexError')).toBe('');
+ });
+
+ it.each`
+ type | mutationResolver
+ ${'local'} | ${jest.fn().mockResolvedValue(groupPackageSettingsMutationMock({ errors: ['foo'] }))}
+ ${'network'} | ${jest.fn().mockRejectedValue()}
+ `('mutation payload with $type error', async ({ mutationResolver }) => {
+ mountComponent({ mutationResolver });
+
+ fillApolloCache();
+ emitMavenSettingsUpdate();
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[]]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/settings/group/graphl/utils/cache_update_spec.js b/spec/frontend/packages_and_registries/settings/group/graphl/utils/cache_update_spec.js
index 03133bf1158..9d8504a1124 100644
--- a/spec/frontend/packages_and_registries/settings/group/graphl/utils/cache_update_spec.js
+++ b/spec/frontend/packages_and_registries/settings/group/graphl/utils/cache_update_spec.js
@@ -4,14 +4,16 @@ import { updateGroupPackageSettings } from '~/packages_and_registries/settings/g
describe('Package and Registries settings group cache updates', () => {
let client;
- const payload = {
- data: {
- updateNamespacePackageSettings: {
- packageSettings: {
- mavenDuplicatesAllowed: false,
- mavenDuplicateExceptionRegex: 'latest[main]something',
- },
- },
+ const updateNamespacePackageSettingsPayload = {
+ packageSettings: {
+ mavenDuplicatesAllowed: false,
+ mavenDuplicateExceptionRegex: 'latest[main]something',
+ },
+ };
+
+ const updateDependencyProxySettingsPayload = {
+ dependencyProxySetting: {
+ enabled: false,
},
};
@@ -21,6 +23,9 @@ describe('Package and Registries settings group cache updates', () => {
mavenDuplicatesAllowed: true,
mavenDuplicateExceptionRegex: '',
},
+ dependencyProxySetting: {
+ enabled: true,
+ },
},
};
@@ -35,22 +40,35 @@ describe('Package and Registries settings group cache updates', () => {
writeQuery: jest.fn(),
};
});
- describe('updateGroupPackageSettings', () => {
- it('calls readQuery', () => {
- updateGroupPackageSettings('foo')(client, payload);
- expect(client.readQuery).toHaveBeenCalledWith(queryAndVariables);
- });
-
- it('writes the correct result in the cache', () => {
- updateGroupPackageSettings('foo')(client, payload);
- expect(client.writeQuery).toHaveBeenCalledWith({
- ...queryAndVariables,
- data: {
- group: {
- ...payload.data.updateNamespacePackageSettings,
+
+ describe.each`
+ updateNamespacePackageSettings | updateDependencyProxySettings
+ ${updateNamespacePackageSettingsPayload} | ${updateDependencyProxySettingsPayload}
+ ${undefined} | ${updateDependencyProxySettingsPayload}
+ ${updateNamespacePackageSettingsPayload} | ${undefined}
+ ${undefined} | ${undefined}
+ `(
+ 'updateGroupPackageSettings',
+ ({ updateNamespacePackageSettings, updateDependencyProxySettings }) => {
+ const payload = { data: { updateNamespacePackageSettings, updateDependencyProxySettings } };
+ it('calls readQuery', () => {
+ updateGroupPackageSettings('foo')(client, payload);
+ expect(client.readQuery).toHaveBeenCalledWith(queryAndVariables);
+ });
+
+ it('writes the correct result in the cache', () => {
+ updateGroupPackageSettings('foo')(client, payload);
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ ...queryAndVariables,
+ data: {
+ group: {
+ ...cacheMock.group,
+ ...payload.data.updateNamespacePackageSettings,
+ ...payload.data.updateDependencyProxySettings,
+ },
},
- },
+ });
});
- });
- });
+ },
+ );
});
diff --git a/spec/frontend/packages_and_registries/settings/group/graphl/utils/optimistic_responses_spec.js b/spec/frontend/packages_and_registries/settings/group/graphl/utils/optimistic_responses_spec.js
index a3c53d5768a..debeb9aa89c 100644
--- a/spec/frontend/packages_and_registries/settings/group/graphl/utils/optimistic_responses_spec.js
+++ b/spec/frontend/packages_and_registries/settings/group/graphl/utils/optimistic_responses_spec.js
@@ -1,4 +1,7 @@
-import { updateGroupPackagesSettingsOptimisticResponse } from '~/packages_and_registries/settings/group/graphql/utils/optimistic_responses';
+import {
+ updateGroupPackagesSettingsOptimisticResponse,
+ updateGroupDependencyProxySettingsOptimisticResponse,
+} from '~/packages_and_registries/settings/group/graphql/utils/optimistic_responses';
describe('Optimistic responses', () => {
describe('updateGroupPackagesSettingsOptimisticResponse', () => {
@@ -17,4 +20,22 @@ describe('Optimistic responses', () => {
`);
});
});
+
+ describe('updateGroupDependencyProxySettingsOptimisticResponse', () => {
+ it('returns the correct structure', () => {
+ expect(updateGroupDependencyProxySettingsOptimisticResponse({ foo: 'bar' }))
+ .toMatchInlineSnapshot(`
+ Object {
+ "__typename": "Mutation",
+ "updateDependencyProxySettings": Object {
+ "__typename": "UpdateDependencyProxySettingsPayload",
+ "dependencyProxySetting": Object {
+ "foo": "bar",
+ },
+ "errors": Array [],
+ },
+ }
+ `);
+ });
+ });
});
diff --git a/spec/frontend/packages_and_registries/settings/group/mock_data.js b/spec/frontend/packages_and_registries/settings/group/mock_data.js
index 65119e288a1..81ba0795b7d 100644
--- a/spec/frontend/packages_and_registries/settings/group/mock_data.js
+++ b/spec/frontend/packages_and_registries/settings/group/mock_data.js
@@ -1,12 +1,20 @@
+export const packageSettings = () => ({
+ mavenDuplicatesAllowed: true,
+ mavenDuplicateExceptionRegex: '',
+ genericDuplicatesAllowed: true,
+ genericDuplicateExceptionRegex: '',
+});
+
+export const dependencyProxySettings = () => ({
+ enabled: true,
+});
+
export const groupPackageSettingsMock = {
data: {
group: {
- packageSettings: {
- mavenDuplicatesAllowed: true,
- mavenDuplicateExceptionRegex: '',
- genericDuplicatesAllowed: true,
- genericDuplicateExceptionRegex: '',
- },
+ fullPath: 'foo_group_path',
+ packageSettings: packageSettings(),
+ dependencyProxySetting: dependencyProxySettings(),
},
},
};
@@ -26,6 +34,16 @@ export const groupPackageSettingsMutationMock = (override) => ({
},
});
+export const dependencyProxySettingMutationMock = (override) => ({
+ data: {
+ updateDependencyProxySettings: {
+ dependencyProxySetting: dependencyProxySettings(),
+ errors: [],
+ ...override,
+ },
+ },
+});
+
export const groupPackageSettingsMutationErrorMock = {
errors: [
{
@@ -50,3 +68,23 @@ export const groupPackageSettingsMutationErrorMock = {
},
],
};
+export const dependencyProxySettingMutationErrorMock = {
+ errors: [
+ {
+ message: 'Some error',
+ locations: [{ line: 1, column: 41 }],
+ extensions: {
+ value: {
+ enabled: 'gitlab-org',
+ },
+ problems: [
+ {
+ path: ['enabled'],
+ explanation: 'explanation',
+ message: 'message',
+ },
+ ],
+ },
+ },
+ ],
+};
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/__snapshots__/utils_spec.js.snap b/spec/frontend/packages_and_registries/settings/project/settings/__snapshots__/utils_spec.js.snap
index cf554717127..2719e917a9b 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/__snapshots__/utils_spec.js.snap
+++ b/spec/frontend/packages_and_registries/settings/project/settings/__snapshots__/utils_spec.js.snap
@@ -100,6 +100,12 @@ Array [
"variable": 30,
},
Object {
+ "default": false,
+ "key": "SIXTY_DAYS",
+ "label": "60 days",
+ "variable": 60,
+ },
+ Object {
"default": true,
"key": "NINETY_DAYS",
"label": "90 days",
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap
index 1009db46401..9938357ed24 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap
@@ -44,7 +44,7 @@ exports[`Settings Form Keep Regex matches snapshot 1`] = `
exports[`Settings Form OlderThan matches snapshot 1`] = `
<expiration-dropdown-stub
data-testid="older-than-dropdown"
- formoptions="[object Object],[object Object],[object Object],[object Object],[object Object]"
+ formoptions="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
label="Remove tags older than:"
name="older-than"
value="FOURTEEN_DAYS"
diff --git a/spec/frontend/pages/admin/projects/components/namespace_select_spec.js b/spec/frontend/pages/admin/projects/components/namespace_select_spec.js
new file mode 100644
index 00000000000..c579aa2f2da
--- /dev/null
+++ b/spec/frontend/pages/admin/projects/components/namespace_select_spec.js
@@ -0,0 +1,93 @@
+import { mount } from '@vue/test-utils';
+import Api from '~/api';
+import NamespaceSelect from '~/pages/admin/projects/components/namespace_select.vue';
+
+describe('Dropdown select component', () => {
+ let wrapper;
+
+ const mountDropdown = (propsData) => {
+ wrapper = mount(NamespaceSelect, { propsData });
+ };
+
+ const findDropdownToggle = () => wrapper.find('button.dropdown-toggle');
+ const findNamespaceInput = () => wrapper.find('[data-testid="hidden-input"]');
+ const findFilterInput = () => wrapper.find('.namespace-search-box input');
+ const findDropdownOption = (match) => {
+ const buttons = wrapper
+ .findAll('button.dropdown-item')
+ .filter((node) => node.text().match(match));
+ return buttons.length ? buttons.at(0) : buttons;
+ };
+
+ const setFieldValue = async (field, value) => {
+ await field.setValue(value);
+ field.trigger('blur');
+ };
+
+ beforeEach(() => {
+ setFixtures('<div class="test-container"></div>');
+
+ jest.spyOn(Api, 'namespaces').mockImplementation((_, callback) =>
+ callback([
+ { id: 10, kind: 'user', full_path: 'Administrator' },
+ { id: 20, kind: 'group', full_path: 'GitLab Org' },
+ ]),
+ );
+ });
+
+ it('creates a hidden input if fieldName is provided', () => {
+ mountDropdown({ fieldName: 'namespace-input' });
+
+ expect(findNamespaceInput()).toExist();
+ expect(findNamespaceInput().attributes('name')).toBe('namespace-input');
+ });
+
+ describe('clicking dropdown options', () => {
+ it('retrieves namespaces based on filter query', async () => {
+ mountDropdown();
+
+ await setFieldValue(findFilterInput(), 'test');
+
+ expect(Api.namespaces).toHaveBeenCalledWith('test', expect.anything());
+ });
+
+ it('updates the dropdown value based upon selection', async () => {
+ mountDropdown({ fieldName: 'namespace-input' });
+
+ // wait for dropdown options to populate
+ await wrapper.vm.$nextTick();
+
+ expect(findDropdownOption('user: Administrator')).toExist();
+ expect(findDropdownOption('group: GitLab Org')).toExist();
+ expect(findDropdownOption('group: Foobar')).not.toExist();
+
+ findDropdownOption('user: Administrator').trigger('click');
+ await wrapper.vm.$nextTick();
+
+ expect(findNamespaceInput().attributes('value')).toBe('10');
+ expect(findDropdownToggle().text()).toBe('user: Administrator');
+ });
+
+ it('triggers a setNamespace event upon selection', async () => {
+ mountDropdown();
+
+ // wait for dropdown options to populate
+ await wrapper.vm.$nextTick();
+
+ findDropdownOption('group: GitLab Org').trigger('click');
+
+ expect(wrapper.emitted('setNamespace')).toHaveLength(1);
+ expect(wrapper.emitted('setNamespace')[0][0]).toBe(20);
+ });
+
+ it('displays "Any Namespace" option when showAny prop provided', () => {
+ mountDropdown({ showAny: true });
+ expect(wrapper.text()).toContain('Any namespace');
+ });
+
+ it('does not display "Any Namespace" option when showAny prop not provided', () => {
+ mountDropdown();
+ expect(wrapper.text()).not.toContain('Any namespace');
+ });
+ });
+});
diff --git a/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js b/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
new file mode 100644
index 00000000000..d6b394a42c6
--- /dev/null
+++ b/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
@@ -0,0 +1,175 @@
+import { GlEmptyState, GlLoadingIcon, GlTable } from '@gitlab/ui';
+import { mount, shallowMount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import PaginationBar from '~/import_entities/components/pagination_bar.vue';
+import BulkImportsHistoryApp from '~/pages/import/bulk_imports/history/components/bulk_imports_history_app.vue';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+
+describe('BulkImportsHistoryApp', () => {
+ const API_URL = '/api/v4/bulk_imports/entities';
+
+ const DEFAULT_HEADERS = {
+ 'x-page': 1,
+ 'x-per-page': 20,
+ 'x-next-page': 2,
+ 'x-total': 22,
+ 'x-total-pages': 2,
+ 'x-prev-page': null,
+ };
+ const DUMMY_RESPONSE = [
+ {
+ id: 1,
+ bulk_import_id: 1,
+ status: 'finished',
+ source_full_path: 'top-level-group-12',
+ destination_name: 'top-level-group-12',
+ destination_namespace: 'h5bp',
+ created_at: '2021-07-08T10:03:44.743Z',
+ failures: [],
+ },
+ {
+ id: 2,
+ bulk_import_id: 2,
+ status: 'failed',
+ source_full_path: 'autodevops-demo',
+ destination_name: 'autodevops-demo',
+ destination_namespace: 'flightjs',
+ parent_id: null,
+ namespace_id: null,
+ project_id: null,
+ created_at: '2021-07-13T12:52:26.664Z',
+ updated_at: '2021-07-13T13:34:49.403Z',
+ failures: [
+ {
+ pipeline_class: 'BulkImports::Groups::Pipelines::GroupPipeline',
+ pipeline_step: 'loader',
+ exception_class: 'ActiveRecord::RecordNotUnique',
+ correlation_id_value: '01FAFYSYZ7XPF3P9NSMTS693SZ',
+ created_at: '2021-07-13T13:34:49.344Z',
+ },
+ ],
+ },
+ ];
+
+ let wrapper;
+ let mock;
+
+ function createComponent({ shallow = true } = {}) {
+ const mountFn = shallow ? shallowMount : mount;
+ wrapper = mountFn(BulkImportsHistoryApp);
+ }
+
+ const originalApiVersion = gon.api_version;
+ beforeAll(() => {
+ gon.api_version = 'v4';
+ });
+
+ afterAll(() => {
+ gon.api_version = originalApiVersion;
+ });
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ wrapper.destroy();
+ });
+
+ describe('general behavior', () => {
+ it('renders loading state when loading', () => {
+ createComponent();
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+
+ it('renders empty state when no data is available', async () => {
+ mock.onGet(API_URL).reply(200, [], DEFAULT_HEADERS);
+ createComponent();
+ await axios.waitForAll();
+
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.find(GlEmptyState).exists()).toBe(true);
+ });
+
+ it('renders table with data when history is available', async () => {
+ mock.onGet(API_URL).reply(200, DUMMY_RESPONSE, DEFAULT_HEADERS);
+ createComponent();
+ await axios.waitForAll();
+
+ const table = wrapper.find(GlTable);
+ expect(table.exists()).toBe(true);
+ // can't use .props() or .attributes() here
+ expect(table.vm.$attrs.items).toHaveLength(DUMMY_RESPONSE.length);
+ });
+
+ it('changes page when requested by pagination bar', async () => {
+ const NEW_PAGE = 4;
+
+ mock.onGet(API_URL).reply(200, DUMMY_RESPONSE, DEFAULT_HEADERS);
+ createComponent();
+ await axios.waitForAll();
+ mock.resetHistory();
+
+ wrapper.findComponent(PaginationBar).vm.$emit('set-page', NEW_PAGE);
+ await axios.waitForAll();
+
+ expect(mock.history.get.length).toBe(1);
+ expect(mock.history.get[0].params).toStrictEqual(expect.objectContaining({ page: NEW_PAGE }));
+ });
+ });
+
+ it('changes page size when requested by pagination bar', async () => {
+ const NEW_PAGE_SIZE = 4;
+
+ mock.onGet(API_URL).reply(200, DUMMY_RESPONSE, DEFAULT_HEADERS);
+ createComponent();
+ await axios.waitForAll();
+ mock.resetHistory();
+
+ wrapper.findComponent(PaginationBar).vm.$emit('set-page-size', NEW_PAGE_SIZE);
+ await axios.waitForAll();
+
+ expect(mock.history.get.length).toBe(1);
+ expect(mock.history.get[0].params).toStrictEqual(
+ expect.objectContaining({ per_page: NEW_PAGE_SIZE }),
+ );
+ });
+
+ describe('details button', () => {
+ beforeEach(() => {
+ mock.onGet(API_URL).reply(200, DUMMY_RESPONSE, DEFAULT_HEADERS);
+ createComponent({ shallow: false });
+ return axios.waitForAll();
+ });
+
+ it('renders details button if relevant item has failures', async () => {
+ expect(
+ extendedWrapper(wrapper.find('tbody').findAll('tr').at(1)).findByText('Details').exists(),
+ ).toBe(true);
+ });
+
+ it('does not render details button if relevant item has no failures', () => {
+ expect(
+ extendedWrapper(wrapper.find('tbody').findAll('tr').at(0)).findByText('Details').exists(),
+ ).toBe(false);
+ });
+
+ it('expands details when details button is clicked', async () => {
+ const ORIGINAL_ROW_INDEX = 1;
+ await extendedWrapper(wrapper.find('tbody').findAll('tr').at(ORIGINAL_ROW_INDEX))
+ .findByText('Details')
+ .trigger('click');
+
+ const detailsRowContent = wrapper
+ .find('tbody')
+ .findAll('tr')
+ .at(ORIGINAL_ROW_INDEX + 1)
+ .find('pre');
+
+ expect(detailsRowContent.exists()).toBe(true);
+ expect(JSON.parse(detailsRowContent.text())).toStrictEqual(DUMMY_RESPONSE[1].failures);
+ });
+ });
+});
diff --git a/spec/frontend/pages/profiles/password_prompt/password_prompt_modal_spec.js b/spec/frontend/pages/profiles/password_prompt/password_prompt_modal_spec.js
new file mode 100644
index 00000000000..b722ac1e97b
--- /dev/null
+++ b/spec/frontend/pages/profiles/password_prompt/password_prompt_modal_spec.js
@@ -0,0 +1,92 @@
+import { GlModal } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import {
+ I18N_PASSWORD_PROMPT_CANCEL_BUTTON,
+ I18N_PASSWORD_PROMPT_CONFIRM_BUTTON,
+} from '~/pages/profiles/password_prompt/constants';
+import PasswordPromptModal from '~/pages/profiles/password_prompt/password_prompt_modal.vue';
+
+const createComponent = ({ props }) => {
+ return shallowMountExtended(PasswordPromptModal, {
+ propsData: {
+ ...props,
+ },
+ });
+};
+
+describe('Password prompt modal', () => {
+ let wrapper;
+
+ const mockPassword = 'not+fake+shady+password';
+ const mockEvent = { preventDefault: jest.fn() };
+ const handleConfirmPasswordSpy = jest.fn();
+
+ const findField = () => wrapper.findByTestId('password-prompt-field');
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findConfirmBtn = () => findModal().props('actionPrimary');
+ const findConfirmBtnDisabledState = () =>
+ findModal().props('actionPrimary').attributes[2].disabled;
+
+ const findCancelBtn = () => findModal().props('actionCancel');
+
+ const submitModal = () => findModal().vm.$emit('primary', mockEvent);
+ const setPassword = (newPw) => findField().vm.$emit('input', newPw);
+
+ beforeEach(() => {
+ wrapper = createComponent({
+ props: {
+ handleConfirmPassword: handleConfirmPasswordSpy,
+ },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the password field', () => {
+ expect(findField().exists()).toBe(true);
+ });
+
+ it('renders the confirm button', () => {
+ expect(findConfirmBtn().text).toEqual(I18N_PASSWORD_PROMPT_CONFIRM_BUTTON);
+ });
+
+ it('renders the cancel button', () => {
+ expect(findCancelBtn().text).toEqual(I18N_PASSWORD_PROMPT_CANCEL_BUTTON);
+ });
+
+ describe('confirm button', () => {
+ describe('with a valid password', () => {
+ it('calls the `handleConfirmPassword` method when clicked', async () => {
+ setPassword(mockPassword);
+ submitModal();
+
+ await wrapper.vm.$nextTick();
+
+ expect(handleConfirmPasswordSpy).toHaveBeenCalledTimes(1);
+ expect(handleConfirmPasswordSpy).toHaveBeenCalledWith(mockPassword);
+ });
+
+ it('enables the confirm button', async () => {
+ setPassword(mockPassword);
+
+ expect(findConfirmBtnDisabledState()).toBe(true);
+
+ await wrapper.vm.$nextTick();
+
+ expect(findConfirmBtnDisabledState()).toBe(false);
+ });
+ });
+
+ it('without a valid password is disabled', async () => {
+ setPassword('');
+
+ expect(findConfirmBtnDisabledState()).toBe(true);
+
+ await wrapper.vm.$nextTick();
+
+ expect(findConfirmBtnDisabledState()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap b/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
index 417567c9f4c..43361bb6f24 100644
--- a/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
+++ b/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
@@ -12,11 +12,11 @@ exports[`Code Coverage when fetching data is successful matches the snapshot 1`]
<gl-dropdown-stub
category="primary"
clearalltext="Clear all"
+ clearalltextclass="gl-px-5"
headertext=""
hideheaderborder="true"
highlighteditemstitle="Selected"
highlighteditemstitleclass="gl-px-5"
- showhighlighteditemstitle="true"
size="medium"
text="rspec"
variant="default"
diff --git a/spec/frontend/pages/projects/new/components/new_project_url_select_spec.js b/spec/frontend/pages/projects/new/components/new_project_url_select_spec.js
deleted file mode 100644
index 8a7f9229503..00000000000
--- a/spec/frontend/pages/projects/new/components/new_project_url_select_spec.js
+++ /dev/null
@@ -1,122 +0,0 @@
-import { GlButton, GlDropdown, GlDropdownItem, GlDropdownSectionHeader } from '@gitlab/ui';
-import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
-import VueApollo from 'vue-apollo';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
-import { getIdFromGraphQLId } from '~/graphql_shared/utils';
-import NewProjectUrlSelect from '~/pages/projects/new/components/new_project_url_select.vue';
-import searchQuery from '~/pages/projects/new/queries/search_namespaces_where_user_can_create_projects.query.graphql';
-
-describe('NewProjectUrlSelect component', () => {
- let wrapper;
-
- const data = {
- currentUser: {
- groups: {
- nodes: [
- {
- id: 'gid://gitlab/Group/26',
- fullPath: 'flightjs',
- },
- {
- id: 'gid://gitlab/Group/28',
- fullPath: 'h5bp',
- },
- ],
- },
- namespace: {
- id: 'gid://gitlab/Namespace/1',
- fullPath: 'root',
- },
- },
- };
-
- const localVue = createLocalVue();
- localVue.use(VueApollo);
-
- const requestHandlers = [[searchQuery, jest.fn().mockResolvedValue({ data })]];
- const apolloProvider = createMockApollo(requestHandlers);
-
- const provide = {
- namespaceFullPath: 'h5bp',
- namespaceId: '28',
- rootUrl: 'https://gitlab.com/',
- trackLabel: 'blank_project',
- };
-
- const mountComponent = ({ mountFn = shallowMount } = {}) =>
- mountFn(NewProjectUrlSelect, { localVue, apolloProvider, provide });
-
- const findButtonLabel = () => wrapper.findComponent(GlButton);
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findHiddenInput = () => wrapper.find('input');
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders the root url as a label', () => {
- wrapper = mountComponent();
-
- expect(findButtonLabel().text()).toBe(provide.rootUrl);
- expect(findButtonLabel().props('label')).toBe(true);
- });
-
- it('renders a dropdown with the initial namespace full path as the text', () => {
- wrapper = mountComponent();
-
- expect(findDropdown().props('text')).toBe(provide.namespaceFullPath);
- });
-
- it('renders a dropdown with the initial namespace id in the hidden input', () => {
- wrapper = mountComponent();
-
- expect(findHiddenInput().attributes('value')).toBe(provide.namespaceId);
- });
-
- it('renders expected dropdown items', async () => {
- wrapper = mountComponent({ mountFn: mount });
-
- jest.runOnlyPendingTimers();
- await wrapper.vm.$nextTick();
-
- const listItems = wrapper.findAll('li');
-
- expect(listItems.at(0).findComponent(GlDropdownSectionHeader).text()).toBe('Groups');
- expect(listItems.at(1).text()).toBe(data.currentUser.groups.nodes[0].fullPath);
- expect(listItems.at(2).text()).toBe(data.currentUser.groups.nodes[1].fullPath);
- expect(listItems.at(3).findComponent(GlDropdownSectionHeader).text()).toBe('Users');
- expect(listItems.at(4).text()).toBe(data.currentUser.namespace.fullPath);
- });
-
- it('updates hidden input with selected namespace', async () => {
- wrapper = mountComponent();
-
- jest.runOnlyPendingTimers();
- await wrapper.vm.$nextTick();
-
- wrapper.findComponent(GlDropdownItem).vm.$emit('click');
-
- await wrapper.vm.$nextTick();
-
- expect(findHiddenInput().attributes()).toMatchObject({
- name: 'project[namespace_id]',
- value: getIdFromGraphQLId(data.currentUser.groups.nodes[0].id).toString(),
- });
- });
-
- it('tracks clicking on the dropdown', () => {
- wrapper = mountComponent();
-
- const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
-
- findDropdown().vm.$emit('show');
-
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'activate_form_input', {
- label: provide.trackLabel,
- property: 'project_path',
- });
-
- unmockTracking();
- });
-});
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
index 2a3b07f95f2..53c1733eab9 100644
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
@@ -10,7 +10,17 @@ describe('Timezone Dropdown', () => {
let $dropdownEl = null;
let $wrapper = null;
const tzListSel = '.dropdown-content ul li a.is-active';
- const tzDropdownToggleText = '.dropdown-toggle-text';
+
+ const initTimezoneDropdown = (options = {}) => {
+ // eslint-disable-next-line no-new
+ new TimezoneDropdown({
+ $inputEl,
+ $dropdownEl,
+ ...options,
+ });
+ };
+
+ const findDropdownToggleText = () => $wrapper.find('.dropdown-toggle-text');
describe('Initialize', () => {
describe('with dropdown already loaded', () => {
@@ -18,16 +28,13 @@ describe('Timezone Dropdown', () => {
loadFixtures('pipeline_schedules/edit.html');
$wrapper = $('.dropdown');
$inputEl = $('#schedule_cron_timezone');
+ $inputEl.val('');
$dropdownEl = $('.js-timezone-dropdown');
-
- // eslint-disable-next-line no-new
- new TimezoneDropdown({
- $inputEl,
- $dropdownEl,
- });
});
it('can take an $inputEl in the constructor', () => {
+ initTimezoneDropdown();
+
const tzStr = '[UTC + 5.5] Sri Jayawardenepura';
const tzValue = 'Asia/Colombo';
@@ -42,6 +49,8 @@ describe('Timezone Dropdown', () => {
});
it('will format data array of timezones into a list of offsets', () => {
+ initTimezoneDropdown();
+
const data = $dropdownEl.data('data');
const formatted = $wrapper.find(tzListSel).text();
@@ -50,10 +59,28 @@ describe('Timezone Dropdown', () => {
});
});
- it('will default the timezone to UTC', () => {
- const tz = $inputEl.val();
+ describe('when `allowEmpty` property is `false`', () => {
+ beforeEach(() => {
+ initTimezoneDropdown();
+ });
+
+ it('will default the timezone to UTC', () => {
+ const tz = $inputEl.val();
- expect(tz).toBe('UTC');
+ expect(tz).toBe('UTC');
+ });
+ });
+
+ describe('when `allowEmpty` property is `true`', () => {
+ beforeEach(() => {
+ initTimezoneDropdown({
+ allowEmpty: true,
+ });
+ });
+
+ it('will default the value of the input to an empty string', () => {
+ expect($inputEl.val()).toBe('');
+ });
});
});
@@ -68,23 +95,15 @@ describe('Timezone Dropdown', () => {
it('will populate the list of UTC offsets after the dropdown is loaded', () => {
expect($wrapper.find(tzListSel).length).toEqual(0);
- // eslint-disable-next-line no-new
- new TimezoneDropdown({
- $inputEl,
- $dropdownEl,
- });
+ initTimezoneDropdown();
expect($wrapper.find(tzListSel).length).toEqual($($dropdownEl).data('data').length);
});
it('will call a provided handler when a new timezone is selected', () => {
const onSelectTimezone = jest.fn();
- // eslint-disable-next-line no-new
- new TimezoneDropdown({
- $inputEl,
- $dropdownEl,
- onSelectTimezone,
- });
+
+ initTimezoneDropdown({ onSelectTimezone });
$wrapper.find(tzListSel).first().trigger('click');
@@ -94,24 +113,15 @@ describe('Timezone Dropdown', () => {
it('will correctly set the dropdown label if a timezone identifier is set on the inputEl', () => {
$inputEl.val('America/St_Johns');
- // eslint-disable-next-line no-new
- new TimezoneDropdown({
- $inputEl,
- $dropdownEl,
- displayFormat: (selectedItem) => formatTimezone(selectedItem),
- });
+ initTimezoneDropdown({ displayFormat: (selectedItem) => formatTimezone(selectedItem) });
- expect($wrapper.find(tzDropdownToggleText).html()).toEqual('[UTC - 2.5] Newfoundland');
+ expect(findDropdownToggleText().html()).toEqual('[UTC - 2.5] Newfoundland');
});
it('will call a provided `displayFormat` handler to format the dropdown value', () => {
const displayFormat = jest.fn();
- // eslint-disable-next-line no-new
- new TimezoneDropdown({
- $inputEl,
- $dropdownEl,
- displayFormat,
- });
+
+ initTimezoneDropdown({ displayFormat });
$wrapper.find(tzListSel).first().trigger('click');
diff --git a/spec/frontend/pages/sessions/new/preserve_url_fragment_spec.js b/spec/frontend/pages/sessions/new/preserve_url_fragment_spec.js
index e39a3904613..a29db961452 100644
--- a/spec/frontend/pages/sessions/new/preserve_url_fragment_spec.js
+++ b/spec/frontend/pages/sessions/new/preserve_url_fragment_spec.js
@@ -44,7 +44,7 @@ describe('preserve_url_fragment', () => {
});
it('when "remember-me" is present', () => {
- $('.omniauth-btn')
+ $('.js-oauth-login')
.parent('form')
.attr('action', (i, href) => `${href}?remember_me=1`);
diff --git a/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js b/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js
index 85222f2ecbb..a43da4b0f19 100644
--- a/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js
+++ b/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js
@@ -112,11 +112,6 @@ describe('Pipeline Editor | Text editor component', () => {
it('configures editor with syntax highlight', () => {
expect(mockUse).toHaveBeenCalledTimes(1);
expect(mockRegisterCiSchema).toHaveBeenCalledTimes(1);
- expect(mockRegisterCiSchema).toHaveBeenCalledWith({
- projectNamespace: mockProjectNamespace,
- projectPath: mockProjectPath,
- ref: mockCommitSha,
- });
});
});
diff --git a/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js b/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js
index 753682d438b..44656b2b67d 100644
--- a/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js
+++ b/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js
@@ -5,22 +5,18 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import PipelineStatus, { i18n } from '~/pipeline_editor/components/header/pipeline_status.vue';
import getPipelineQuery from '~/pipeline_editor/graphql/queries/client/pipeline.graphql';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import PipelineEditorMiniGraph from '~/pipeline_editor/components/header/pipeline_editor_mini_graph.vue';
import { mockCommitSha, mockProjectPipeline, mockProjectFullPath } from '../../mock_data';
const localVue = createLocalVue();
localVue.use(VueApollo);
-const mockProvide = {
- projectFullPath: mockProjectFullPath,
-};
-
describe('Pipeline Status', () => {
let wrapper;
let mockApollo;
let mockPipelineQuery;
- const createComponentWithApollo = () => {
+ const createComponentWithApollo = (glFeatures = {}) => {
const handlers = [[getPipelineQuery, mockPipelineQuery]];
mockApollo = createMockApollo(handlers);
@@ -30,19 +26,23 @@ describe('Pipeline Status', () => {
propsData: {
commitSha: mockCommitSha,
},
- provide: mockProvide,
+ provide: {
+ glFeatures,
+ projectFullPath: mockProjectFullPath,
+ },
stubs: { GlLink, GlSprintf },
});
};
const findIcon = () => wrapper.findComponent(GlIcon);
- const findCiIcon = () => wrapper.findComponent(CiIcon);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findPipelineEditorMiniGraph = () => wrapper.findComponent(PipelineEditorMiniGraph);
const findPipelineId = () => wrapper.find('[data-testid="pipeline-id"]');
const findPipelineCommit = () => wrapper.find('[data-testid="pipeline-commit"]');
const findPipelineErrorMsg = () => wrapper.find('[data-testid="pipeline-error-msg"]');
const findPipelineLoadingMsg = () => wrapper.find('[data-testid="pipeline-loading-msg"]');
const findPipelineViewBtn = () => wrapper.find('[data-testid="pipeline-view-btn"]');
+ const findStatusIcon = () => wrapper.find('[data-testid="pipeline-status-icon"]');
beforeEach(() => {
mockPipelineQuery = jest.fn();
@@ -50,9 +50,7 @@ describe('Pipeline Status', () => {
afterEach(() => {
mockPipelineQuery.mockReset();
-
wrapper.destroy();
- wrapper = null;
});
describe('loading icon', () => {
@@ -73,13 +71,13 @@ describe('Pipeline Status', () => {
describe('when querying data', () => {
describe('when data is set', () => {
- beforeEach(async () => {
+ beforeEach(() => {
mockPipelineQuery.mockResolvedValue({
- data: { project: mockProjectPipeline },
+ data: { project: mockProjectPipeline() },
});
createComponentWithApollo();
- await waitForPromises();
+ waitForPromises();
});
it('query is called with correct variables', async () => {
@@ -91,20 +89,24 @@ describe('Pipeline Status', () => {
});
it('does not render error', () => {
- expect(findIcon().exists()).toBe(false);
+ expect(findPipelineErrorMsg().exists()).toBe(false);
});
it('renders pipeline data', () => {
const {
id,
detailedStatus: { detailsPath },
- } = mockProjectPipeline.pipeline;
+ } = mockProjectPipeline().pipeline;
- expect(findCiIcon().exists()).toBe(true);
+ expect(findStatusIcon().exists()).toBe(true);
expect(findPipelineId().text()).toBe(`#${id.match(/\d+/g)[0]}`);
expect(findPipelineCommit().text()).toBe(mockCommitSha);
expect(findPipelineViewBtn().attributes('href')).toBe(detailsPath);
});
+
+ it('does not render the pipeline mini graph', () => {
+ expect(findPipelineEditorMiniGraph().exists()).toBe(false);
+ });
});
describe('when data cannot be fetched', () => {
@@ -121,11 +123,26 @@ describe('Pipeline Status', () => {
});
it('does not render pipeline data', () => {
- expect(findCiIcon().exists()).toBe(false);
+ expect(findStatusIcon().exists()).toBe(false);
expect(findPipelineId().exists()).toBe(false);
expect(findPipelineCommit().exists()).toBe(false);
expect(findPipelineViewBtn().exists()).toBe(false);
});
});
});
+
+ describe('when feature flag for pipeline mini graph is enabled', () => {
+ beforeEach(() => {
+ mockPipelineQuery.mockResolvedValue({
+ data: { project: mockProjectPipeline() },
+ });
+
+ createComponentWithApollo({ pipelineEditorMiniGraph: true });
+ waitForPromises();
+ });
+
+ it('renders the pipeline mini graph', () => {
+ expect(findPipelineEditorMiniGraph().exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/pipeline_editor/components/header/pipline_editor_mini_graph_spec.js b/spec/frontend/pipeline_editor/components/header/pipline_editor_mini_graph_spec.js
new file mode 100644
index 00000000000..3d7c3c839da
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/header/pipline_editor_mini_graph_spec.js
@@ -0,0 +1,42 @@
+import { shallowMount } from '@vue/test-utils';
+import PipelineEditorMiniGraph from '~/pipeline_editor/components/header/pipeline_editor_mini_graph.vue';
+import PipelineMiniGraph from '~/pipelines/components/pipelines_list/pipeline_mini_graph.vue';
+import { mockProjectPipeline } from '../../mock_data';
+
+describe('Pipeline Status', () => {
+ let wrapper;
+
+ const createComponent = ({ hasStages = true } = {}) => {
+ wrapper = shallowMount(PipelineEditorMiniGraph, {
+ propsData: {
+ pipeline: mockProjectPipeline({ hasStages }).pipeline,
+ },
+ });
+ };
+
+ const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when there are stages', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders pipeline mini graph', () => {
+ expect(findPipelineMiniGraph().exists()).toBe(true);
+ });
+ });
+
+ describe('when there are no stages', () => {
+ beforeEach(() => {
+ createComponent({ hasStages: false });
+ });
+
+ it('does not render pipeline mini graph', () => {
+ expect(findPipelineMiniGraph().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/ui/pipeline_editor_empty_state_spec.js b/spec/frontend/pipeline_editor/components/ui/pipeline_editor_empty_state_spec.js
index b019bae886c..8e0a73b6e7c 100644
--- a/spec/frontend/pipeline_editor/components/ui/pipeline_editor_empty_state_spec.js
+++ b/spec/frontend/pipeline_editor/components/ui/pipeline_editor_empty_state_spec.js
@@ -6,9 +6,6 @@ import PipelineEditorEmptyState from '~/pipeline_editor/components/ui/pipeline_e
describe('Pipeline editor empty state', () => {
let wrapper;
const defaultProvide = {
- glFeatures: {
- pipelineEditorEmptyStateAction: false,
- },
emptyStateIllustrationPath: 'my/svg/path',
};
@@ -51,24 +48,6 @@ describe('Pipeline editor empty state', () => {
expect(findFileNav().exists()).toBe(true);
});
- describe('with feature flag off', () => {
- it('does not renders a CTA button', () => {
- expect(findConfirmButton().exists()).toBe(false);
- });
- });
- });
-
- describe('with feature flag on', () => {
- beforeEach(() => {
- createComponent({
- provide: {
- glFeatures: {
- pipelineEditorEmptyStateAction: true,
- },
- },
- });
- });
-
it('renders a CTA button', () => {
expect(findConfirmButton().exists()).toBe(true);
expect(findConfirmButton().text()).toBe(wrapper.vm.$options.i18n.btnText);
diff --git a/spec/frontend/pipeline_editor/mock_data.js b/spec/frontend/pipeline_editor/mock_data.js
index f2104f25324..0b0ff14486e 100644
--- a/spec/frontend/pipeline_editor/mock_data.js
+++ b/spec/frontend/pipeline_editor/mock_data.js
@@ -247,20 +247,47 @@ export const mockEmptySearchBranches = {
export const mockBranchPaginationLimit = 10;
export const mockTotalBranches = 20; // must be greater than mockBranchPaginationLimit to test pagination
-export const mockProjectPipeline = {
- pipeline: {
- commitPath: '/-/commit/aabbccdd',
- id: 'gid://gitlab/Ci::Pipeline/118',
- iid: '28',
- shortSha: mockCommitSha,
- status: 'SUCCESS',
- detailedStatus: {
- detailsPath: '/root/sample-ci-project/-/pipelines/118"',
- group: 'success',
- icon: 'status_success',
- text: 'passed',
+export const mockProjectPipeline = ({ hasStages = true } = {}) => {
+ const stages = hasStages
+ ? {
+ edges: [
+ {
+ node: {
+ id: 'gid://gitlab/Ci::Stage/605',
+ name: 'prepare',
+ status: 'success',
+ detailedStatus: {
+ detailsPath: '/root/sample-ci-project/-/pipelines/268#prepare',
+ group: 'success',
+ hasDetails: true,
+ icon: 'status_success',
+ id: 'success-605-605',
+ label: 'passed',
+ text: 'passed',
+ tooltip: 'passed',
+ },
+ },
+ },
+ ],
+ }
+ : null;
+
+ return {
+ pipeline: {
+ commitPath: '/-/commit/aabbccdd',
+ id: 'gid://gitlab/Ci::Pipeline/118',
+ iid: '28',
+ shortSha: mockCommitSha,
+ status: 'SUCCESS',
+ detailedStatus: {
+ detailsPath: '/root/sample-ci-project/-/pipelines/118',
+ group: 'success',
+ icon: 'status_success',
+ text: 'passed',
+ },
+ stages,
},
- },
+ };
};
export const mockLintResponse = {
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
index 393cad0546b..b6713319e69 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
@@ -22,7 +22,6 @@ import {
mockCiConfigPath,
mockCiConfigQueryResponse,
mockBlobContentQueryResponse,
- mockBlobContentQueryResponseEmptyCiFile,
mockBlobContentQueryResponseNoCiFile,
mockCiYml,
mockCommitSha,
@@ -43,9 +42,6 @@ const MockSourceEditor = {
const mockProvide = {
ciConfigPath: mockCiConfigPath,
defaultBranch: mockDefaultBranch,
- glFeatures: {
- pipelineEditorEmptyStateAction: false,
- },
projectFullPath: mockProjectFullPath,
};
@@ -221,37 +217,12 @@ describe('Pipeline editor app component', () => {
});
});
- describe('with an empty CI config file', () => {
- describe('with empty state feature flag on', () => {
- it('does not show the empty screen state', async () => {
- mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponseEmptyCiFile);
-
- await createComponentWithApollo({
- provide: {
- glFeatures: {
- pipelineEditorEmptyStateAction: true,
- },
- },
- });
-
- expect(findEmptyState().exists()).toBe(false);
- expect(findTextEditor().exists()).toBe(true);
- });
- });
- });
-
- describe('when landing on the empty state with feature flag on', () => {
- it('user can click on CTA button and see an empty editor', async () => {
+ describe('with no CI config setup', () => {
+ it('user can click on CTA button to get started', async () => {
mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponseNoCiFile);
mockLatestCommitShaQuery.mockResolvedValue(mockEmptyCommitShaResults);
- await createComponentWithApollo({
- provide: {
- glFeatures: {
- pipelineEditorEmptyStateAction: true,
- },
- },
- });
+ await createComponentWithApollo();
expect(findEmptyState().exists()).toBe(true);
expect(findTextEditor().exists()).toBe(false);
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
index 7aba336b8e8..335049892ec 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
@@ -25,7 +25,6 @@ describe('Pipeline editor home wrapper', () => {
},
provide: {
glFeatures: {
- pipelineEditorDrawer: true,
...glFeatures,
},
},
@@ -94,12 +93,4 @@ describe('Pipeline editor home wrapper', () => {
expect(findCommitSection().exists()).toBe(true);
});
});
-
- describe('Pipeline drawer', () => {
- it('hides the drawer when the feature flag is off', () => {
- createComponent({ glFeatures: { pipelineEditorDrawer: false } });
-
- expect(findPipelineEditorDrawer().exists()).toBe(false);
- });
- });
});
diff --git a/spec/frontend/pipelines/components/pipelines_list/pipeline_mini_graph_spec.js b/spec/frontend/pipelines/components/pipelines_list/pipeline_mini_graph_spec.js
index 154828aff4b..1cb43c199aa 100644
--- a/spec/frontend/pipelines/components/pipelines_list/pipeline_mini_graph_spec.js
+++ b/spec/frontend/pipelines/components/pipelines_list/pipeline_mini_graph_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
+import { pipelines } from 'test_fixtures/pipelines/pipelines.json';
import PipelineMiniGraph from '~/pipelines/components/pipelines_list/pipeline_mini_graph.vue';
import PipelineStage from '~/pipelines/components/pipelines_list/pipeline_stage.vue';
-const { pipelines } = getJSONFixture('pipelines/pipelines.json');
const mockStages = pipelines[0].details.stages;
describe('Pipeline Mini Graph', () => {
diff --git a/spec/frontend/pipelines/components/pipelines_list/pipieline_stop_modal_spec.js b/spec/frontend/pipelines/components/pipelines_list/pipieline_stop_modal_spec.js
new file mode 100644
index 00000000000..249126390f1
--- /dev/null
+++ b/spec/frontend/pipelines/components/pipelines_list/pipieline_stop_modal_spec.js
@@ -0,0 +1,27 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlSprintf } from '@gitlab/ui';
+import PipelineStopModal from '~/pipelines/components/pipelines_list/pipeline_stop_modal.vue';
+import { mockPipelineHeader } from '../../mock_data';
+
+describe('PipelineStopModal', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(PipelineStopModal, {
+ propsData: {
+ pipeline: mockPipelineHeader,
+ },
+ stubs: {
+ GlSprintf,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should render "stop pipeline" warning', () => {
+ expect(wrapper.text()).toMatch(`You’re about to stop pipeline #${mockPipelineHeader.id}.`);
+ });
+});
diff --git a/spec/frontend/pipelines/graph/job_item_spec.js b/spec/frontend/pipelines/graph/job_item_spec.js
index cbc5d11403e..06f1fa4c827 100644
--- a/spec/frontend/pipelines/graph/job_item_spec.js
+++ b/spec/frontend/pipelines/graph/job_item_spec.js
@@ -205,4 +205,64 @@ describe('pipeline graph job item', () => {
},
);
});
+
+ describe('job classes', () => {
+ it('job class is shown', () => {
+ createWrapper({
+ job: mockJob,
+ cssClassJobName: 'my-class',
+ });
+
+ expect(wrapper.find('a').classes()).toContain('my-class');
+
+ expect(wrapper.find('a').classes()).not.toContain(triggerActiveClass);
+ });
+
+ it('job class is shown, along with hover', () => {
+ createWrapper({
+ job: mockJob,
+ cssClassJobName: 'my-class',
+ sourceJobHovered: mockJob.name,
+ });
+
+ expect(wrapper.find('a').classes()).toContain('my-class');
+ expect(wrapper.find('a').classes()).toContain(triggerActiveClass);
+ });
+
+ it('multiple job classes are shown', () => {
+ createWrapper({
+ job: mockJob,
+ cssClassJobName: ['my-class-1', 'my-class-2'],
+ });
+
+ expect(wrapper.find('a').classes()).toContain('my-class-1');
+ expect(wrapper.find('a').classes()).toContain('my-class-2');
+
+ expect(wrapper.find('a').classes()).not.toContain(triggerActiveClass);
+ });
+
+ it('multiple job classes are shown conditionally', () => {
+ createWrapper({
+ job: mockJob,
+ cssClassJobName: { 'my-class-1': true, 'my-class-2': true },
+ });
+
+ expect(wrapper.find('a').classes()).toContain('my-class-1');
+ expect(wrapper.find('a').classes()).toContain('my-class-2');
+
+ expect(wrapper.find('a').classes()).not.toContain(triggerActiveClass);
+ });
+
+ it('multiple job classes are shown, along with a hover', () => {
+ createWrapper({
+ job: mockJob,
+ cssClassJobName: ['my-class-1', 'my-class-2'],
+ sourceJobHovered: mockJob.name,
+ });
+
+ expect(wrapper.find('a').classes()).toContain('my-class-1');
+ expect(wrapper.find('a').classes()).toContain('my-class-2');
+ expect(wrapper.find('a').classes()).toContain(triggerActiveClass);
+ });
+ });
});
diff --git a/spec/frontend/pipelines/pipeline_multi_actions_spec.js b/spec/frontend/pipelines/pipeline_multi_actions_spec.js
index a606595b37d..e24d2e51f08 100644
--- a/spec/frontend/pipelines/pipeline_multi_actions_spec.js
+++ b/spec/frontend/pipelines/pipeline_multi_actions_spec.js
@@ -95,7 +95,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
createComponent({ mockData: { artifacts } });
expect(findFirstArtifactItem().attributes('href')).toBe(artifacts[0].path);
- expect(findFirstArtifactItem().text()).toBe(`Download ${artifacts[0].name} artifact`);
+ expect(findFirstArtifactItem().text()).toBe(artifacts[0].name);
});
it('should render empty message when no artifacts are found', () => {
diff --git a/spec/frontend/pipelines/pipelines_artifacts_spec.js b/spec/frontend/pipelines/pipelines_artifacts_spec.js
index 336255768d7..f33c66dedf3 100644
--- a/spec/frontend/pipelines/pipelines_artifacts_spec.js
+++ b/spec/frontend/pipelines/pipelines_artifacts_spec.js
@@ -87,8 +87,7 @@ describe('Pipelines Artifacts dropdown', () => {
createComponent({ mockData: { artifacts } });
expect(findFirstGlDropdownItem().attributes('href')).toBe(artifacts[0].path);
-
- expect(findFirstGlDropdownItem().text()).toBe(`Download ${artifacts[0].name} artifact`);
+ expect(findFirstGlDropdownItem().text()).toBe(artifacts[0].name);
});
describe('with a failing request', () => {
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index aa30062c987..2875498bb52 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -4,6 +4,7 @@ import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { chunk } from 'lodash';
import { nextTick } from 'vue';
+import mockPipelinesResponse from 'test_fixtures/pipelines/pipelines.json';
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
@@ -33,7 +34,6 @@ jest.mock('~/experimentation/utils', () => ({
const mockProjectPath = 'twitter/flight';
const mockProjectId = '21';
const mockPipelinesEndpoint = `/${mockProjectPath}/pipelines.json`;
-const mockPipelinesResponse = getJSONFixture('pipelines/pipelines.json');
const mockPipelinesIds = mockPipelinesResponse.pipelines.map(({ id }) => id);
const mockPipelineWithStages = mockPipelinesResponse.pipelines.find(
(p) => p.details.stages && p.details.stages.length,
diff --git a/spec/frontend/pipelines/pipelines_table_spec.js b/spec/frontend/pipelines/pipelines_table_spec.js
index 4472a5ae70d..fb019b463b1 100644
--- a/spec/frontend/pipelines/pipelines_table_spec.js
+++ b/spec/frontend/pipelines/pipelines_table_spec.js
@@ -1,6 +1,7 @@
import '~/commons';
import { GlTable } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
+import fixture from 'test_fixtures/pipelines/pipelines.json';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import PipelineMiniGraph from '~/pipelines/components/pipelines_list/pipeline_mini_graph.vue';
import PipelineOperations from '~/pipelines/components/pipelines_list/pipeline_operations.vue';
@@ -20,8 +21,6 @@ describe('Pipelines Table', () => {
let pipeline;
let wrapper;
- const jsonFixtureName = 'pipelines/pipelines.json';
-
const defaultProps = {
pipelines: [],
viewType: 'root',
@@ -29,7 +28,8 @@ describe('Pipelines Table', () => {
};
const createMockPipeline = () => {
- const { pipelines } = getJSONFixture(jsonFixtureName);
+ // Clone fixture as it could be modified by tests
+ const { pipelines } = JSON.parse(JSON.stringify(fixture));
return pipelines.find((p) => p.user !== null && p.commit !== null);
};
diff --git a/spec/frontend/pipelines/test_reports/stores/actions_spec.js b/spec/frontend/pipelines/test_reports/stores/actions_spec.js
index e931ddb8496..84a9f4776b9 100644
--- a/spec/frontend/pipelines/test_reports/stores/actions_spec.js
+++ b/spec/frontend/pipelines/test_reports/stores/actions_spec.js
@@ -1,5 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
-import { getJSONFixture } from 'helpers/fixtures';
+import testReports from 'test_fixtures/pipelines/test_report.json';
import { TEST_HOST } from 'helpers/test_constants';
import testAction from 'helpers/vuex_action_helper';
import createFlash from '~/flash';
@@ -13,7 +13,6 @@ describe('Actions TestReports Store', () => {
let mock;
let state;
- const testReports = getJSONFixture('pipelines/test_report.json');
const summary = { total_count: 1 };
const suiteEndpoint = `${TEST_HOST}/tests/suite.json`;
diff --git a/spec/frontend/pipelines/test_reports/stores/getters_spec.js b/spec/frontend/pipelines/test_reports/stores/getters_spec.js
index f8298fdaba5..70e3a01dbf1 100644
--- a/spec/frontend/pipelines/test_reports/stores/getters_spec.js
+++ b/spec/frontend/pipelines/test_reports/stores/getters_spec.js
@@ -1,4 +1,4 @@
-import { getJSONFixture } from 'helpers/fixtures';
+import testReports from 'test_fixtures/pipelines/test_report.json';
import * as getters from '~/pipelines/stores/test_reports/getters';
import {
iconForTestStatus,
@@ -9,8 +9,6 @@ import {
describe('Getters TestReports Store', () => {
let state;
- const testReports = getJSONFixture('pipelines/test_report.json');
-
const defaultState = {
blobPath: '/test/blob/path',
testReports,
diff --git a/spec/frontend/pipelines/test_reports/stores/mutations_spec.js b/spec/frontend/pipelines/test_reports/stores/mutations_spec.js
index 191e9e7391c..f2dbeec6a06 100644
--- a/spec/frontend/pipelines/test_reports/stores/mutations_spec.js
+++ b/spec/frontend/pipelines/test_reports/stores/mutations_spec.js
@@ -1,12 +1,10 @@
-import { getJSONFixture } from 'helpers/fixtures';
+import testReports from 'test_fixtures/pipelines/test_report.json';
import * as types from '~/pipelines/stores/test_reports/mutation_types';
import mutations from '~/pipelines/stores/test_reports/mutations';
describe('Mutations TestReports Store', () => {
let mockState;
- const testReports = getJSONFixture('pipelines/test_report.json');
-
const defaultState = {
endpoint: '',
testReports: {},
diff --git a/spec/frontend/pipelines/test_reports/test_reports_spec.js b/spec/frontend/pipelines/test_reports/test_reports_spec.js
index e44d59ba888..384b7cf6930 100644
--- a/spec/frontend/pipelines/test_reports/test_reports_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_reports_spec.js
@@ -1,7 +1,7 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
-import { getJSONFixture } from 'helpers/fixtures';
+import testReports from 'test_fixtures/pipelines/test_report.json';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import EmptyState from '~/pipelines/components/test_reports/empty_state.vue';
import TestReports from '~/pipelines/components/test_reports/test_reports.vue';
@@ -16,8 +16,6 @@ describe('Test reports app', () => {
let wrapper;
let store;
- const testReports = getJSONFixture('pipelines/test_report.json');
-
const loadingSpinner = () => wrapper.findComponent(GlLoadingIcon);
const testsDetail = () => wrapper.findByTestId('tests-detail');
const emptyState = () => wrapper.findComponent(EmptyState);
diff --git a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
index a87145cc557..793bad6b82a 100644
--- a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
@@ -1,7 +1,7 @@
import { GlButton, GlFriendlyWrap, GlLink, GlPagination } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
-import { getJSONFixture } from 'helpers/fixtures';
+import testReports from 'test_fixtures/pipelines/test_report.json';
import SuiteTable from '~/pipelines/components/test_reports/test_suite_table.vue';
import { TestStatus } from '~/pipelines/constants';
import * as getters from '~/pipelines/stores/test_reports/getters';
@@ -17,7 +17,7 @@ describe('Test reports suite table', () => {
const {
test_suites: [testSuite],
- } = getJSONFixture('pipelines/test_report.json');
+ } = testReports;
testSuite.test_cases = [...testSuite.test_cases, ...skippedTestCases];
const testCases = testSuite.test_cases;
diff --git a/spec/frontend/pipelines/test_reports/test_summary_spec.js b/spec/frontend/pipelines/test_reports/test_summary_spec.js
index df404d87c99..7eed6671fb9 100644
--- a/spec/frontend/pipelines/test_reports/test_summary_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_summary_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import { getJSONFixture } from 'helpers/fixtures';
+import testReports from 'test_fixtures/pipelines/test_report.json';
import Summary from '~/pipelines/components/test_reports/test_summary.vue';
import { formattedTime } from '~/pipelines/stores/test_reports/utils';
@@ -8,7 +8,7 @@ describe('Test reports summary', () => {
const {
test_suites: [testSuite],
- } = getJSONFixture('pipelines/test_report.json');
+ } = testReports;
const backButton = () => wrapper.find('.js-back-button');
const totalTests = () => wrapper.find('.js-total-tests');
diff --git a/spec/frontend/pipelines/test_reports/test_summary_table_spec.js b/spec/frontend/pipelines/test_reports/test_summary_table_spec.js
index 892a3742fea..0813739d72f 100644
--- a/spec/frontend/pipelines/test_reports/test_summary_table_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_summary_table_spec.js
@@ -1,6 +1,6 @@
import { mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
-import { getJSONFixture } from 'helpers/fixtures';
+import testReports from 'test_fixtures/pipelines/test_report.json';
import SummaryTable from '~/pipelines/components/test_reports/test_summary_table.vue';
import * as getters from '~/pipelines/stores/test_reports/getters';
@@ -11,8 +11,6 @@ describe('Test reports summary table', () => {
let wrapper;
let store;
- const testReports = getJSONFixture('pipelines/test_report.json');
-
const allSuitesRows = () => wrapper.findAll('.js-suite-row');
const noSuitesToShow = () => wrapper.find('.js-no-tests-suites');
diff --git a/spec/frontend/pages/projects/new/components/app_spec.js b/spec/frontend/projects/new/components/app_spec.js
index ab8c6d529a8..f6edbab3cca 100644
--- a/spec/frontend/pages/projects/new/components/app_spec.js
+++ b/spec/frontend/projects/new/components/app_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import App from '~/pages/projects/new/components/app.vue';
+import App from '~/projects/new/components/app.vue';
import NewNamespacePage from '~/vue_shared/new_namespace/new_namespace_page.vue';
describe('Experimental new project creation app', () => {
diff --git a/spec/frontend/pages/projects/new/components/new_project_push_tip_popover_spec.js b/spec/frontend/projects/new/components/new_project_push_tip_popover_spec.js
index d4cf8c78600..31ddbc80ae4 100644
--- a/spec/frontend/pages/projects/new/components/new_project_push_tip_popover_spec.js
+++ b/spec/frontend/projects/new/components/new_project_push_tip_popover_spec.js
@@ -1,6 +1,6 @@
import { GlPopover, GlFormInputGroup } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import NewProjectPushTipPopover from '~/pages/projects/new/components/new_project_push_tip_popover.vue';
+import NewProjectPushTipPopover from '~/projects/new/components/new_project_push_tip_popover.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
describe('New project push tip popover', () => {
diff --git a/spec/frontend/projects/new/components/new_project_url_select_spec.js b/spec/frontend/projects/new/components/new_project_url_select_spec.js
new file mode 100644
index 00000000000..aa16b71172b
--- /dev/null
+++ b/spec/frontend/projects/new/components/new_project_url_select_spec.js
@@ -0,0 +1,235 @@
+import {
+ GlButton,
+ GlDropdown,
+ GlDropdownItem,
+ GlDropdownSectionHeader,
+ GlSearchBoxByType,
+} from '@gitlab/ui';
+import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import eventHub from '~/projects/new/event_hub';
+import NewProjectUrlSelect from '~/projects/new/components/new_project_url_select.vue';
+import searchQuery from '~/projects/new/queries/search_namespaces_where_user_can_create_projects.query.graphql';
+
+describe('NewProjectUrlSelect component', () => {
+ let wrapper;
+
+ const data = {
+ currentUser: {
+ groups: {
+ nodes: [
+ {
+ id: 'gid://gitlab/Group/26',
+ fullPath: 'flightjs',
+ },
+ {
+ id: 'gid://gitlab/Group/28',
+ fullPath: 'h5bp',
+ },
+ {
+ id: 'gid://gitlab/Group/30',
+ fullPath: 'h5bp/subgroup',
+ },
+ ],
+ },
+ namespace: {
+ id: 'gid://gitlab/Namespace/1',
+ fullPath: 'root',
+ },
+ },
+ };
+
+ const localVue = createLocalVue();
+ localVue.use(VueApollo);
+
+ const defaultProvide = {
+ namespaceFullPath: 'h5bp',
+ namespaceId: '28',
+ rootUrl: 'https://gitlab.com/',
+ trackLabel: 'blank_project',
+ userNamespaceFullPath: 'root',
+ userNamespaceId: '1',
+ };
+
+ const mountComponent = ({
+ search = '',
+ queryResponse = data,
+ provide = defaultProvide,
+ mountFn = shallowMount,
+ } = {}) => {
+ const requestHandlers = [[searchQuery, jest.fn().mockResolvedValue({ data: queryResponse })]];
+ const apolloProvider = createMockApollo(requestHandlers);
+
+ return mountFn(NewProjectUrlSelect, {
+ localVue,
+ apolloProvider,
+ provide,
+ data() {
+ return {
+ search,
+ };
+ },
+ });
+ };
+
+ const findButtonLabel = () => wrapper.findComponent(GlButton);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findInput = () => wrapper.findComponent(GlSearchBoxByType);
+ const findHiddenInput = () => wrapper.find('input');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the root url as a label', () => {
+ wrapper = mountComponent();
+
+ expect(findButtonLabel().text()).toBe(defaultProvide.rootUrl);
+ expect(findButtonLabel().props('label')).toBe(true);
+ });
+
+ describe('when namespaceId is provided', () => {
+ beforeEach(() => {
+ wrapper = mountComponent();
+ });
+
+ it('renders a dropdown with the given namespace full path as the text', () => {
+ expect(findDropdown().props('text')).toBe(defaultProvide.namespaceFullPath);
+ });
+
+ it('renders a dropdown with the given namespace id in the hidden input', () => {
+ expect(findHiddenInput().attributes('value')).toBe(defaultProvide.namespaceId);
+ });
+ });
+
+ describe('when namespaceId is not provided', () => {
+ const provide = {
+ ...defaultProvide,
+ namespaceFullPath: undefined,
+ namespaceId: undefined,
+ };
+
+ beforeEach(() => {
+ wrapper = mountComponent({ provide });
+ });
+
+ it("renders a dropdown with the user's namespace full path as the text", () => {
+ expect(findDropdown().props('text')).toBe(defaultProvide.userNamespaceFullPath);
+ });
+
+ it("renders a dropdown with the user's namespace id in the hidden input", () => {
+ expect(findHiddenInput().attributes('value')).toBe(defaultProvide.userNamespaceId);
+ });
+ });
+
+ it('focuses on the input when the dropdown is opened', async () => {
+ wrapper = mountComponent({ mountFn: mount });
+
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+
+ const spy = jest.spyOn(findInput().vm, 'focusInput');
+
+ findDropdown().vm.$emit('shown');
+
+ expect(spy).toHaveBeenCalledTimes(1);
+ });
+
+ it('renders expected dropdown items', async () => {
+ wrapper = mountComponent({ mountFn: mount });
+
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+
+ const listItems = wrapper.findAll('li');
+
+ expect(listItems).toHaveLength(6);
+ expect(listItems.at(0).findComponent(GlDropdownSectionHeader).text()).toBe('Groups');
+ expect(listItems.at(1).text()).toBe(data.currentUser.groups.nodes[0].fullPath);
+ expect(listItems.at(2).text()).toBe(data.currentUser.groups.nodes[1].fullPath);
+ expect(listItems.at(3).text()).toBe(data.currentUser.groups.nodes[2].fullPath);
+ expect(listItems.at(4).findComponent(GlDropdownSectionHeader).text()).toBe('Users');
+ expect(listItems.at(5).text()).toBe(data.currentUser.namespace.fullPath);
+ });
+
+ describe('when selecting from a group template', () => {
+ const groupId = getIdFromGraphQLId(data.currentUser.groups.nodes[1].id);
+
+ beforeEach(async () => {
+ wrapper = mountComponent({ mountFn: mount });
+
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+
+ eventHub.$emit('select-template', groupId);
+ });
+
+ it('filters the dropdown items to the selected group and children', async () => {
+ const listItems = wrapper.findAll('li');
+
+ expect(listItems).toHaveLength(3);
+ expect(listItems.at(0).findComponent(GlDropdownSectionHeader).text()).toBe('Groups');
+ expect(listItems.at(1).text()).toBe(data.currentUser.groups.nodes[1].fullPath);
+ expect(listItems.at(2).text()).toBe(data.currentUser.groups.nodes[2].fullPath);
+ });
+
+ it('sets the selection to the group', async () => {
+ expect(findDropdown().props('text')).toBe(data.currentUser.groups.nodes[1].fullPath);
+ });
+ });
+
+ it('renders `No matches found` when there are no matching dropdown items', async () => {
+ const queryResponse = {
+ currentUser: {
+ groups: {
+ nodes: [],
+ },
+ namespace: {
+ id: 'gid://gitlab/Namespace/1',
+ fullPath: 'root',
+ },
+ },
+ };
+
+ wrapper = mountComponent({ search: 'no matches', queryResponse, mountFn: mount });
+
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find('li').text()).toBe('No matches found');
+ });
+
+ it('updates hidden input with selected namespace', async () => {
+ wrapper = mountComponent();
+
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+
+ wrapper.findComponent(GlDropdownItem).vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
+ expect(findHiddenInput().attributes()).toMatchObject({
+ name: 'project[namespace_id]',
+ value: getIdFromGraphQLId(data.currentUser.groups.nodes[0].id).toString(),
+ });
+ });
+
+ it('tracks clicking on the dropdown', () => {
+ wrapper = mountComponent();
+
+ const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+
+ findDropdown().vm.$emit('show');
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'activate_form_input', {
+ label: defaultProvide.trackLabel,
+ property: 'project_path',
+ });
+
+ unmockTracking();
+ });
+});
diff --git a/spec/frontend/projects/projects_filterable_list_spec.js b/spec/frontend/projects/projects_filterable_list_spec.js
index 377d347623a..d4dbf85b5ca 100644
--- a/spec/frontend/projects/projects_filterable_list_spec.js
+++ b/spec/frontend/projects/projects_filterable_list_spec.js
@@ -1,3 +1,4 @@
+// eslint-disable-next-line import/no-deprecated
import { getJSONFixture, setHTMLFixture } from 'helpers/fixtures';
import ProjectsFilterableList from '~/projects/projects_filterable_list';
@@ -14,6 +15,7 @@ describe('ProjectsFilterableList', () => {
</div>
<div class="js-projects-list-holder"></div>
`);
+ // eslint-disable-next-line import/no-deprecated
getJSONFixture('static/projects.json');
form = document.querySelector('form#project-filter-form');
filter = document.querySelector('.js-projects-list-filter');
diff --git a/spec/frontend/projects/settings/components/new_access_dropdown_spec.js b/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
new file mode 100644
index 00000000000..a42891423cd
--- /dev/null
+++ b/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
@@ -0,0 +1,345 @@
+import {
+ GlSprintf,
+ GlDropdown,
+ GlDropdownItem,
+ GlDropdownSectionHeader,
+ GlSearchBoxByType,
+} from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { getUsers, getGroups, getDeployKeys } from '~/projects/settings/api/access_dropdown_api';
+import AccessDropdown, { i18n } from '~/projects/settings/components/access_dropdown.vue';
+import { ACCESS_LEVELS, LEVEL_TYPES } from '~/projects/settings/constants';
+
+jest.mock('~/projects/settings/api/access_dropdown_api', () => ({
+ getGroups: jest.fn().mockResolvedValue({
+ data: [
+ { id: 4, name: 'group4' },
+ { id: 5, name: 'group5' },
+ { id: 6, name: 'group6' },
+ ],
+ }),
+ getUsers: jest.fn().mockResolvedValue({
+ data: [
+ { id: 7, name: 'user7' },
+ { id: 8, name: 'user8' },
+ { id: 9, name: 'user9' },
+ ],
+ }),
+ getDeployKeys: jest.fn().mockResolvedValue({
+ data: [
+ { id: 10, title: 'key10', fingerprint: 'abcdefghijklmnop', owner: { name: 'user1' } },
+ { id: 11, title: 'key11', fingerprint: 'abcdefghijklmnop', owner: { name: 'user2' } },
+ { id: 12, title: 'key12', fingerprint: 'abcdefghijklmnop', owner: { name: 'user3' } },
+ ],
+ }),
+}));
+
+describe('Access Level Dropdown', () => {
+ let wrapper;
+ const mockAccessLevelsData = [
+ {
+ id: 1,
+ text: 'role1',
+ },
+ {
+ id: 2,
+ text: 'role2',
+ },
+ {
+ id: 3,
+ text: 'role3',
+ },
+ ];
+
+ const createComponent = ({
+ accessLevelsData = mockAccessLevelsData,
+ accessLevel = ACCESS_LEVELS.PUSH,
+ hasLicense,
+ label,
+ disabled,
+ preselectedItems,
+ } = {}) => {
+ wrapper = shallowMountExtended(AccessDropdown, {
+ propsData: {
+ accessLevelsData,
+ accessLevel,
+ hasLicense,
+ label,
+ disabled,
+ preselectedItems,
+ },
+ stubs: {
+ GlSprintf,
+ GlDropdown,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdownToggleLabel = () => findDropdown().props('text');
+ const findAllDropdownItems = () => findDropdown().findAllComponents(GlDropdownItem);
+ const findAllDropdownHeaders = () => findDropdown().findAllComponents(GlDropdownSectionHeader);
+ const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
+
+ const findDropdownItemWithText = (items, text) =>
+ items.filter((item) => item.text().includes(text)).at(0);
+
+ describe('data request', () => {
+ it('should make an api call for users, groups && deployKeys when user has a license', () => {
+ createComponent();
+ expect(getUsers).toHaveBeenCalled();
+ expect(getGroups).toHaveBeenCalled();
+ expect(getDeployKeys).toHaveBeenCalled();
+ });
+
+ it('should make an api call for deployKeys but not for users or groups when user does not have a license', () => {
+ createComponent({ hasLicense: false });
+ expect(getUsers).not.toHaveBeenCalled();
+ expect(getGroups).not.toHaveBeenCalled();
+ expect(getDeployKeys).toHaveBeenCalled();
+ });
+
+ it('should make api calls when search query is updated', async () => {
+ createComponent();
+ const query = 'root';
+
+ findSearchBox().vm.$emit('input', query);
+ await nextTick();
+ expect(getUsers).toHaveBeenCalledWith(query);
+ expect(getGroups).toHaveBeenCalled();
+ expect(getDeployKeys).toHaveBeenCalledWith(query);
+ });
+ });
+
+ describe('layout', () => {
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('renders headers for each section ', () => {
+ expect(findAllDropdownHeaders()).toHaveLength(4);
+ });
+
+ it('renders dropdown item for each access level type', () => {
+ expect(findAllDropdownItems()).toHaveLength(12);
+ });
+ });
+
+ describe('toggleLabel', () => {
+ let dropdownItems = [];
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
+ dropdownItems = findAllDropdownItems();
+ });
+
+ const findItemByNameAndClick = async (name) => {
+ findDropdownItemWithText(dropdownItems, name).trigger('click');
+ await nextTick();
+ };
+
+ it('when no items selected and custom label provided, displays it and has default CSS class', () => {
+ wrapper.destroy();
+ const customLabel = 'Set the access level';
+ createComponent({ label: customLabel });
+ expect(findDropdownToggleLabel()).toBe(customLabel);
+ expect(findDropdown().props('toggleClass')).toBe('gl-text-gray-500!');
+ });
+
+ it('when no items selected, displays a default fallback label and has default CSS class ', () => {
+ expect(findDropdownToggleLabel()).toBe(i18n.selectUsers);
+ expect(findDropdown().props('toggleClass')).toBe('gl-text-gray-500!');
+ });
+
+ it('displays a number of selected items for each group level', async () => {
+ dropdownItems.wrappers.forEach((item) => {
+ item.trigger('click');
+ });
+ await nextTick();
+ expect(findDropdownToggleLabel()).toBe('3 roles, 3 users, 3 deploy keys, 3 groups');
+ });
+
+ it('with only role selected displays the role name and has no class applied', async () => {
+ await findItemByNameAndClick('role1');
+ expect(findDropdownToggleLabel()).toBe('role1');
+ expect(findDropdown().props('toggleClass')).toBe('');
+ });
+
+ it('with only groups selected displays the number of selected groups', async () => {
+ await findItemByNameAndClick('group4');
+ await findItemByNameAndClick('group5');
+ await findItemByNameAndClick('group6');
+ expect(findDropdownToggleLabel()).toBe('3 groups');
+ expect(findDropdown().props('toggleClass')).toBe('');
+ });
+
+ it('with only users selected displays the number of selected users', async () => {
+ await findItemByNameAndClick('user7');
+ await findItemByNameAndClick('user8');
+ expect(findDropdownToggleLabel()).toBe('2 users');
+ expect(findDropdown().props('toggleClass')).toBe('');
+ });
+
+ it('with users and groups selected displays the number of selected users & groups', async () => {
+ await findItemByNameAndClick('group4');
+ await findItemByNameAndClick('group6');
+ await findItemByNameAndClick('user7');
+ await findItemByNameAndClick('user9');
+ expect(findDropdownToggleLabel()).toBe('2 users, 2 groups');
+ expect(findDropdown().props('toggleClass')).toBe('');
+ });
+
+ it('with users and deploy keys selected displays the number of selected users & keys', async () => {
+ await findItemByNameAndClick('user8');
+ await findItemByNameAndClick('key10');
+ await findItemByNameAndClick('key11');
+ expect(findDropdownToggleLabel()).toBe('1 user, 2 deploy keys');
+ expect(findDropdown().props('toggleClass')).toBe('');
+ });
+ });
+
+ describe('selecting an item', () => {
+ it('selects the item on click and deselects on the next click ', async () => {
+ createComponent();
+ await waitForPromises();
+
+ const item = findAllDropdownItems().at(1);
+ item.trigger('click');
+ await nextTick();
+ expect(item.props('isChecked')).toBe(true);
+ item.trigger('click');
+ await nextTick();
+ expect(item.props('isChecked')).toBe(false);
+ });
+
+ it('emits a formatted update on selection ', async () => {
+ // ids: the items appear in that order in the dropdown
+ // 1 2 3 - roles
+ // 4 5 6 - groups
+ // 7 8 9 - users
+ // 10 11 12 - deploy_keys
+ // we set 2 from each group as preselected. Then for the sake of the test deselect one, leave one as-is
+ // and select a new one from the group.
+  // Preselected items should have `id` along with `user_id/group_id/access_level/deploy_key_id`.
+ // Items to be removed from previous selection will have `_deploy` flag set to true
+ // Newly selected items will have only `user_id/group_id/access_level/deploy_key_id` (depending on their type);
+ const preselectedItems = [
+ { id: 112, type: 'role', access_level: 2 },
+ { id: 113, type: 'role', access_level: 3 },
+ { id: 115, type: 'group', group_id: 5 },
+ { id: 116, type: 'group', group_id: 6 },
+ { id: 118, type: 'user', user_id: 8, name: 'user8' },
+ { id: 119, type: 'user', user_id: 9, name: 'user9' },
+ { id: 121, type: 'deploy_key', deploy_key_id: 11 },
+ { id: 122, type: 'deploy_key', deploy_key_id: 12 },
+ ];
+
+ createComponent({ preselectedItems });
+ await waitForPromises();
+ const spy = jest.spyOn(wrapper.vm, '$emit');
+ const dropdownItems = findAllDropdownItems();
+ // select new item from each group
+ findDropdownItemWithText(dropdownItems, 'role1').trigger('click');
+ findDropdownItemWithText(dropdownItems, 'group4').trigger('click');
+ findDropdownItemWithText(dropdownItems, 'user7').trigger('click');
+ findDropdownItemWithText(dropdownItems, 'key10').trigger('click');
+ // deselect one item from each group
+ findDropdownItemWithText(dropdownItems, 'role2').trigger('click');
+ findDropdownItemWithText(dropdownItems, 'group5').trigger('click');
+ findDropdownItemWithText(dropdownItems, 'user8').trigger('click');
+ findDropdownItemWithText(dropdownItems, 'key11').trigger('click');
+
+ expect(spy).toHaveBeenLastCalledWith('select', [
+ { access_level: 1 },
+ { id: 112, access_level: 2, _destroy: true },
+ { id: 113, access_level: 3 },
+ { group_id: 4 },
+ { id: 115, group_id: 5, _destroy: true },
+ { id: 116, group_id: 6 },
+ { user_id: 7 },
+ { id: 118, user_id: 8, _destroy: true },
+ { id: 119, user_id: 9 },
+ { deploy_key_id: 10 },
+ { id: 121, deploy_key_id: 11, _destroy: true },
+ { id: 122, deploy_key_id: 12 },
+ ]);
+ });
+ });
+
+ describe('Handling preselected items', () => {
+ const preselectedItems = [
+ { id: 112, type: 'role', access_level: 2 },
+ { id: 115, type: 'group', group_id: 5 },
+ { id: 118, type: 'user', user_id: 8, name: 'user2' },
+ { id: 121, type: 'deploy_key', deploy_key_id: 11 },
+ ];
+
+ const findSelected = (type) =>
+ wrapper.findAllByTestId(`${type}-dropdown-item`).filter((w) => w.props('isChecked'));
+
+ beforeEach(async () => {
+ createComponent({ preselectedItems });
+ await waitForPromises();
+ });
+
+ it('should set selected roles as intersection between the server response and preselected', () => {
+ const selectedRoles = findSelected(LEVEL_TYPES.ROLE);
+ expect(selectedRoles).toHaveLength(1);
+ expect(selectedRoles.at(0).text()).toBe('role2');
+ });
+
+ it('should set selected groups as intersection between the server response and preselected', () => {
+ const selectedGroups = findSelected(LEVEL_TYPES.GROUP);
+ expect(selectedGroups).toHaveLength(1);
+ expect(selectedGroups.at(0).text()).toBe('group5');
+ });
+
+ it('should set selected users to all preselected mapping `user_id` to `id`', () => {
+ const selectedUsers = findSelected(LEVEL_TYPES.USER);
+ expect(selectedUsers).toHaveLength(1);
+ expect(selectedUsers.at(0).text()).toBe('user2');
+ });
+
+ it('should set selected deploy keys as intersection between the server response and preselected mapping some keys', () => {
+ const selectedDeployKeys = findSelected(LEVEL_TYPES.DEPLOY_KEY);
+ expect(selectedDeployKeys).toHaveLength(1);
+ expect(selectedDeployKeys.at(0).text()).toContain('key11 (abcdefghijklmn...)');
+ });
+ });
+
+ describe('on dropdown open', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should set the search input focus', () => {
+ wrapper.vm.$refs.search.focusInput = jest.fn();
+ findDropdown().vm.$emit('shown');
+
+ expect(wrapper.vm.$refs.search.focusInput).toHaveBeenCalled();
+ });
+ });
+
+ describe('on dropdown close', () => {
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('should emit `hidden` event with dropdown selection', () => {
+ jest.spyOn(wrapper.vm, '$emit');
+
+ findAllDropdownItems().at(1).trigger('click');
+
+ findDropdown().vm.$emit('hidden');
+ expect(wrapper.vm.$emit).toHaveBeenCalledWith('hidden', [{ access_level: 2 }]);
+ });
+ });
+});
diff --git a/spec/frontend/ref/components/ref_selector_spec.js b/spec/frontend/ref/components/ref_selector_spec.js
index a642a8cf8c2..b486992ac4b 100644
--- a/spec/frontend/ref/components/ref_selector_spec.js
+++ b/spec/frontend/ref/components/ref_selector_spec.js
@@ -4,6 +4,9 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { merge, last } from 'lodash';
import Vuex from 'vuex';
+import commit from 'test_fixtures/api/commits/commit.json';
+import branches from 'test_fixtures/api/branches/branches.json';
+import tags from 'test_fixtures/api/tags/tags.json';
import { trimText } from 'helpers/text_helper';
import { ENTER_KEY } from '~/lib/utils/keys';
import { sprintf } from '~/locale';
@@ -21,11 +24,7 @@ const localVue = createLocalVue();
localVue.use(Vuex);
describe('Ref selector component', () => {
- const fixtures = {
- branches: getJSONFixture('api/branches/branches.json'),
- tags: getJSONFixture('api/tags/tags.json'),
- commit: getJSONFixture('api/commits/commit.json'),
- };
+ const fixtures = { branches, tags, commit };
const projectId = '8';
@@ -480,8 +479,6 @@ describe('Ref selector component', () => {
it('renders each commit as a selectable item with the short SHA and commit title', () => {
const dropdownItems = findCommitDropdownItems();
- const { commit } = fixtures;
-
expect(dropdownItems.at(0).text()).toBe(`${commit.short_id} ${commit.title}`);
});
});
diff --git a/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js b/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js
index c8fcb3116cd..a5da37a2786 100644
--- a/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js
+++ b/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js
@@ -1,13 +1,12 @@
-import { GlFormCheckbox, GlSprintf, GlIcon } from '@gitlab/ui';
+import { GlFormCheckbox, GlSprintf, GlIcon, GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import DeleteButton from '~/registry/explorer/components/delete_button.vue';
+
import component from '~/registry/explorer/components/details_page/tags_list_row.vue';
import {
REMOVE_TAG_BUTTON_TITLE,
- REMOVE_TAG_BUTTON_DISABLE_TOOLTIP,
MISSING_MANIFEST_WARNING_TOOLTIP,
NOT_AVAILABLE_TEXT,
NOT_AVAILABLE_SIZE,
@@ -25,19 +24,20 @@ describe('tags list row', () => {
const defaultProps = { tag, isMobile: false, index: 0 };
- const findCheckbox = () => wrapper.find(GlFormCheckbox);
+ const findCheckbox = () => wrapper.findComponent(GlFormCheckbox);
const findName = () => wrapper.find('[data-testid="name"]');
const findSize = () => wrapper.find('[data-testid="size"]');
const findTime = () => wrapper.find('[data-testid="time"]');
const findShortRevision = () => wrapper.find('[data-testid="digest"]');
- const findClipboardButton = () => wrapper.find(ClipboardButton);
- const findDeleteButton = () => wrapper.find(DeleteButton);
- const findTimeAgoTooltip = () => wrapper.find(TimeAgoTooltip);
+ const findClipboardButton = () => wrapper.findComponent(ClipboardButton);
+ const findTimeAgoTooltip = () => wrapper.findComponent(TimeAgoTooltip);
const findDetailsRows = () => wrapper.findAll(DetailsRow);
const findPublishedDateDetail = () => wrapper.find('[data-testid="published-date-detail"]');
const findManifestDetail = () => wrapper.find('[data-testid="manifest-detail"]');
const findConfigurationDetail = () => wrapper.find('[data-testid="configuration-detail"]');
- const findWarningIcon = () => wrapper.find(GlIcon);
+ const findWarningIcon = () => wrapper.findComponent(GlIcon);
+ const findAdditionalActionsMenu = () => wrapper.findComponent(GlDropdown);
+ const findDeleteButton = () => wrapper.findComponent(GlDropdownItem);
const mountComponent = (propsData = defaultProps) => {
wrapper = shallowMount(component, {
@@ -45,6 +45,7 @@ describe('tags list row', () => {
GlSprintf,
ListItem,
DetailsRow,
+ GlDropdown,
},
propsData,
directives: {
@@ -262,44 +263,61 @@ describe('tags list row', () => {
});
});
- describe('delete button', () => {
+ describe('additional actions menu', () => {
it('exists', () => {
mountComponent();
- expect(findDeleteButton().exists()).toBe(true);
+ expect(findAdditionalActionsMenu().exists()).toBe(true);
});
- it('has the correct props/attributes', () => {
+ it('has the correct props', () => {
mountComponent();
- expect(findDeleteButton().attributes()).toMatchObject({
- title: REMOVE_TAG_BUTTON_TITLE,
- tooltiptitle: REMOVE_TAG_BUTTON_DISABLE_TOOLTIP,
- tooltipdisabled: 'true',
+ expect(findAdditionalActionsMenu().props()).toMatchObject({
+ icon: 'ellipsis_v',
+ text: 'More actions',
+ textSrOnly: true,
+ category: 'tertiary',
+ right: true,
});
});
it.each`
- canDelete | digest | disabled
- ${true} | ${null} | ${true}
- ${false} | ${'foo'} | ${true}
- ${false} | ${null} | ${true}
- ${true} | ${'foo'} | ${true}
+ canDelete | digest | disabled | buttonDisabled
+ ${true} | ${null} | ${true} | ${true}
+ ${false} | ${'foo'} | ${true} | ${true}
+ ${false} | ${null} | ${true} | ${true}
+ ${true} | ${'foo'} | ${true} | ${true}
+ ${true} | ${'foo'} | ${false} | ${false}
`(
- 'is disabled when canDelete is $canDelete and digest is $digest and disabled is $disabled',
- ({ canDelete, digest, disabled }) => {
+ 'is $visible that is visible when canDelete is $canDelete and digest is $digest and disabled is $disabled',
+ ({ canDelete, digest, disabled, buttonDisabled }) => {
mountComponent({ ...defaultProps, tag: { ...tag, canDelete, digest }, disabled });
- expect(findDeleteButton().attributes('disabled')).toBe('true');
+ expect(findAdditionalActionsMenu().props('disabled')).toBe(buttonDisabled);
+ expect(findAdditionalActionsMenu().classes('gl-opacity-0')).toBe(buttonDisabled);
+ expect(findAdditionalActionsMenu().classes('gl-pointer-events-none')).toBe(buttonDisabled);
},
);
- it('delete event emits delete', () => {
- mountComponent();
+ describe('delete button', () => {
+ it('exists and has the correct attrs', () => {
+ mountComponent();
+
+ expect(findDeleteButton().exists()).toBe(true);
+ expect(findDeleteButton().attributes()).toMatchObject({
+ variant: 'danger',
+ });
+ expect(findDeleteButton().text()).toBe(REMOVE_TAG_BUTTON_TITLE);
+ });
- findDeleteButton().vm.$emit('delete');
+ it('delete event emits delete', () => {
+ mountComponent();
- expect(wrapper.emitted('delete')).toEqual([[]]);
+ findDeleteButton().vm.$emit('click');
+
+ expect(wrapper.emitted('delete')).toEqual([[]]);
+ });
});
});
diff --git a/spec/frontend/registry/explorer/pages/list_spec.js b/spec/frontend/registry/explorer/pages/list_spec.js
index b58a53f0af2..e1f24a2b65b 100644
--- a/spec/frontend/registry/explorer/pages/list_spec.js
+++ b/spec/frontend/registry/explorer/pages/list_spec.js
@@ -129,13 +129,16 @@ describe('List Page', () => {
});
});
- describe('connection error', () => {
+ describe.each([
+ { error: 'connectionError', errorName: 'connection error' },
+ { error: 'invalidPathError', errorName: 'invalid path error' },
+ ])('handling $errorName', ({ error }) => {
const config = {
- characterError: true,
containersErrorImage: 'foo',
helpPagePath: 'bar',
isGroupPage: false,
};
+ config[error] = true;
it('should show an empty state', () => {
mountComponent({ config });
diff --git a/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js b/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js
index f306fdef624..67f62815720 100644
--- a/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js
+++ b/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js
@@ -1,23 +1,19 @@
import { mount, createLocalVue } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
+import mockData from 'test_fixtures/issues/related_merge_requests.json';
import axios from '~/lib/utils/axios_utils';
import RelatedMergeRequests from '~/related_merge_requests/components/related_merge_requests.vue';
import createStore from '~/related_merge_requests/store/index';
import RelatedIssuableItem from '~/vue_shared/components/issue/related_issuable_item.vue';
-const FIXTURE_PATH = 'issues/related_merge_requests.json';
const API_ENDPOINT = '/api/v4/projects/2/issues/33/related_merge_requests';
const localVue = createLocalVue();
describe('RelatedMergeRequests', () => {
let wrapper;
let mock;
- let mockData;
beforeEach((done) => {
- loadFixtures(FIXTURE_PATH);
- mockData = getJSONFixture(FIXTURE_PATH);
-
// put the fixture in DOM as the component expects
document.body.innerHTML = `<div id="js-issuable-app"></div>`;
document.getElementById('js-issuable-app').dataset.initial = JSON.stringify(mockData);
diff --git a/spec/frontend/releases/components/app_edit_new_spec.js b/spec/frontend/releases/components/app_edit_new_spec.js
index 1db6fa21d6b..029d720f7b9 100644
--- a/spec/frontend/releases/components/app_edit_new_spec.js
+++ b/spec/frontend/releases/components/app_edit_new_spec.js
@@ -3,7 +3,7 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { merge } from 'lodash';
import Vuex from 'vuex';
-import { getJSONFixture } from 'helpers/fixtures';
+import originalRelease from 'test_fixtures/api/releases/release.json';
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import * as commonUtils from '~/lib/utils/common_utils';
@@ -11,7 +11,6 @@ import ReleaseEditNewApp from '~/releases/components/app_edit_new.vue';
import AssetLinksForm from '~/releases/components/asset_links_form.vue';
import { BACK_URL_PARAM } from '~/releases/constants';
-const originalRelease = getJSONFixture('api/releases/release.json');
const originalMilestones = originalRelease.milestones;
const releasesPagePath = 'path/to/releases/page';
diff --git a/spec/frontend/releases/components/app_index_apollo_client_spec.js b/spec/frontend/releases/components/app_index_apollo_client_spec.js
index 096d319c82f..32bbfd386f5 100644
--- a/spec/frontend/releases/components/app_index_apollo_client_spec.js
+++ b/spec/frontend/releases/components/app_index_apollo_client_spec.js
@@ -1,6 +1,7 @@
import { cloneDeep } from 'lodash';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
+import originalAllReleasesQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/all_releases.query.graphql.json';
import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import allReleasesQuery from 'shared_queries/releases/all_releases.query.graphql';
@@ -32,9 +33,6 @@ jest.mock('~/lib/utils/url_utility', () => ({
}));
describe('app_index_apollo_client.vue', () => {
- const originalAllReleasesQueryResponse = getJSONFixture(
- 'graphql/releases/graphql/queries/all_releases.query.graphql.json',
- );
const projectPath = 'project/path';
const newReleasePath = 'path/to/new/release/page';
const before = 'beforeCursor';
diff --git a/spec/frontend/releases/components/app_show_spec.js b/spec/frontend/releases/components/app_show_spec.js
index 7ea7a6ffe94..72ebaaaf76c 100644
--- a/spec/frontend/releases/components/app_show_spec.js
+++ b/spec/frontend/releases/components/app_show_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
-import { getJSONFixture } from 'helpers/fixtures';
+import oneReleaseQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/one_release.query.graphql.json';
import createMockApollo from 'helpers/mock_apollo_helper';
import createFlash from '~/flash';
import ReleaseShowApp from '~/releases/components/app_show.vue';
@@ -11,10 +11,6 @@ import oneReleaseQuery from '~/releases/graphql/queries/one_release.query.graphq
jest.mock('~/flash');
-const oneReleaseQueryResponse = getJSONFixture(
- 'graphql/releases/graphql/queries/one_release.query.graphql.json',
-);
-
Vue.use(VueApollo);
const EXPECTED_ERROR_MESSAGE = 'Something went wrong while getting the release details.';
diff --git a/spec/frontend/releases/components/asset_links_form_spec.js b/spec/frontend/releases/components/asset_links_form_spec.js
index 460007e48ef..839d127e00f 100644
--- a/spec/frontend/releases/components/asset_links_form_spec.js
+++ b/spec/frontend/releases/components/asset_links_form_spec.js
@@ -1,6 +1,6 @@
import { mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
-import { getJSONFixture } from 'helpers/fixtures';
+import originalRelease from 'test_fixtures/api/releases/release.json';
import * as commonUtils from '~/lib/utils/common_utils';
import { ENTER_KEY } from '~/lib/utils/keys';
import AssetLinksForm from '~/releases/components/asset_links_form.vue';
@@ -9,8 +9,6 @@ import { ASSET_LINK_TYPE, DEFAULT_ASSET_LINK_TYPE } from '~/releases/constants';
const localVue = createLocalVue();
localVue.use(Vuex);
-const originalRelease = getJSONFixture('api/releases/release.json');
-
describe('Release edit component', () => {
let wrapper;
let release;
diff --git a/spec/frontend/releases/components/evidence_block_spec.js b/spec/frontend/releases/components/evidence_block_spec.js
index 50b6d1c4707..973428257b7 100644
--- a/spec/frontend/releases/components/evidence_block_spec.js
+++ b/spec/frontend/releases/components/evidence_block_spec.js
@@ -1,13 +1,11 @@
import { GlLink, GlIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-import { getJSONFixture } from 'helpers/fixtures';
+import originalRelease from 'test_fixtures/api/releases/release.json';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { truncateSha } from '~/lib/utils/text_utility';
import EvidenceBlock from '~/releases/components/evidence_block.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
-const originalRelease = getJSONFixture('api/releases/release.json');
-
describe('Evidence Block', () => {
let wrapper;
let release;
diff --git a/spec/frontend/releases/components/release_block_assets_spec.js b/spec/frontend/releases/components/release_block_assets_spec.js
index 3b9b16fa890..c63689e11ac 100644
--- a/spec/frontend/releases/components/release_block_assets_spec.js
+++ b/spec/frontend/releases/components/release_block_assets_spec.js
@@ -1,13 +1,11 @@
import { GlCollapse } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-import { getJSONFixture } from 'helpers/fixtures';
+import { assets } from 'test_fixtures/api/releases/release.json';
import { trimText } from 'helpers/text_helper';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import ReleaseBlockAssets from '~/releases/components/release_block_assets.vue';
import { ASSET_LINK_TYPE } from '~/releases/constants';
-const { assets } = getJSONFixture('api/releases/release.json');
-
describe('Release block assets', () => {
let wrapper;
let defaultProps;
diff --git a/spec/frontend/releases/components/release_block_footer_spec.js b/spec/frontend/releases/components/release_block_footer_spec.js
index e9fa22b4ec7..f645dc309d7 100644
--- a/spec/frontend/releases/components/release_block_footer_spec.js
+++ b/spec/frontend/releases/components/release_block_footer_spec.js
@@ -1,13 +1,11 @@
import { GlLink, GlIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { cloneDeep } from 'lodash';
-import { getJSONFixture } from 'helpers/fixtures';
+import originalRelease from 'test_fixtures/api/releases/release.json';
import { trimText } from 'helpers/text_helper';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import ReleaseBlockFooter from '~/releases/components/release_block_footer.vue';
-const originalRelease = getJSONFixture('api/releases/release.json');
-
// TODO: Encapsulate date helpers https://gitlab.com/gitlab-org/gitlab/-/issues/320883
const MONTHS_IN_MS = 1000 * 60 * 60 * 24 * 31;
const mockFutureDate = new Date(new Date().getTime() + MONTHS_IN_MS).toISOString();
diff --git a/spec/frontend/releases/components/release_block_header_spec.js b/spec/frontend/releases/components/release_block_header_spec.js
index 47fd6377fcf..167ae4f32a2 100644
--- a/spec/frontend/releases/components/release_block_header_spec.js
+++ b/spec/frontend/releases/components/release_block_header_spec.js
@@ -1,14 +1,12 @@
import { GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { merge } from 'lodash';
-import { getJSONFixture } from 'helpers/fixtures';
+import originalRelease from 'test_fixtures/api/releases/release.json';
import setWindowLocation from 'helpers/set_window_location_helper';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import ReleaseBlockHeader from '~/releases/components/release_block_header.vue';
import { BACK_URL_PARAM } from '~/releases/constants';
-const originalRelease = getJSONFixture('api/releases/release.json');
-
describe('Release block header', () => {
let wrapper;
let release;
diff --git a/spec/frontend/releases/components/release_block_milestone_info_spec.js b/spec/frontend/releases/components/release_block_milestone_info_spec.js
index a2bf45c7861..146b2cc7490 100644
--- a/spec/frontend/releases/components/release_block_milestone_info_spec.js
+++ b/spec/frontend/releases/components/release_block_milestone_info_spec.js
@@ -1,12 +1,12 @@
import { GlProgressBar, GlLink, GlBadge, GlButton } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-import { getJSONFixture } from 'helpers/fixtures';
+import originalRelease from 'test_fixtures/api/releases/release.json';
import { trimText } from 'helpers/text_helper';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import ReleaseBlockMilestoneInfo from '~/releases/components/release_block_milestone_info.vue';
import { MAX_MILESTONES_TO_DISPLAY } from '~/releases/constants';
-const { milestones: originalMilestones } = getJSONFixture('api/releases/release.json');
+const { milestones: originalMilestones } = originalRelease;
describe('Release block milestone info', () => {
let wrapper;
diff --git a/spec/frontend/releases/components/release_block_spec.js b/spec/frontend/releases/components/release_block_spec.js
index 1ca441f7a5a..a847c32b8f1 100644
--- a/spec/frontend/releases/components/release_block_spec.js
+++ b/spec/frontend/releases/components/release_block_spec.js
@@ -1,6 +1,6 @@
import { mount } from '@vue/test-utils';
import $ from 'jquery';
-import { getJSONFixture } from 'helpers/fixtures';
+import originalRelease from 'test_fixtures/api/releases/release.json';
import * as commonUtils from '~/lib/utils/common_utils';
import * as urlUtility from '~/lib/utils/url_utility';
import EvidenceBlock from '~/releases/components/evidence_block.vue';
@@ -9,8 +9,6 @@ import ReleaseBlockFooter from '~/releases/components/release_block_footer.vue';
import { BACK_URL_PARAM } from '~/releases/constants';
import timeagoMixin from '~/vue_shared/mixins/timeago';
-const originalRelease = getJSONFixture('api/releases/release.json');
-
describe('Release block', () => {
let wrapper;
let release;
diff --git a/spec/frontend/releases/stores/modules/detail/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index 6504a09df2f..d8329fb82b1 100644
--- a/spec/frontend/releases/stores/modules/detail/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -1,5 +1,5 @@
import { cloneDeep } from 'lodash';
-import { getJSONFixture } from 'helpers/fixtures';
+import originalOneReleaseForEditingQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/one_release_for_editing.query.graphql.json';
import testAction from 'helpers/vuex_action_helper';
import createFlash from '~/flash';
import { redirectTo } from '~/lib/utils/url_utility';
@@ -27,10 +27,6 @@ jest.mock('~/releases/util', () => ({
},
}));
-const originalOneReleaseForEditingQueryResponse = getJSONFixture(
- 'graphql/releases/graphql/queries/one_release_for_editing.query.graphql.json',
-);
-
describe('Release edit/new actions', () => {
let state;
let releaseResponse;
diff --git a/spec/frontend/releases/stores/modules/detail/mutations_spec.js b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
index 20ae332e500..24dcedb3580 100644
--- a/spec/frontend/releases/stores/modules/detail/mutations_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
@@ -1,12 +1,10 @@
-import { getJSONFixture } from 'helpers/fixtures';
+import originalRelease from 'test_fixtures/api/releases/release.json';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { ASSET_LINK_TYPE, DEFAULT_ASSET_LINK_TYPE } from '~/releases/constants';
import * as types from '~/releases/stores/modules/edit_new/mutation_types';
import mutations from '~/releases/stores/modules/edit_new/mutations';
import createState from '~/releases/stores/modules/edit_new/state';
-const originalRelease = getJSONFixture('api/releases/release.json');
-
describe('Release edit/new mutations', () => {
let state;
let release;
diff --git a/spec/frontend/releases/stores/modules/list/actions_spec.js b/spec/frontend/releases/stores/modules/list/actions_spec.js
index af520c2eb20..91406f7e2f4 100644
--- a/spec/frontend/releases/stores/modules/list/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/list/actions_spec.js
@@ -1,5 +1,5 @@
import { cloneDeep } from 'lodash';
-import { getJSONFixture } from 'helpers/fixtures';
+import originalGraphqlReleasesResponse from 'test_fixtures/graphql/releases/graphql/queries/all_releases.query.graphql.json';
import testAction from 'helpers/vuex_action_helper';
import { PAGE_SIZE } from '~/releases/constants';
import allReleasesQuery from '~/releases/graphql/queries/all_releases.query.graphql';
@@ -12,10 +12,6 @@ import * as types from '~/releases/stores/modules/index/mutation_types';
import createState from '~/releases/stores/modules/index/state';
import { gqClient, convertAllReleasesGraphQLResponse } from '~/releases/util';
-const originalGraphqlReleasesResponse = getJSONFixture(
- 'graphql/releases/graphql/queries/all_releases.query.graphql.json',
-);
-
describe('Releases State actions', () => {
let mockedState;
let graphqlReleasesResponse;
diff --git a/spec/frontend/releases/stores/modules/list/mutations_spec.js b/spec/frontend/releases/stores/modules/list/mutations_spec.js
index 08d803b3c2c..49e324c28a5 100644
--- a/spec/frontend/releases/stores/modules/list/mutations_spec.js
+++ b/spec/frontend/releases/stores/modules/list/mutations_spec.js
@@ -1,17 +1,13 @@
-import { getJSONFixture } from 'helpers/fixtures';
+import originalRelease from 'test_fixtures/api/releases/release.json';
+import graphqlReleasesResponse from 'test_fixtures/graphql/releases/graphql/queries/all_releases.query.graphql.json';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import * as types from '~/releases/stores/modules/index/mutation_types';
import mutations from '~/releases/stores/modules/index/mutations';
import createState from '~/releases/stores/modules/index/state';
import { convertAllReleasesGraphQLResponse } from '~/releases/util';
-const originalRelease = getJSONFixture('api/releases/release.json');
const originalReleases = [originalRelease];
-const graphqlReleasesResponse = getJSONFixture(
- 'graphql/releases/graphql/queries/all_releases.query.graphql.json',
-);
-
describe('Releases Store Mutations', () => {
let stateCopy;
let pageInfo;
diff --git a/spec/frontend/releases/util_spec.js b/spec/frontend/releases/util_spec.js
index 36e7be369d3..3c1060cb0e8 100644
--- a/spec/frontend/releases/util_spec.js
+++ b/spec/frontend/releases/util_spec.js
@@ -1,21 +1,13 @@
import { cloneDeep } from 'lodash';
-import { getJSONFixture } from 'helpers/fixtures';
+import originalAllReleasesQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/all_releases.query.graphql.json';
+import originalOneReleaseQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/one_release.query.graphql.json';
+import originalOneReleaseForEditingQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/one_release_for_editing.query.graphql.json';
import {
convertGraphQLRelease,
convertAllReleasesGraphQLResponse,
convertOneReleaseGraphQLResponse,
} from '~/releases/util';
-const originalAllReleasesQueryResponse = getJSONFixture(
- 'graphql/releases/graphql/queries/all_releases.query.graphql.json',
-);
-const originalOneReleaseQueryResponse = getJSONFixture(
- 'graphql/releases/graphql/queries/one_release.query.graphql.json',
-);
-const originalOneReleaseForEditingQueryResponse = getJSONFixture(
- 'graphql/releases/graphql/queries/one_release_for_editing.query.graphql.json',
-);
-
describe('releases/util.js', () => {
describe('convertGraphQLRelease', () => {
let releaseFromResponse;
diff --git a/spec/frontend/reports/codequality_report/components/codequality_issue_body_spec.js b/spec/frontend/reports/codequality_report/components/codequality_issue_body_spec.js
index f99dcbffdff..c548007a8a6 100644
--- a/spec/frontend/reports/codequality_report/components/codequality_issue_body_spec.js
+++ b/spec/frontend/reports/codequality_report/components/codequality_issue_body_spec.js
@@ -38,6 +38,12 @@ describe('code quality issue body issue body', () => {
describe('severity rating', () => {
it.each`
severity | iconClass | iconName
+ ${'INFO'} | ${'text-primary-400'} | ${'severity-info'}
+ ${'MINOR'} | ${'text-warning-200'} | ${'severity-low'}
+ ${'CRITICAL'} | ${'text-danger-600'} | ${'severity-high'}
+ ${'BLOCKER'} | ${'text-danger-800'} | ${'severity-critical'}
+ ${'UNKNOWN'} | ${'text-secondary-400'} | ${'severity-unknown'}
+ ${'INVALID'} | ${'text-secondary-400'} | ${'severity-unknown'}
${'info'} | ${'text-primary-400'} | ${'severity-info'}
${'minor'} | ${'text-warning-200'} | ${'severity-low'}
${'major'} | ${'text-warning-400'} | ${'severity-medium'}
diff --git a/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js b/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js
index 84863eac3d3..685a1c50a46 100644
--- a/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js
+++ b/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js
@@ -60,7 +60,7 @@ describe('Grouped code quality reports app', () => {
});
it('should render loading text', () => {
- expect(findWidget().text()).toEqual('Loading codeclimate report');
+ expect(findWidget().text()).toEqual('Loading Code quality report');
});
});
@@ -84,7 +84,7 @@ describe('Grouped code quality reports app', () => {
});
it('renders summary text', () => {
- expect(findWidget().text()).toContain('Code quality degraded on 1 point');
+ expect(findWidget().text()).toContain('Code quality degraded');
});
it('renders custom codequality issue body', () => {
@@ -99,7 +99,7 @@ describe('Grouped code quality reports app', () => {
});
it('renders summary text', () => {
- expect(findWidget().text()).toContain('Code quality improved on 1 point');
+ expect(findWidget().text()).toContain('Code quality improved');
});
it('renders custom codequality issue body', () => {
@@ -115,7 +115,7 @@ describe('Grouped code quality reports app', () => {
it('renders summary text', () => {
expect(findWidget().text()).toContain(
- 'Code quality improved on 1 point and degraded on 1 point',
+ 'Code quality scanning detected 2 changes in merged results',
);
});
@@ -132,7 +132,7 @@ describe('Grouped code quality reports app', () => {
});
it('renders error text', () => {
- expect(findWidget().text()).toContain('Failed to load codeclimate report');
+ expect(findWidget().text()).toContain('Failed to load Code quality report');
});
it('does not render a help icon', () => {
diff --git a/spec/frontend/reports/codequality_report/store/getters_spec.js b/spec/frontend/reports/codequality_report/store/getters_spec.js
index 0378171084d..b5f6edf85eb 100644
--- a/spec/frontend/reports/codequality_report/store/getters_spec.js
+++ b/spec/frontend/reports/codequality_report/store/getters_spec.js
@@ -61,9 +61,9 @@ describe('Codequality reports store getters', () => {
it.each`
resolvedIssues | newIssues | expectedText
${0} | ${0} | ${'No changes to code quality'}
- ${0} | ${1} | ${'Code quality degraded on 1 point'}
- ${2} | ${0} | ${'Code quality improved on 2 points'}
- ${1} | ${2} | ${'Code quality improved on 1 point and degraded on 2 points'}
+ ${0} | ${1} | ${'Code quality degraded'}
+ ${2} | ${0} | ${'Code quality improved'}
+ ${1} | ${2} | ${'Code quality scanning detected 3 changes in merged results'}
`(
'returns a summary containing $resolvedIssues resolved issues and $newIssues new issues',
({ newIssues, resolvedIssues, expectedText }) => {
diff --git a/spec/frontend/reports/codequality_report/store/utils/codequality_parser_spec.js b/spec/frontend/reports/codequality_report/store/utils/codequality_parser_spec.js
index ba95294ab0a..5b77a2c74be 100644
--- a/spec/frontend/reports/codequality_report/store/utils/codequality_parser_spec.js
+++ b/spec/frontend/reports/codequality_report/store/utils/codequality_parser_spec.js
@@ -25,6 +25,18 @@ describe('Codequality report store utils', () => {
});
});
+ describe('when an issue has a non-nested path', () => {
+ const issue = { description: 'Insecure Dependency', path: 'Gemfile.lock' };
+
+ beforeEach(() => {
+ [result] = parseCodeclimateMetrics([issue], 'path');
+ });
+
+ it('is parsed', () => {
+ expect(result.name).toEqual(issue.description);
+ });
+ });
+
describe('when an issue has a path but no line', () => {
const issue = { description: 'Insecure Dependency', location: { path: 'Gemfile.lock' } };
diff --git a/spec/frontend/reports/components/report_section_spec.js b/spec/frontend/reports/components/report_section_spec.js
index e1b36aa1e21..39932b62dbb 100644
--- a/spec/frontend/reports/components/report_section_spec.js
+++ b/spec/frontend/reports/components/report_section_spec.js
@@ -23,7 +23,7 @@ describe('Report section', () => {
const defaultProps = {
component: '',
status: 'SUCCESS',
- loadingText: 'Loading codeclimate report',
+ loadingText: 'Loading Code Quality report',
errorText: 'foo',
successText: 'Code quality improved on 1 point and degraded on 1 point',
resolvedIssues,
@@ -117,13 +117,13 @@ describe('Report section', () => {
vm = mountComponent(ReportSection, {
component: '',
status: 'LOADING',
- loadingText: 'Loading codeclimate report',
+ loadingText: 'Loading Code Quality report',
errorText: 'foo',
successText: 'Code quality improved on 1 point and degraded on 1 point',
hasIssues: false,
});
- expect(vm.$el.textContent.trim()).toEqual('Loading codeclimate report');
+ expect(vm.$el.textContent.trim()).toEqual('Loading Code Quality report');
});
});
@@ -229,13 +229,13 @@ describe('Report section', () => {
vm = mountComponent(ReportSection, {
component: '',
status: 'ERROR',
- loadingText: 'Loading codeclimate report',
- errorText: 'Failed to load codeclimate report',
+ loadingText: 'Loading Code Quality report',
+ errorText: 'Failed to load Code Quality report',
successText: 'Code quality improved on 1 point and degraded on 1 point',
hasIssues: false,
});
- expect(vm.$el.textContent.trim()).toEqual('Failed to load codeclimate report');
+ expect(vm.$el.textContent.trim()).toEqual('Failed to load Code Quality report');
});
});
diff --git a/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js b/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js
index 0f7c2559e8b..c60c1f7b63c 100644
--- a/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js
+++ b/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js
@@ -24,7 +24,7 @@ describe('Grouped test reports app', () => {
let wrapper;
let mockStore;
- const mountComponent = ({ props = { pipelinePath }, glFeatures = {} } = {}) => {
+ const mountComponent = ({ props = { pipelinePath } } = {}) => {
wrapper = mount(GroupedTestReportsApp, {
store: mockStore,
localVue,
@@ -34,9 +34,6 @@ describe('Grouped test reports app', () => {
pipelinePath,
...props,
},
- provide: {
- glFeatures,
- },
});
};
@@ -114,8 +111,8 @@ describe('Grouped test reports app', () => {
setReports(newFailedTestReports);
});
- it('tracks service ping metric when enabled', () => {
- mountComponent({ glFeatures: { usageDataITestingSummaryWidgetTotal: true } });
+ it('tracks service ping metric', () => {
+ mountComponent();
findExpandButton().trigger('click');
expect(Api.trackRedisHllUserEvent).toHaveBeenCalledTimes(1);
@@ -123,7 +120,7 @@ describe('Grouped test reports app', () => {
});
it('only tracks the first expansion', () => {
- mountComponent({ glFeatures: { usageDataITestingSummaryWidgetTotal: true } });
+ mountComponent();
const expandButton = findExpandButton();
expandButton.trigger('click');
expandButton.trigger('click');
@@ -131,13 +128,6 @@ describe('Grouped test reports app', () => {
expect(Api.trackRedisHllUserEvent).toHaveBeenCalledTimes(1);
});
-
- it('does not track service ping metric when disabled', () => {
- mountComponent({ glFeatures: { usageDataITestingSummaryWidgetTotal: false } });
- findExpandButton().trigger('click');
-
- expect(Api.trackRedisHllUserEvent).not.toHaveBeenCalled();
- });
});
describe('with new failed result', () => {
diff --git a/spec/frontend/repository/commits_service_spec.js b/spec/frontend/repository/commits_service_spec.js
new file mode 100644
index 00000000000..d924974aede
--- /dev/null
+++ b/spec/frontend/repository/commits_service_spec.js
@@ -0,0 +1,84 @@
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import { loadCommits, isRequested, resetRequestedCommits } from '~/repository/commits_service';
+import httpStatus from '~/lib/utils/http_status';
+import createFlash from '~/flash';
+import { I18N_COMMIT_DATA_FETCH_ERROR } from '~/repository/constants';
+
+jest.mock('~/flash');
+
+describe('commits service', () => {
+ let mock;
+ const url = `${gon.relative_url_root || ''}/my-project/-/refs/main/logs_tree/`;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+
+ mock.onGet(url).reply(httpStatus.OK, [], {});
+
+ jest.spyOn(axios, 'get');
+ });
+
+ afterEach(() => {
+ mock.restore();
+ resetRequestedCommits();
+ });
+
+ const requestCommits = (offset, project = 'my-project', path = '', ref = 'main') =>
+ loadCommits(project, path, ref, offset);
+
+ it('calls axios get', async () => {
+ const offset = 10;
+ const project = 'my-project';
+ const path = 'my-path';
+ const ref = 'my-ref';
+ const testUrl = `${gon.relative_url_root || ''}/${project}/-/refs/${ref}/logs_tree/${path}`;
+
+ await requestCommits(offset, project, path, ref);
+
+ expect(axios.get).toHaveBeenCalledWith(testUrl, { params: { format: 'json', offset } });
+ });
+
+ it('encodes the path correctly', async () => {
+ await requestCommits(1, 'some-project', 'with $peci@l ch@rs/');
+
+ const encodedUrl = '/some-project/-/refs/main/logs_tree/with%20%24peci%40l%20ch%40rs%2F';
+ expect(axios.get).toHaveBeenCalledWith(encodedUrl, expect.anything());
+ });
+
+ it('calls axios get once per batch', async () => {
+ await Promise.all([requestCommits(0), requestCommits(1), requestCommits(23)]);
+
+ expect(axios.get.mock.calls.length).toEqual(1);
+ });
+
+ it('calls axios get twice if an offset is larger than 25', async () => {
+ await requestCommits(100);
+
+ expect(axios.get.mock.calls[0][1]).toEqual({ params: { format: 'json', offset: 75 } });
+ expect(axios.get.mock.calls[1][1]).toEqual({ params: { format: 'json', offset: 100 } });
+ });
+
+ it('updates the list of requested offsets', async () => {
+ await requestCommits(200);
+
+ expect(isRequested(200)).toBe(true);
+ });
+
+ it('resets the list of requested offsets', async () => {
+ await requestCommits(300);
+
+ resetRequestedCommits();
+ expect(isRequested(300)).toBe(false);
+ });
+
+ it('calls `createFlash` when the request fails', async () => {
+ const invalidPath = '/#@ some/path';
+ const invalidUrl = `${url}${invalidPath}`;
+ mock.onGet(invalidUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR, [], {});
+
+ await requestCommits(1, 'my-project', invalidPath);
+
+ expect(createFlash).toHaveBeenCalledWith({ message: I18N_COMMIT_DATA_FETCH_ERROR });
+ });
+});
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
index 8331adcdfc2..59db537282b 100644
--- a/spec/frontend/repository/components/blob_content_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -11,13 +11,18 @@ import BlobHeader from '~/blob/components/blob_header.vue';
import BlobButtonGroup from '~/repository/components/blob_button_group.vue';
import BlobContentViewer from '~/repository/components/blob_content_viewer.vue';
import BlobEdit from '~/repository/components/blob_edit.vue';
+import ForkSuggestion from '~/repository/components/fork_suggestion.vue';
import { loadViewer, viewerProps } from '~/repository/components/blob_viewers';
import DownloadViewer from '~/repository/components/blob_viewers/download_viewer.vue';
import EmptyViewer from '~/repository/components/blob_viewers/empty_viewer.vue';
import TextViewer from '~/repository/components/blob_viewers/text_viewer.vue';
import blobInfoQuery from '~/repository/queries/blob_info.query.graphql';
+import { redirectTo } from '~/lib/utils/url_utility';
+import { isLoggedIn } from '~/lib/utils/common_utils';
jest.mock('~/repository/components/blob_viewers');
+jest.mock('~/lib/utils/url_utility');
+jest.mock('~/lib/utils/common_utils');
let wrapper;
let mockResolver;
@@ -34,12 +39,14 @@ const simpleMockData = {
webPath: 'some_file.js',
editBlobPath: 'some_file.js/edit',
ideEditPath: 'some_file.js/ide/edit',
+ forkAndEditPath: 'some_file.js/fork/edit',
+ ideForkAndEditPath: 'some_file.js/fork/ide',
+ canModifyBlob: true,
storedExternally: false,
rawPath: 'some_file.js',
externalStorageUrl: 'some_file.js',
replacePath: 'some_file.js/replace',
deletePath: 'some_file.js/delete',
- forkPath: 'some_file.js/fork',
simpleViewer: {
fileType: 'text',
tooLarge: false,
@@ -62,6 +69,8 @@ const projectMockData = {
userPermissions: {
pushCode: true,
downloadCode: true,
+ createMergeRequestIn: true,
+ forkProject: true,
},
repository: {
empty: false,
@@ -82,6 +91,8 @@ const createComponentWithApollo = (mockData = {}, inject = {}) => {
emptyRepo = defaultEmptyRepo,
canPushCode = defaultPushCode,
canDownloadCode = defaultDownloadCode,
+ createMergeRequestIn = projectMockData.userPermissions.createMergeRequestIn,
+ forkProject = projectMockData.userPermissions.forkProject,
pathLocks = [],
} = mockData;
@@ -89,7 +100,12 @@ const createComponentWithApollo = (mockData = {}, inject = {}) => {
data: {
project: {
id: '1234',
- userPermissions: { pushCode: canPushCode, downloadCode: canDownloadCode },
+ userPermissions: {
+ pushCode: canPushCode,
+ downloadCode: canDownloadCode,
+ createMergeRequestIn,
+ forkProject,
+ },
pathLocks: {
nodes: pathLocks,
},
@@ -158,9 +174,16 @@ describe('Blob content viewer component', () => {
const findBlobEdit = () => wrapper.findComponent(BlobEdit);
const findBlobContent = () => wrapper.findComponent(BlobContent);
const findBlobButtonGroup = () => wrapper.findComponent(BlobButtonGroup);
+ const findForkSuggestion = () => wrapper.findComponent(ForkSuggestion);
+
+ beforeEach(() => {
+ gon.features = { refactorTextViewer: true };
+ isLoggedIn.mockReturnValue(true);
+ });
afterEach(() => {
wrapper.destroy();
+ mockAxios.reset();
});
it('renders a GlLoadingIcon component', () => {
@@ -183,7 +206,6 @@ describe('Blob content viewer component', () => {
it('renders a BlobContent component', () => {
expect(findBlobContent().props('loading')).toEqual(false);
- expect(findBlobContent().props('content')).toEqual('raw content');
expect(findBlobContent().props('isRawContent')).toBe(true);
expect(findBlobContent().props('activeViewer')).toEqual({
fileType: 'text',
@@ -192,6 +214,16 @@ describe('Blob content viewer component', () => {
renderError: null,
});
});
+
+ describe('legacy viewers', () => {
+ it('loads a legacy viewer when a viewer component is not available', async () => {
+ createComponentWithApollo({ blobs: { ...simpleMockData, fileType: 'unknown' } });
+ await waitForPromises();
+
+ expect(mockAxios.history.get).toHaveLength(1);
+ expect(mockAxios.history.get[0].url).toEqual('some_file.js?format=json&viewer=simple');
+ });
+ });
});
describe('rich viewer', () => {
@@ -210,7 +242,6 @@ describe('Blob content viewer component', () => {
it('renders a BlobContent component', () => {
expect(findBlobContent().props('loading')).toEqual(false);
- expect(findBlobContent().props('content')).toEqual('raw content');
expect(findBlobContent().props('isRawContent')).toBe(true);
expect(findBlobContent().props('activeViewer')).toEqual({
fileType: 'markup',
@@ -241,18 +272,12 @@ describe('Blob content viewer component', () => {
});
describe('legacy viewers', () => {
- it('does not load a legacy viewer when a rich viewer is not available', async () => {
- createComponentWithApollo({ blobs: simpleMockData });
- await waitForPromises();
-
- expect(mockAxios.history.get).toHaveLength(0);
- });
-
- it('loads a legacy viewer when a rich viewer is available', async () => {
- createComponentWithApollo({ blobs: richMockData });
+ it('loads a legacy viewer when a viewer component is not available', async () => {
+ createComponentWithApollo({ blobs: { ...richMockData, fileType: 'unknown' } });
await waitForPromises();
expect(mockAxios.history.get).toHaveLength(1);
+ expect(mockAxios.history.get[0].url).toEqual('some_file.js?format=json&viewer=rich');
});
});
@@ -462,7 +487,7 @@ describe('Blob content viewer component', () => {
});
it('does not render if not logged in', async () => {
- window.gon.current_user_id = null;
+ isLoggedIn.mockReturnValueOnce(false);
fullFactory({
mockData: { blobInfo: simpleMockData },
@@ -506,4 +531,60 @@ describe('Blob content viewer component', () => {
);
});
});
+
+ describe('edit blob', () => {
+ beforeEach(() => {
+ fullFactory({
+ mockData: { blobInfo: simpleMockData },
+ stubs: {
+ BlobContent: true,
+ BlobReplace: true,
+ },
+ });
+ });
+
+ it('simple edit redirects to the simple editor', () => {
+ findBlobEdit().vm.$emit('edit', 'simple');
+ expect(redirectTo).toHaveBeenCalledWith(simpleMockData.editBlobPath);
+ });
+
+ it('IDE edit redirects to the IDE editor', () => {
+ findBlobEdit().vm.$emit('edit', 'ide');
+ expect(redirectTo).toHaveBeenCalledWith(simpleMockData.ideEditPath);
+ });
+
+ it.each`
+ loggedIn | canModifyBlob | createMergeRequestIn | forkProject | showForkSuggestion
+ ${true} | ${false} | ${true} | ${true} | ${true}
+ ${false} | ${false} | ${true} | ${true} | ${false}
+ ${true} | ${true} | ${false} | ${true} | ${false}
+ ${true} | ${true} | ${true} | ${false} | ${false}
+ `(
+ 'shows/hides a fork suggestion according to a set of conditions',
+ async ({
+ loggedIn,
+ canModifyBlob,
+ createMergeRequestIn,
+ forkProject,
+ showForkSuggestion,
+ }) => {
+ isLoggedIn.mockReturnValueOnce(loggedIn);
+ fullFactory({
+ mockData: {
+ blobInfo: { ...simpleMockData, canModifyBlob },
+ project: { userPermissions: { createMergeRequestIn, forkProject } },
+ },
+ stubs: {
+ BlobContent: true,
+ BlobButtonGroup: true,
+ },
+ });
+
+ findBlobEdit().vm.$emit('edit', 'simple');
+ await nextTick();
+
+ expect(findForkSuggestion().exists()).toBe(showForkSuggestion);
+ },
+ );
+ });
});
diff --git a/spec/frontend/repository/components/blob_edit_spec.js b/spec/frontend/repository/components/blob_edit_spec.js
index 11739674bc9..e2de7bc2957 100644
--- a/spec/frontend/repository/components/blob_edit_spec.js
+++ b/spec/frontend/repository/components/blob_edit_spec.js
@@ -7,6 +7,7 @@ const DEFAULT_PROPS = {
editPath: 'some_file.js/edit',
webIdePath: 'some_file.js/ide/edit',
showEditButton: true,
+ needsToFork: false,
};
describe('BlobEdit component', () => {
@@ -56,7 +57,6 @@ describe('BlobEdit component', () => {
it('renders the Edit button', () => {
createComponent();
- expect(findEditButton().attributes('href')).toBe(DEFAULT_PROPS.editPath);
expect(findEditButton().text()).toBe('Edit');
expect(findEditButton()).not.toBeDisabled();
});
@@ -64,7 +64,6 @@ describe('BlobEdit component', () => {
it('renders the Web IDE button', () => {
createComponent();
- expect(findWebIdeButton().attributes('href')).toBe(DEFAULT_PROPS.webIdePath);
expect(findWebIdeButton().text()).toBe('Web IDE');
expect(findWebIdeButton()).not.toBeDisabled();
});
@@ -72,13 +71,14 @@ describe('BlobEdit component', () => {
it('renders WebIdeLink component', () => {
createComponent(true);
- const { editPath: editUrl, webIdePath: webIdeUrl } = DEFAULT_PROPS;
+ const { editPath: editUrl, webIdePath: webIdeUrl, needsToFork } = DEFAULT_PROPS;
expect(findWebIdeLink().props()).toMatchObject({
editUrl,
webIdeUrl,
isBlob: true,
showEditButton: true,
+ needsToFork,
});
});
diff --git a/spec/frontend/repository/components/blob_viewers/video_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/video_viewer_spec.js
new file mode 100644
index 00000000000..34448c03b31
--- /dev/null
+++ b/spec/frontend/repository/components/blob_viewers/video_viewer_spec.js
@@ -0,0 +1,22 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import VideoViewer from '~/repository/components/blob_viewers/video_viewer.vue';
+
+describe('Video Viewer', () => {
+ let wrapper;
+
+ const propsData = { url: 'some/video.mp4' };
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(VideoViewer, { propsData });
+ };
+
+ const findVideo = () => wrapper.findByTestId('video');
+
+ it('renders a Video element', () => {
+ createComponent();
+
+ expect(findVideo().exists()).toBe(true);
+ expect(findVideo().attributes('src')).toBe(propsData.url);
+ expect(findVideo().attributes('controls')).not.toBeUndefined();
+ });
+});
diff --git a/spec/frontend/repository/components/breadcrumbs_spec.js b/spec/frontend/repository/components/breadcrumbs_spec.js
index 0733cffe4f4..eb957c635ac 100644
--- a/spec/frontend/repository/components/breadcrumbs_spec.js
+++ b/spec/frontend/repository/components/breadcrumbs_spec.js
@@ -2,6 +2,7 @@ import { GlDropdown } from '@gitlab/ui';
import { shallowMount, RouterLinkStub } from '@vue/test-utils';
import Breadcrumbs from '~/repository/components/breadcrumbs.vue';
import UploadBlobModal from '~/repository/components/upload_blob_modal.vue';
+import NewDirectoryModal from '~/repository/components/new_directory_modal.vue';
const defaultMockRoute = {
name: 'blobPath',
@@ -10,7 +11,7 @@ const defaultMockRoute = {
describe('Repository breadcrumbs component', () => {
let wrapper;
- const factory = (currentPath, extraProps = {}, mockRoute = {}) => {
+ const factory = (currentPath, extraProps = {}, mockRoute = {}, newDirModal = true) => {
const $apollo = {
queries: {
userPermissions: {
@@ -34,10 +35,12 @@ describe('Repository breadcrumbs component', () => {
},
$apollo,
},
+ provide: { glFeatures: { newDirModal } },
});
};
const findUploadBlobModal = () => wrapper.find(UploadBlobModal);
+ const findNewDirectoryModal = () => wrapper.find(NewDirectoryModal);
afterEach(() => {
wrapper.destroy();
@@ -121,4 +124,37 @@ describe('Repository breadcrumbs component', () => {
expect(findUploadBlobModal().exists()).toBe(true);
});
});
+
+ describe('renders the new directory modal', () => {
+ describe('with the feature flag enabled', () => {
+ beforeEach(() => {
+ window.gon.features = {
+ newDirModal: true,
+ };
+ factory('/', { canEditTree: true });
+ });
+
+ it('does not render the modal while loading', () => {
+ expect(findNewDirectoryModal().exists()).toBe(false);
+ });
+
+ it('renders the modal once loaded', async () => {
+ wrapper.setData({ $apollo: { queries: { userPermissions: { loading: false } } } });
+
+ await wrapper.vm.$nextTick();
+
+ expect(findNewDirectoryModal().exists()).toBe(true);
+ });
+ });
+
+ describe('with the feature flag disabled', () => {
+ it('does not render the modal', () => {
+ window.gon.features = {
+ newDirModal: false,
+ };
+ factory('/', { canEditTree: true }, {}, {}, false);
+ expect(findNewDirectoryModal().exists()).toBe(false);
+ });
+ });
+ });
});
diff --git a/spec/frontend/repository/components/fork_suggestion_spec.js b/spec/frontend/repository/components/fork_suggestion_spec.js
new file mode 100644
index 00000000000..36a48a3fdb8
--- /dev/null
+++ b/spec/frontend/repository/components/fork_suggestion_spec.js
@@ -0,0 +1,44 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ForkSuggestion from '~/repository/components/fork_suggestion.vue';
+
+const DEFAULT_PROPS = { forkPath: 'some_file.js/fork' };
+
+describe('ForkSuggestion component', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(ForkSuggestion, {
+ propsData: { ...DEFAULT_PROPS },
+ });
+ };
+
+ beforeEach(() => createComponent());
+
+ afterEach(() => wrapper.destroy());
+
+ const { i18n } = ForkSuggestion;
+ const findMessage = () => wrapper.findByTestId('message');
+ const findForkButton = () => wrapper.findByTestId('fork');
+ const findCancelButton = () => wrapper.findByTestId('cancel');
+
+ it('renders a message', () => {
+ expect(findMessage().text()).toBe(i18n.message);
+ });
+
+ it('renders a Fork button', () => {
+ const forkButton = findForkButton();
+
+ expect(forkButton.text()).toBe(i18n.fork);
+ expect(forkButton.attributes('href')).toBe(DEFAULT_PROPS.forkPath);
+ });
+
+ it('renders a Cancel button', () => {
+ expect(findCancelButton().text()).toBe(i18n.cancel);
+ });
+
+ it('emits a cancel event when Cancel button is clicked', () => {
+ findCancelButton().vm.$emit('click');
+
+ expect(wrapper.emitted('cancel')).toEqual([[]]);
+ });
+});
diff --git a/spec/frontend/repository/components/new_directory_modal_spec.js b/spec/frontend/repository/components/new_directory_modal_spec.js
new file mode 100644
index 00000000000..fe7f024e3ea
--- /dev/null
+++ b/spec/frontend/repository/components/new_directory_modal_spec.js
@@ -0,0 +1,203 @@
+import { GlModal, GlFormTextarea, GlToggle } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import httpStatusCodes from '~/lib/utils/http_status';
+import { visitUrl } from '~/lib/utils/url_utility';
+import NewDirectoryModal from '~/repository/components/new_directory_modal.vue';
+
+jest.mock('~/flash');
+jest.mock('~/lib/utils/url_utility', () => ({
+ visitUrl: jest.fn(),
+}));
+
+const initialProps = {
+ modalTitle: 'Create New Directory',
+ modalId: 'modal-new-directory',
+ commitMessage: 'Add new directory',
+ targetBranch: 'some-target-branch',
+ originalBranch: 'master',
+ canPushCode: true,
+ path: 'create_dir',
+};
+
+const defaultFormValue = {
+ dirName: 'foo',
+ originalBranch: initialProps.originalBranch,
+ branchName: initialProps.targetBranch,
+ commitMessage: initialProps.commitMessage,
+ createNewMr: true,
+};
+
+describe('NewDirectoryModal', () => {
+ let wrapper;
+ let mock;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(NewDirectoryModal, {
+ propsData: {
+ ...initialProps,
+ ...props,
+ },
+ attrs: {
+ static: true,
+ visible: true,
+ },
+ });
+ };
+
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findDirName = () => wrapper.find('[name="dir_name"]');
+ const findBranchName = () => wrapper.find('[name="branch_name"]');
+ const findCommitMessage = () => wrapper.findComponent(GlFormTextarea);
+ const findMrToggle = () => wrapper.findComponent(GlToggle);
+
+ const fillForm = async (inputValue = {}) => {
+ const {
+ dirName = defaultFormValue.dirName,
+ branchName = defaultFormValue.branchName,
+ commitMessage = defaultFormValue.commitMessage,
+ createNewMr = true,
+ } = inputValue;
+
+ await findDirName().vm.$emit('input', dirName);
+ await findBranchName().vm.$emit('input', branchName);
+ await findCommitMessage().vm.$emit('input', commitMessage);
+ await findMrToggle().vm.$emit('change', createNewMr);
+ await nextTick;
+ };
+
+ const submitForm = async () => {
+ const mockEvent = { preventDefault: jest.fn() };
+ findModal().vm.$emit('primary', mockEvent);
+ await waitForPromises();
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders modal component', () => {
+ createComponent();
+
+ const { modalTitle: title } = initialProps;
+
+ expect(findModal().props()).toMatchObject({
+ title,
+ size: 'md',
+ actionPrimary: {
+ text: NewDirectoryModal.i18n.PRIMARY_OPTIONS_TEXT,
+ },
+ actionCancel: {
+ text: 'Cancel',
+ },
+ });
+ });
+
+ describe('form', () => {
+ it.each`
+ component | defaultValue | canPushCode | targetBranch | originalBranch | exist
+ ${findDirName} | ${undefined} | ${true} | ${initialProps.targetBranch} | ${initialProps.originalBranch} | ${true}
+ ${findBranchName} | ${initialProps.targetBranch} | ${true} | ${initialProps.targetBranch} | ${initialProps.originalBranch} | ${true}
+ ${findBranchName} | ${undefined} | ${false} | ${initialProps.targetBranch} | ${initialProps.originalBranch} | ${false}
+ ${findCommitMessage} | ${initialProps.commitMessage} | ${true} | ${initialProps.targetBranch} | ${initialProps.originalBranch} | ${true}
+ ${findMrToggle} | ${'true'} | ${true} | ${'new-target-branch'} | ${'master'} | ${true}
+ ${findMrToggle} | ${'true'} | ${true} | ${'master'} | ${'master'} | ${true}
+ `(
+ 'has the correct form fields ',
+ ({ component, defaultValue, canPushCode, targetBranch, originalBranch, exist }) => {
+ createComponent({
+ canPushCode,
+ targetBranch,
+ originalBranch,
+ });
+ const formField = component();
+
+ if (!exist) {
+ expect(formField.exists()).toBe(false);
+ return;
+ }
+
+ expect(formField.exists()).toBe(true);
+ expect(formField.attributes('value')).toBe(defaultValue);
+ },
+ );
+ });
+
+ describe('form submission', () => {
+ beforeEach(async () => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('valid form', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('passes the formData', async () => {
+ const {
+ dirName,
+ branchName,
+ commitMessage,
+ originalBranch,
+ createNewMr,
+ } = defaultFormValue;
+ mock.onPost(initialProps.path).reply(httpStatusCodes.OK, {});
+ await fillForm();
+ await submitForm();
+
+ expect(mock.history.post[0].data.get('dir_name')).toEqual(dirName);
+ expect(mock.history.post[0].data.get('branch_name')).toEqual(branchName);
+ expect(mock.history.post[0].data.get('commit_message')).toEqual(commitMessage);
+ expect(mock.history.post[0].data.get('original_branch')).toEqual(originalBranch);
+ expect(mock.history.post[0].data.get('create_merge_request')).toEqual(String(createNewMr));
+ });
+
+ it('does not submit "create_merge_request" formData if createNewMr is not checked', async () => {
+ mock.onPost(initialProps.path).reply(httpStatusCodes.OK, {});
+ await fillForm({ createNewMr: false });
+ await submitForm();
+ expect(mock.history.post[0].data.get('create_merge_request')).toBeNull();
+ });
+
+ it('redirects to the new directory', async () => {
+ const response = { filePath: 'new-dir-path' };
+ mock.onPost(initialProps.path).reply(httpStatusCodes.OK, response);
+
+ await fillForm({ dirName: 'foo', branchName: 'master', commitMessage: 'foo' });
+ await submitForm();
+
+ expect(visitUrl).toHaveBeenCalledWith(response.filePath);
+ });
+ });
+
+ describe('invalid form', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('disables submit button', async () => {
+ await fillForm({ dirName: '', branchName: '', commitMessage: '' });
+ expect(findModal().props('actionPrimary').attributes[0].disabled).toBe(true);
+ });
+
+ it('creates a flash error', async () => {
+ mock.onPost(initialProps.path).timeout();
+
+ await fillForm({ dirName: 'foo', branchName: 'master', commitMessage: 'foo' });
+ await submitForm();
+
+ expect(createFlash).toHaveBeenCalledWith({
+ message: NewDirectoryModal.i18n.ERROR_MESSAGE,
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
index 6f461f4c69b..26064e9b248 100644
--- a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
+++ b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
@@ -31,25 +31,36 @@ exports[`Repository table row component renders a symlink table row 1`] = `
<!---->
- <!---->
+ <gl-icon-stub
+ class="ml-1"
+ name="lock"
+ size="12"
+ title="Locked by Root"
+ />
</td>
<td
class="d-none d-sm-table-cell tree-commit cursor-default"
>
- <gl-skeleton-loading-stub
- class="h-auto"
- lines="1"
+ <gl-link-stub
+ class="str-truncated-100 tree-commit-link"
/>
+
+ <gl-intersection-observer-stub>
+ <!---->
+ </gl-intersection-observer-stub>
</td>
<td
class="tree-time-ago text-right cursor-default"
>
- <gl-skeleton-loading-stub
- class="ml-auto h-auto w-50"
- lines="1"
+ <timeago-tooltip-stub
+ cssclass=""
+ time="2019-01-01"
+ tooltipplacement="top"
/>
+
+ <!---->
</td>
</tr>
`;
@@ -85,25 +96,36 @@ exports[`Repository table row component renders table row 1`] = `
<!---->
- <!---->
+ <gl-icon-stub
+ class="ml-1"
+ name="lock"
+ size="12"
+ title="Locked by Root"
+ />
</td>
<td
class="d-none d-sm-table-cell tree-commit cursor-default"
>
- <gl-skeleton-loading-stub
- class="h-auto"
- lines="1"
+ <gl-link-stub
+ class="str-truncated-100 tree-commit-link"
/>
+
+ <gl-intersection-observer-stub>
+ <!---->
+ </gl-intersection-observer-stub>
</td>
<td
class="tree-time-ago text-right cursor-default"
>
- <gl-skeleton-loading-stub
- class="ml-auto h-auto w-50"
- lines="1"
+ <timeago-tooltip-stub
+ cssclass=""
+ time="2019-01-01"
+ tooltipplacement="top"
/>
+
+ <!---->
</td>
</tr>
`;
@@ -139,25 +161,36 @@ exports[`Repository table row component renders table row for path with special
<!---->
- <!---->
+ <gl-icon-stub
+ class="ml-1"
+ name="lock"
+ size="12"
+ title="Locked by Root"
+ />
</td>
<td
class="d-none d-sm-table-cell tree-commit cursor-default"
>
- <gl-skeleton-loading-stub
- class="h-auto"
- lines="1"
+ <gl-link-stub
+ class="str-truncated-100 tree-commit-link"
/>
+
+ <gl-intersection-observer-stub>
+ <!---->
+ </gl-intersection-observer-stub>
</td>
<td
class="tree-time-ago text-right cursor-default"
>
- <gl-skeleton-loading-stub
- class="ml-auto h-auto w-50"
- lines="1"
+ <timeago-tooltip-stub
+ cssclass=""
+ time="2019-01-01"
+ tooltipplacement="top"
/>
+
+ <!---->
</td>
</tr>
`;
diff --git a/spec/frontend/repository/components/table/index_spec.js b/spec/frontend/repository/components/table/index_spec.js
index e9e51abaf0f..c8dddefc4f2 100644
--- a/spec/frontend/repository/components/table/index_spec.js
+++ b/spec/frontend/repository/components/table/index_spec.js
@@ -34,17 +34,45 @@ const MOCK_BLOBS = [
},
];
-function factory({ path, isLoading = false, hasMore = true, entries = {} }) {
+const MOCK_COMMITS = [
+ {
+ fileName: 'blob.md',
+ type: 'blob',
+ commit: {
+ message: 'Updated blob.md',
+ },
+ },
+ {
+ fileName: 'blob2.md',
+ type: 'blob',
+ commit: {
+ message: 'Updated blob2.md',
+ },
+ },
+ {
+ fileName: 'blob3.md',
+ type: 'blob',
+ commit: {
+ message: 'Updated blob3.md',
+ },
+ },
+];
+
+function factory({ path, isLoading = false, hasMore = true, entries = {}, commits = [] }) {
vm = shallowMount(Table, {
propsData: {
path,
isLoading,
entries,
hasMore,
+ commits,
},
mocks: {
$apollo,
},
+ provide: {
+ glFeatures: { lazyLoadCommits: true },
+ },
});
}
@@ -82,12 +110,15 @@ describe('Repository table component', () => {
entries: {
blobs: MOCK_BLOBS,
},
+ commits: MOCK_COMMITS,
});
const rows = vm.findAll(TableRow);
expect(rows.length).toEqual(3);
expect(rows.at(2).attributes().mode).toEqual('120000');
+ expect(rows.at(2).props().rowNumber).toBe(2);
+ expect(rows.at(2).props().commitInfo).toEqual(MOCK_COMMITS[2]);
});
describe('Show more button', () => {
diff --git a/spec/frontend/repository/components/table/row_spec.js b/spec/frontend/repository/components/table/row_spec.js
index da28c9873d9..76e9f7da011 100644
--- a/spec/frontend/repository/components/table/row_spec.js
+++ b/spec/frontend/repository/components/table/row_spec.js
@@ -1,10 +1,12 @@
-import { GlBadge, GlLink, GlIcon } from '@gitlab/ui';
+import { GlBadge, GlLink, GlIcon, GlIntersectionObserver } from '@gitlab/ui';
import { shallowMount, RouterLinkStub } from '@vue/test-utils';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import TableRow from '~/repository/components/table/row.vue';
import FileIcon from '~/vue_shared/components/file_icon.vue';
import { FILE_SYMLINK_MODE } from '~/vue_shared/constants';
+const COMMIT_MOCK = { lockLabel: 'Locked by Root', committedDate: '2019-01-01' };
+
let vm;
let $router;
@@ -20,12 +22,14 @@ function factory(propsData = {}) {
projectPath: 'gitlab-org/gitlab-ce',
url: `https://test.com`,
totalEntries: 10,
+ commitInfo: COMMIT_MOCK,
+ rowNumber: 123,
},
directives: {
GlHoverLoad: createMockDirective(),
},
provide: {
- glFeatures: { refactorBlobViewer: true },
+ glFeatures: { refactorBlobViewer: true, lazyLoadCommits: true },
},
mocks: {
$router,
@@ -40,6 +44,7 @@ function factory(propsData = {}) {
describe('Repository table row component', () => {
const findRouterLink = () => vm.find(RouterLinkStub);
+ const findIntersectionObserver = () => vm.findComponent(GlIntersectionObserver);
afterEach(() => {
vm.destroy();
@@ -226,8 +231,6 @@ describe('Repository table row component', () => {
currentPath: '/',
});
- vm.setData({ commit: { lockLabel: 'Locked by Root', committedDate: '2019-01-01' } });
-
return vm.vm.$nextTick().then(() => {
expect(vm.find(GlIcon).exists()).toBe(true);
expect(vm.find(GlIcon).props('name')).toBe('lock');
@@ -246,4 +249,27 @@ describe('Repository table row component', () => {
expect(vm.find(FileIcon).props('loading')).toBe(true);
});
+
+ describe('row visibility', () => {
+ beforeEach(() => {
+ factory({
+ id: '1',
+ sha: '1',
+ path: 'test',
+ type: 'tree',
+ currentPath: '/',
+ });
+ });
+ it('emits a `row-appear` event', () => {
+ findIntersectionObserver().vm.$emit('appear');
+ expect(vm.emitted('row-appear')).toEqual([
+ [
+ {
+ hasCommit: true,
+ rowNumber: 123,
+ },
+ ],
+ ]);
+ });
+ });
});
diff --git a/spec/frontend/repository/components/tree_content_spec.js b/spec/frontend/repository/components/tree_content_spec.js
index e36287eff29..49397c77215 100644
--- a/spec/frontend/repository/components/tree_content_spec.js
+++ b/spec/frontend/repository/components/tree_content_spec.js
@@ -3,6 +3,13 @@ import paginatedTreeQuery from 'shared_queries/repository/paginated_tree.query.g
import FilePreview from '~/repository/components/preview/index.vue';
import FileTable from '~/repository/components/table/index.vue';
import TreeContent from '~/repository/components/tree_content.vue';
+import { loadCommits, isRequested, resetRequestedCommits } from '~/repository/commits_service';
+
+jest.mock('~/repository/commits_service', () => ({
+ loadCommits: jest.fn(() => Promise.resolve()),
+ isRequested: jest.fn(),
+ resetRequestedCommits: jest.fn(),
+}));
let vm;
let $apollo;
@@ -23,6 +30,7 @@ function factory(path, data = () => ({})) {
glFeatures: {
increasePageSizeExponentially: true,
paginatedTreeGraphqlQuery: true,
+ lazyLoadCommits: true,
},
},
});
@@ -45,7 +53,7 @@ describe('Repository table component', () => {
expect(vm.find(FilePreview).exists()).toBe(true);
});
- it('trigger fetchFiles when mounted', async () => {
+ it('trigger fetchFiles and resetRequestedCommits when mounted', async () => {
factory('/');
jest.spyOn(vm.vm, 'fetchFiles').mockImplementation(() => {});
@@ -53,6 +61,7 @@ describe('Repository table component', () => {
await vm.vm.$nextTick();
expect(vm.vm.fetchFiles).toHaveBeenCalled();
+ expect(resetRequestedCommits).toHaveBeenCalled();
});
describe('normalizeData', () => {
@@ -180,4 +189,15 @@ describe('Repository table component', () => {
});
});
});
+
+ it('loads commit data when row-appear event is emitted', () => {
+ const path = 'some/path';
+ const rowNumber = 1;
+
+ factory(path);
+ findFileTable().vm.$emit('row-appear', { hasCommit: false, rowNumber });
+
+ expect(isRequested).toHaveBeenCalledWith(rowNumber);
+ expect(loadCommits).toHaveBeenCalledWith('', path, '', rowNumber);
+ });
});
diff --git a/spec/frontend/repository/router_spec.js b/spec/frontend/repository/router_spec.js
index bb82fa706fd..3f822db601f 100644
--- a/spec/frontend/repository/router_spec.js
+++ b/spec/frontend/repository/router_spec.js
@@ -24,4 +24,32 @@ describe('Repository router spec', () => {
expect(componentsForRoute).toContain(component);
}
});
+
+ describe('Storing Web IDE path globally', () => {
+ const proj = 'foo-bar-group/foo-bar-proj';
+ let originalGl;
+
+ beforeEach(() => {
+ originalGl = window.gl;
+ });
+
+ afterEach(() => {
+ window.gl = originalGl;
+ });
+
+ it.each`
+ path | branch | expectedPath
+ ${'/'} | ${'main'} | ${`/-/ide/project/${proj}/edit/main/-/`}
+ ${'/tree/main'} | ${'main'} | ${`/-/ide/project/${proj}/edit/main/-/`}
+ ${'/tree/feat(test)'} | ${'feat(test)'} | ${`/-/ide/project/${proj}/edit/feat(test)/-/`}
+ ${'/-/tree/main'} | ${'main'} | ${`/-/ide/project/${proj}/edit/main/-/`}
+ ${'/-/tree/main/app/assets'} | ${'main'} | ${`/-/ide/project/${proj}/edit/main/-/app/assets/`}
+ ${'/-/blob/main/file.md'} | ${'main'} | ${`/-/ide/project/${proj}/edit/main/-/file.md`}
+ `('generates the correct Web IDE url for $path', ({ path, branch, expectedPath } = {}) => {
+ const router = createRouter(proj, branch);
+
+ router.push(path);
+ expect(window.gl.webIDEPath).toBe(expectedPath);
+ });
+ });
});
diff --git a/spec/frontend/runner/admin_runners/admin_runners_app_spec.js b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
index 3292f635f6b..33e9c122080 100644
--- a/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
+++ b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
@@ -1,3 +1,4 @@
+import { GlLink } from '@gitlab/ui';
import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -5,6 +6,7 @@ import setWindowLocation from 'helpers/set_window_location_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { updateHistory } from '~/lib/utils/url_utility';
import AdminRunnersApp from '~/runner/admin_runners/admin_runners_app.vue';
@@ -12,7 +14,6 @@ import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_
import RunnerList from '~/runner/components/runner_list.vue';
import RunnerManualSetupHelp from '~/runner/components/runner_manual_setup_help.vue';
import RunnerPagination from '~/runner/components/runner_pagination.vue';
-import RunnerTypeHelp from '~/runner/components/runner_type_help.vue';
import {
ADMIN_FILTERED_SEARCH_NAMESPACE,
@@ -49,7 +50,6 @@ describe('AdminRunnersApp', () => {
let wrapper;
let mockRunnersQuery;
- const findRunnerTypeHelp = () => wrapper.findComponent(RunnerTypeHelp);
const findRunnerManualSetupHelp = () => wrapper.findComponent(RunnerManualSetupHelp);
const findRunnerList = () => wrapper.findComponent(RunnerList);
const findRunnerPagination = () => extendedWrapper(wrapper.findComponent(RunnerPagination));
@@ -86,10 +86,6 @@ describe('AdminRunnersApp', () => {
wrapper.destroy();
});
- it('shows the runner type help', () => {
- expect(findRunnerTypeHelp().exists()).toBe(true);
- });
-
it('shows the runner setup instructions', () => {
expect(findRunnerManualSetupHelp().props('registrationToken')).toBe(mockRegistrationToken);
});
@@ -98,6 +94,20 @@ describe('AdminRunnersApp', () => {
expect(findRunnerList().props('runners')).toEqual(runnersData.data.runners.nodes);
});
+ it('runner item links to the runner admin page', async () => {
+ createComponent({ mountFn: mount });
+
+ await waitForPromises();
+
+ const { id, shortSha } = runnersData.data.runners.nodes[0];
+ const numericId = getIdFromGraphQLId(id);
+
+ const runnerLink = wrapper.find('tr [data-testid="td-summary"]').find(GlLink);
+
+ expect(runnerLink.text()).toBe(`#${numericId} (${shortSha})`);
+ expect(runnerLink.attributes('href')).toBe(`http://localhost/admin/runners/${numericId}`);
+ });
+
it('requests the runners with no filters', () => {
expect(mockRunnersQuery).toHaveBeenLastCalledWith({
status: undefined,
diff --git a/spec/frontend/runner/components/cells/runner_actions_cell_spec.js b/spec/frontend/runner/components/cells/runner_actions_cell_spec.js
index 95f7c38cafc..5aa3879ac3e 100644
--- a/spec/frontend/runner/components/cells/runner_actions_cell_spec.js
+++ b/spec/frontend/runner/components/cells/runner_actions_cell_spec.js
@@ -5,15 +5,18 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
import RunnerActionCell from '~/runner/components/cells/runner_actions_cell.vue';
+import getGroupRunnersQuery from '~/runner/graphql/get_group_runners.query.graphql';
import getRunnersQuery from '~/runner/graphql/get_runners.query.graphql';
import runnerDeleteMutation from '~/runner/graphql/runner_delete.mutation.graphql';
import runnerUpdateMutation from '~/runner/graphql/runner_update.mutation.graphql';
import { captureException } from '~/runner/sentry_utils';
-import { runnerData } from '../../mock_data';
+import { runnersData, runnerData } from '../../mock_data';
-const mockRunner = runnerData.data.runner;
+const mockRunner = runnersData.data.runners.nodes[0];
+const mockRunnerDetails = runnerData.data.runner;
const getRunnersQueryName = getRunnersQuery.definitions[0].name.value;
+const getGroupRunnersQueryName = getGroupRunnersQuery.definitions[0].name.value;
const localVue = createLocalVue();
localVue.use(VueApollo);
@@ -36,6 +39,7 @@ describe('RunnerTypeCell', () => {
propsData: {
runner: {
id: mockRunner.id,
+ adminUrl: mockRunner.adminUrl,
active,
},
},
@@ -61,7 +65,7 @@ describe('RunnerTypeCell', () => {
runnerUpdateMutationHandler.mockResolvedValue({
data: {
runnerUpdate: {
- runner: runnerData.data.runner,
+ runner: mockRunnerDetails,
errors: [],
},
},
@@ -78,7 +82,7 @@ describe('RunnerTypeCell', () => {
it('Displays the runner edit link with the correct href', () => {
createComponent();
- expect(findEditBtn().attributes('href')).toBe('/admin/runners/1');
+ expect(findEditBtn().attributes('href')).toBe(mockRunner.adminUrl);
});
describe.each`
@@ -231,7 +235,7 @@ describe('RunnerTypeCell', () => {
},
},
awaitRefetchQueries: true,
- refetchQueries: [getRunnersQueryName],
+ refetchQueries: [getRunnersQueryName, getGroupRunnersQueryName],
});
});
diff --git a/spec/frontend/runner/components/cells/runner_name_cell_spec.js b/spec/frontend/runner/components/cells/runner_summary_cell_spec.js
index 26055fc0faf..1c9282e0acd 100644
--- a/spec/frontend/runner/components/cells/runner_name_cell_spec.js
+++ b/spec/frontend/runner/components/cells/runner_summary_cell_spec.js
@@ -1,6 +1,5 @@
-import { GlLink } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-import RunnerNameCell from '~/runner/components/cells/runner_name_cell.vue';
+import RunnerSummaryCell from '~/runner/components/cells/runner_summary_cell.vue';
const mockId = '1';
const mockShortSha = '2P6oDVDm';
@@ -9,10 +8,8 @@ const mockDescription = 'runner-1';
describe('RunnerTypeCell', () => {
let wrapper;
- const findLink = () => wrapper.findComponent(GlLink);
-
- const createComponent = () => {
- wrapper = mount(RunnerNameCell, {
+ const createComponent = (options) => {
+ wrapper = mount(RunnerSummaryCell, {
propsData: {
runner: {
id: `gid://gitlab/Ci::Runner/${mockId}`,
@@ -20,6 +17,7 @@ describe('RunnerTypeCell', () => {
description: mockDescription,
},
},
+ ...options,
});
};
@@ -31,12 +29,23 @@ describe('RunnerTypeCell', () => {
wrapper.destroy();
});
- it('Displays the runner link with id and short token', () => {
- expect(findLink().text()).toBe(`#${mockId} (${mockShortSha})`);
- expect(findLink().attributes('href')).toBe(`/admin/runners/${mockId}`);
+ it('Displays the runner name as id and short token', () => {
+ expect(wrapper.text()).toContain(`#${mockId} (${mockShortSha})`);
});
it('Displays the runner description', () => {
expect(wrapper.text()).toContain(mockDescription);
});
+
+ it('Displays a custom slot', () => {
+ const slotContent = 'My custom runner summary';
+
+ createComponent({
+ slots: {
+ 'runner-name': slotContent,
+ },
+ });
+
+ expect(wrapper.text()).toContain(slotContent);
+ });
});
diff --git a/spec/frontend/runner/components/runner_list_spec.js b/spec/frontend/runner/components/runner_list_spec.js
index 344d1e5c150..e24dffea1eb 100644
--- a/spec/frontend/runner/components/runner_list_spec.js
+++ b/spec/frontend/runner/components/runner_list_spec.js
@@ -1,4 +1,4 @@
-import { GlLink, GlTable, GlSkeletonLoader } from '@gitlab/ui';
+import { GlTable, GlSkeletonLoader } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import { cloneDeep } from 'lodash';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
@@ -67,11 +67,11 @@ describe('RunnerList', () => {
// Badges
expect(findCell({ fieldKey: 'type' }).text()).toMatchInterpolatedText('specific paused');
- // Runner identifier
- expect(findCell({ fieldKey: 'name' }).text()).toContain(
+ // Runner summary
+ expect(findCell({ fieldKey: 'summary' }).text()).toContain(
`#${getIdFromGraphQLId(id)} (${shortSha})`,
);
- expect(findCell({ fieldKey: 'name' }).text()).toContain(description);
+ expect(findCell({ fieldKey: 'summary' }).text()).toContain(description);
// Other fields
expect(findCell({ fieldKey: 'version' }).text()).toBe(version);
@@ -136,12 +136,11 @@ describe('RunnerList', () => {
});
});
- it('Links to the runner page', () => {
- const { id } = mockRunners[0];
+ it('Shows runner identifier', () => {
+ const { id, shortSha } = mockRunners[0];
+ const numericId = getIdFromGraphQLId(id);
- expect(findCell({ fieldKey: 'name' }).find(GlLink).attributes('href')).toBe(
- `/admin/runners/${getIdFromGraphQLId(id)}`,
- );
+ expect(findCell({ fieldKey: 'summary' }).text()).toContain(`#${numericId} (${shortSha})`);
});
describe('When data is loading', () => {
diff --git a/spec/frontend/runner/components/runner_state_locked_badge_spec.js b/spec/frontend/runner/components/runner_state_locked_badge_spec.js
new file mode 100644
index 00000000000..e92b671f5a1
--- /dev/null
+++ b/spec/frontend/runner/components/runner_state_locked_badge_spec.js
@@ -0,0 +1,45 @@
+import { GlBadge } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import RunnerStateLockedBadge from '~/runner/components/runner_state_locked_badge.vue';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+
+describe('RunnerTypeBadge', () => {
+ let wrapper;
+
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const getTooltip = () => getBinding(findBadge().element, 'gl-tooltip');
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMount(RunnerStateLockedBadge, {
+ propsData: {
+ ...props,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders locked state', () => {
+ expect(wrapper.text()).toBe('locked');
+ expect(findBadge().props('variant')).toBe('warning');
+ });
+
+ it('renders tooltip', () => {
+ expect(getTooltip().value).toBeDefined();
+ });
+
+ it('passes arbitrary attributes to the badge', () => {
+ createComponent({ props: { size: 'sm' } });
+
+ expect(findBadge().props('size')).toBe('sm');
+ });
+});
diff --git a/spec/frontend/runner/components/runner_state_paused_badge_spec.js b/spec/frontend/runner/components/runner_state_paused_badge_spec.js
new file mode 100644
index 00000000000..8df56d6e3f3
--- /dev/null
+++ b/spec/frontend/runner/components/runner_state_paused_badge_spec.js
@@ -0,0 +1,45 @@
+import { GlBadge } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import RunnerStatePausedBadge from '~/runner/components/runner_state_paused_badge.vue';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+
+describe('RunnerTypeBadge', () => {
+ let wrapper;
+
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const getTooltip = () => getBinding(findBadge().element, 'gl-tooltip');
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMount(RunnerStatePausedBadge, {
+ propsData: {
+ ...props,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders paused state', () => {
+ expect(wrapper.text()).toBe('paused');
+ expect(findBadge().props('variant')).toBe('danger');
+ });
+
+ it('renders tooltip', () => {
+ expect(getTooltip().value).toBeDefined();
+ });
+
+ it('passes arbitrary attributes to the badge', () => {
+ createComponent({ props: { size: 'sm' } });
+
+ expect(findBadge().props('size')).toBe('sm');
+ });
+});
diff --git a/spec/frontend/runner/components/runner_type_badge_spec.js b/spec/frontend/runner/components/runner_type_badge_spec.js
index ab5ccf6390f..fb344e65389 100644
--- a/spec/frontend/runner/components/runner_type_badge_spec.js
+++ b/spec/frontend/runner/components/runner_type_badge_spec.js
@@ -1,18 +1,23 @@
import { GlBadge } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import RunnerTypeBadge from '~/runner/components/runner_type_badge.vue';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '~/runner/constants';
describe('RunnerTypeBadge', () => {
let wrapper;
const findBadge = () => wrapper.findComponent(GlBadge);
+ const getTooltip = () => getBinding(findBadge().element, 'gl-tooltip');
const createComponent = ({ props = {} } = {}) => {
wrapper = shallowMount(RunnerTypeBadge, {
propsData: {
...props,
},
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
});
};
@@ -20,16 +25,24 @@ describe('RunnerTypeBadge', () => {
wrapper.destroy();
});
- it.each`
+ describe.each`
type | text | variant
${INSTANCE_TYPE} | ${'shared'} | ${'success'}
${GROUP_TYPE} | ${'group'} | ${'success'}
${PROJECT_TYPE} | ${'specific'} | ${'info'}
- `('displays $type runner with as "$text" with a $variant variant ', ({ type, text, variant }) => {
- createComponent({ props: { type } });
+ `('displays $type runner', ({ type, text, variant }) => {
+ beforeEach(() => {
+ createComponent({ props: { type } });
+ });
- expect(findBadge().text()).toBe(text);
- expect(findBadge().props('variant')).toBe(variant);
+ it(`as "${text}" with a ${variant} variant`, () => {
+ expect(findBadge().text()).toBe(text);
+ expect(findBadge().props('variant')).toBe(variant);
+ });
+
+ it('with a tooltip', () => {
+ expect(getTooltip().value).toBeDefined();
+ });
});
it('validation fails for an incorrect type', () => {
diff --git a/spec/frontend/runner/components/runner_type_help_spec.js b/spec/frontend/runner/components/runner_type_help_spec.js
deleted file mode 100644
index f0d03282f8e..00000000000
--- a/spec/frontend/runner/components/runner_type_help_spec.js
+++ /dev/null
@@ -1,32 +0,0 @@
-import { GlBadge } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
-import RunnerTypeHelp from '~/runner/components/runner_type_help.vue';
-
-describe('RunnerTypeHelp', () => {
- let wrapper;
-
- const findBadges = () => wrapper.findAllComponents(GlBadge);
-
- const createComponent = () => {
- wrapper = mount(RunnerTypeHelp);
- };
-
- beforeEach(() => {
- createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('Displays each of the runner types', () => {
- expect(findBadges().at(0).text()).toBe('shared');
- expect(findBadges().at(1).text()).toBe('group');
- expect(findBadges().at(2).text()).toBe('specific');
- });
-
- it('Displays runner states', () => {
- expect(findBadges().at(3).text()).toBe('locked');
- expect(findBadges().at(4).text()).toBe('paused');
- });
-});
diff --git a/spec/frontend/runner/group_runners/group_runners_app_spec.js b/spec/frontend/runner/group_runners/group_runners_app_spec.js
index e80da40e3bd..5f3aabd4bc3 100644
--- a/spec/frontend/runner/group_runners/group_runners_app_spec.js
+++ b/spec/frontend/runner/group_runners/group_runners_app_spec.js
@@ -1,3 +1,4 @@
+import { GlLink } from '@gitlab/ui';
import { createLocalVue, shallowMount, mount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -5,13 +6,13 @@ import setWindowLocation from 'helpers/set_window_location_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { updateHistory } from '~/lib/utils/url_utility';
import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_bar.vue';
import RunnerList from '~/runner/components/runner_list.vue';
import RunnerManualSetupHelp from '~/runner/components/runner_manual_setup_help.vue';
import RunnerPagination from '~/runner/components/runner_pagination.vue';
-import RunnerTypeHelp from '~/runner/components/runner_type_help.vue';
import {
CREATED_ASC,
@@ -34,8 +35,7 @@ localVue.use(VueApollo);
const mockGroupFullPath = 'group1';
const mockRegistrationToken = 'AABBCC';
-const mockRunners = groupRunnersData.data.group.runners.nodes;
-const mockGroupRunnersLimitedCount = mockRunners.length;
+const mockGroupRunnersLimitedCount = groupRunnersData.data.group.runners.edges.length;
jest.mock('~/flash');
jest.mock('~/runner/sentry_utils');
@@ -48,7 +48,6 @@ describe('GroupRunnersApp', () => {
let wrapper;
let mockGroupRunnersQuery;
- const findRunnerTypeHelp = () => wrapper.findComponent(RunnerTypeHelp);
const findRunnerManualSetupHelp = () => wrapper.findComponent(RunnerManualSetupHelp);
const findRunnerList = () => wrapper.findComponent(RunnerList);
const findRunnerPagination = () => extendedWrapper(wrapper.findComponent(RunnerPagination));
@@ -82,16 +81,27 @@ describe('GroupRunnersApp', () => {
await waitForPromises();
});
- it('shows the runner type help', () => {
- expect(findRunnerTypeHelp().exists()).toBe(true);
- });
-
it('shows the runner setup instructions', () => {
expect(findRunnerManualSetupHelp().props('registrationToken')).toBe(mockRegistrationToken);
});
it('shows the runners list', () => {
- expect(findRunnerList().props('runners')).toEqual(groupRunnersData.data.group.runners.nodes);
+ expect(findRunnerList().props('runners')).toEqual(
+ groupRunnersData.data.group.runners.edges.map(({ node }) => node),
+ );
+ });
+
+ it('runner item links to the runner group page', async () => {
+ const { webUrl, node } = groupRunnersData.data.group.runners.edges[0];
+ const { id, shortSha } = node;
+
+ createComponent({ mountFn: mount });
+
+ await waitForPromises();
+
+ const runnerLink = wrapper.find('tr [data-testid="td-summary"]').find(GlLink);
+ expect(runnerLink.text()).toBe(`#${getIdFromGraphQLId(id)} (${shortSha})`);
+ expect(runnerLink.attributes('href')).toBe(webUrl);
});
it('requests the runners with group path and no other filters', () => {
diff --git a/spec/frontend/runner/mock_data.js b/spec/frontend/runner/mock_data.js
index c90b9a4c426..b8d0f1273c7 100644
--- a/spec/frontend/runner/mock_data.js
+++ b/spec/frontend/runner/mock_data.js
@@ -1,14 +1,18 @@
-const runnerFixture = (filename) => getJSONFixture(`graphql/runner/${filename}`);
-
// Fixtures generated by: spec/frontend/fixtures/runner.rb
// Admin queries
-export const runnersData = runnerFixture('get_runners.query.graphql.json');
-export const runnersDataPaginated = runnerFixture('get_runners.query.graphql.paginated.json');
-export const runnerData = runnerFixture('get_runner.query.graphql.json');
+import runnersData from 'test_fixtures/graphql/runner/get_runners.query.graphql.json';
+import runnersDataPaginated from 'test_fixtures/graphql/runner/get_runners.query.graphql.paginated.json';
+import runnerData from 'test_fixtures/graphql/runner/get_runner.query.graphql.json';
// Group queries
-export const groupRunnersData = runnerFixture('get_group_runners.query.graphql.json');
-export const groupRunnersDataPaginated = runnerFixture(
- 'get_group_runners.query.graphql.paginated.json',
-);
+import groupRunnersData from 'test_fixtures/graphql/runner/get_group_runners.query.graphql.json';
+import groupRunnersDataPaginated from 'test_fixtures/graphql/runner/get_group_runners.query.graphql.paginated.json';
+
+export {
+ runnerData,
+ runnersDataPaginated,
+ runnersData,
+ groupRunnersData,
+ groupRunnersDataPaginated,
+};
diff --git a/spec/frontend/search_settings/components/search_settings_spec.js b/spec/frontend/search_settings/components/search_settings_spec.js
index 173936e1ce3..6beaea8dba5 100644
--- a/spec/frontend/search_settings/components/search_settings_spec.js
+++ b/spec/frontend/search_settings/components/search_settings_spec.js
@@ -11,6 +11,7 @@ describe('search_settings/components/search_settings.vue', () => {
const GENERAL_SETTINGS_ID = 'js-general-settings';
const ADVANCED_SETTINGS_ID = 'js-advanced-settings';
const EXTRA_SETTINGS_ID = 'js-extra-settings';
+ const TEXT_CONTAIN_SEARCH_TERM = `This text contain ${SEARCH_TERM} and <script>alert("111")</script> others.`;
let wrapper;
@@ -33,6 +34,21 @@ describe('search_settings/components/search_settings.vue', () => {
const visibleSectionsCount = () =>
document.querySelectorAll(`${SECTION_SELECTOR}:not(.${HIDE_CLASS})`).length;
const highlightedElementsCount = () => document.querySelectorAll(`.${HIGHLIGHT_CLASS}`).length;
+
+ const highlightedTextNodes = () => {
+ const highlightedList = Array.from(document.querySelectorAll(`.${HIGHLIGHT_CLASS}`));
+ return highlightedList.every((element) => {
+ return element.textContent.toLowerCase() === SEARCH_TERM.toLowerCase();
+ });
+ };
+
+ const matchParentElement = () => {
+ const highlightedList = Array.from(document.querySelectorAll(`.${HIGHLIGHT_CLASS}`));
+ return highlightedList.map((element) => {
+ return element.parentNode;
+ });
+ };
+
const findSearchBox = () => wrapper.find(GlSearchBoxByType);
const search = (term) => {
findSearchBox().vm.$emit('input', term);
@@ -52,6 +68,7 @@ describe('search_settings/components/search_settings.vue', () => {
</section>
<section id="${EXTRA_SETTINGS_ID}" class="settings">
<span>${SEARCH_TERM}</span>
+ <span>${TEXT_CONTAIN_SEARCH_TERM}</span>
</section>
</div>
</div>
@@ -82,7 +99,23 @@ describe('search_settings/components/search_settings.vue', () => {
it('highlight elements that match the search term', () => {
search(SEARCH_TERM);
- expect(highlightedElementsCount()).toBe(1);
+ expect(highlightedElementsCount()).toBe(2);
+ });
+
+ it('highlight only search term and not the whole line', () => {
+ search(SEARCH_TERM);
+
+ expect(highlightedTextNodes()).toBe(true);
+ });
+
+ it('prevents search xss', () => {
+ search(SEARCH_TERM);
+
+ const parentNodeList = matchParentElement();
+ parentNodeList.forEach((element) => {
+ const scriptElement = element.getElementsByTagName('script');
+ expect(scriptElement.length).toBe(0);
+ });
});
describe('default', () => {
diff --git a/spec/frontend/sidebar/assignees_spec.js b/spec/frontend/sidebar/assignees_spec.js
index be27a800418..b3a67f18f82 100644
--- a/spec/frontend/sidebar/assignees_spec.js
+++ b/spec/frontend/sidebar/assignees_spec.js
@@ -3,6 +3,7 @@ import { mount } from '@vue/test-utils';
import { trimText } from 'helpers/text_helper';
import UsersMockHelper from 'helpers/user_mock_data_helper';
import Assignee from '~/sidebar/components/assignees/assignees.vue';
+import AssigneeAvatarLink from '~/sidebar/components/assignees/assignee_avatar_link.vue';
import UsersMock from './mock_data';
describe('Assignee component', () => {
@@ -19,6 +20,7 @@ describe('Assignee component', () => {
});
};
+ const findAllAvatarLinks = () => wrapper.findAllComponents(AssigneeAvatarLink);
const findComponentTextNoUsers = () => wrapper.find('[data-testid="no-value"]');
const findCollapsedChildren = () => wrapper.findAll('.sidebar-collapsed-icon > *');
@@ -148,7 +150,7 @@ describe('Assignee component', () => {
editable: true,
});
- expect(wrapper.findAll('.user-item').length).toBe(users.length);
+ expect(findAllAvatarLinks()).toHaveLength(users.length);
expect(wrapper.find('.user-list-more').exists()).toBe(false);
});
@@ -178,9 +180,9 @@ describe('Assignee component', () => {
users,
});
- const userItems = wrapper.findAll('.user-list .user-item a');
+ const userItems = findAllAvatarLinks();
- expect(userItems.length).toBe(3);
+ expect(userItems).toHaveLength(3);
expect(userItems.at(0).attributes('title')).toBe(users[2].name);
});
diff --git a/spec/frontend/sidebar/components/reviewers/uncollapsed_reviewer_list_spec.js b/spec/frontend/sidebar/components/reviewers/uncollapsed_reviewer_list_spec.js
index 9f6878db785..6b80224083a 100644
--- a/spec/frontend/sidebar/components/reviewers/uncollapsed_reviewer_list_spec.js
+++ b/spec/frontend/sidebar/components/reviewers/uncollapsed_reviewer_list_spec.js
@@ -26,9 +26,9 @@ describe('UncollapsedReviewerList component', () => {
});
describe('single reviewer', () => {
- beforeEach(() => {
- const user = userDataMock();
+ const user = userDataMock();
+ beforeEach(() => {
createComponent({
users: [user],
});
@@ -39,6 +39,7 @@ describe('UncollapsedReviewerList component', () => {
});
it('shows one user with avatar, username and author name', () => {
+ expect(wrapper.text()).toContain(user.name);
expect(wrapper.text()).toContain(`@root`);
});
@@ -56,11 +57,18 @@ describe('UncollapsedReviewerList component', () => {
});
describe('multiple reviewers', () => {
- beforeEach(() => {
- const user = userDataMock();
+ const user = userDataMock();
+ const user2 = {
+ ...user,
+ id: 2,
+ name: 'nonrooty-nonrootersen',
+ username: 'hello-world',
+ approved: true,
+ };
+ beforeEach(() => {
createComponent({
- users: [user, { ...user, id: 2, username: 'hello-world', approved: true }],
+ users: [user, user2],
});
});
@@ -69,7 +77,9 @@ describe('UncollapsedReviewerList component', () => {
});
it('shows both users with avatar, username and author name', () => {
+ expect(wrapper.text()).toContain(user.name);
expect(wrapper.text()).toContain(`@root`);
+ expect(wrapper.text()).toContain(user2.name);
expect(wrapper.text()).toContain(`@hello-world`);
});
diff --git a/spec/frontend/sidebar/sidebar_labels_spec.js b/spec/frontend/sidebar/sidebar_labels_spec.js
index 7455f684380..8437ee1b723 100644
--- a/spec/frontend/sidebar/sidebar_labels_spec.js
+++ b/spec/frontend/sidebar/sidebar_labels_spec.js
@@ -27,6 +27,7 @@ describe('sidebar labels', () => {
labelsManagePath: '/gitlab-org/gitlab-test/-/labels',
projectIssuesPath: '/gitlab-org/gitlab-test/-/issues',
projectPath: 'gitlab-org/gitlab-test',
+ fullPath: 'gitlab-org/gitlab-test',
};
const $apollo = {
@@ -110,10 +111,9 @@ describe('sidebar labels', () => {
mutation: updateIssueLabelsMutation,
variables: {
input: {
- addLabelIds: [40],
iid: defaultProps.iid,
projectPath: defaultProps.projectPath,
- removeLabelIds: [26, 55],
+ labelIds: [toLabelGid(29), toLabelGid(28), toLabelGid(27), toLabelGid(40)],
},
},
};
diff --git a/spec/frontend/sidebar/todo_spec.js b/spec/frontend/sidebar/todo_spec.js
index ff6da3abad0..6829e688c65 100644
--- a/spec/frontend/sidebar/todo_spec.js
+++ b/spec/frontend/sidebar/todo_spec.js
@@ -27,7 +27,7 @@ describe('SidebarTodo', () => {
it.each`
state | classes
${false} | ${['gl-button', 'btn', 'btn-default', 'btn-todo', 'issuable-header-btn', 'float-right']}
- ${true} | ${['btn-blank', 'btn-todo', 'sidebar-collapsed-icon', 'dont-change-state']}
+ ${true} | ${['btn-blank', 'btn-todo', 'sidebar-collapsed-icon', 'js-dont-change-state']}
`('returns todo button classes for when `collapsed` prop is `$state`', ({ state, classes }) => {
createComponent({ collapsed: state });
expect(wrapper.find('button').classes()).toStrictEqual(classes);
diff --git a/spec/frontend/snippets/components/show_spec.js b/spec/frontend/snippets/components/show_spec.js
index b7b638b5137..af61f4ea54f 100644
--- a/spec/frontend/snippets/components/show_spec.js
+++ b/spec/frontend/snippets/components/show_spec.js
@@ -41,19 +41,23 @@ describe('Snippet view app', () => {
},
});
}
+
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findEmbedDropdown = () => wrapper.findComponent(EmbedDropdown);
+
afterEach(() => {
wrapper.destroy();
});
it('renders loader while the query is in flight', () => {
createComponent({ loading: true });
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(findLoadingIcon().exists()).toBe(true);
});
- it('renders all simple components after the query is finished', () => {
+ it('renders all simple components required after the query is finished', () => {
createComponent();
- expect(wrapper.find(SnippetHeader).exists()).toBe(true);
- expect(wrapper.find(SnippetTitle).exists()).toBe(true);
+ expect(wrapper.findComponent(SnippetHeader).exists()).toBe(true);
+ expect(wrapper.findComponent(SnippetTitle).exists()).toBe(true);
});
it('renders embed dropdown component if visibility allows', () => {
@@ -65,7 +69,7 @@ describe('Snippet view app', () => {
},
},
});
- expect(wrapper.find(EmbedDropdown).exists()).toBe(true);
+ expect(findEmbedDropdown().exists()).toBe(true);
});
it('renders correct snippet-blob components', () => {
@@ -98,7 +102,7 @@ describe('Snippet view app', () => {
},
},
});
- expect(wrapper.find(EmbedDropdown).exists()).toBe(isRendered);
+ expect(findEmbedDropdown().exists()).toBe(isRendered);
});
});
@@ -120,7 +124,7 @@ describe('Snippet view app', () => {
},
},
});
- expect(wrapper.find(CloneDropdownButton).exists()).toBe(isRendered);
+ expect(wrapper.findComponent(CloneDropdownButton).exists()).toBe(isRendered);
},
);
});
diff --git a/spec/frontend/snippets/components/snippet_header_spec.js b/spec/frontend/snippets/components/snippet_header_spec.js
index fb95be3a77c..552a1c6fcde 100644
--- a/spec/frontend/snippets/components/snippet_header_spec.js
+++ b/spec/frontend/snippets/components/snippet_header_spec.js
@@ -1,23 +1,30 @@
import { GlButton, GlModal, GlDropdown } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { ApolloMutation } from 'vue-apollo';
+import MockAdapter from 'axios-mock-adapter';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { Blob, BinaryBlob } from 'jest/blob/components/mock_data';
import { differenceInMilliseconds } from '~/lib/utils/datetime_utility';
-import SnippetHeader from '~/snippets/components/snippet_header.vue';
+import SnippetHeader, { i18n } from '~/snippets/components/snippet_header.vue';
import DeleteSnippetMutation from '~/snippets/mutations/deleteSnippet.mutation.graphql';
+import axios from '~/lib/utils/axios_utils';
+import createFlash, { FLASH_TYPES } from '~/flash';
+
+jest.mock('~/flash');
describe('Snippet header component', () => {
let wrapper;
let snippet;
let mutationTypes;
let mutationVariables;
+ let mock;
let errorMsg;
let err;
const originalRelativeUrlRoot = gon.relative_url_root;
const reportAbusePath = '/-/snippets/42/mark_as_spam';
+ const canReportSpam = true;
const GlEmoji = { template: '<img/>' };
@@ -47,6 +54,7 @@ describe('Snippet header component', () => {
mocks: { $apollo },
provide: {
reportAbusePath,
+ canReportSpam,
...provide,
},
propsData: {
@@ -118,10 +126,13 @@ describe('Snippet header component', () => {
RESOLVE: jest.fn(() => Promise.resolve({ data: { destroySnippet: { errors: [] } } })),
REJECT: jest.fn(() => Promise.reject(err)),
};
+
+ mock = new MockAdapter(axios);
});
afterEach(() => {
wrapper.destroy();
+ mock.restore();
gon.relative_url_root = originalRelativeUrlRoot;
});
@@ -186,7 +197,6 @@ describe('Snippet header component', () => {
{
category: 'primary',
disabled: false,
- href: reportAbusePath,
text: 'Submit as spam',
variant: 'default',
},
@@ -205,7 +215,6 @@ describe('Snippet header component', () => {
text: 'Delete',
},
{
- href: reportAbusePath,
text: 'Submit as spam',
title: 'Submit as spam',
},
@@ -249,6 +258,31 @@ describe('Snippet header component', () => {
);
});
+ describe('submit snippet as spam', () => {
+ beforeEach(async () => {
+ createComponent();
+ });
+
+ it.each`
+ request | variant | text
+ ${200} | ${'SUCCESS'} | ${i18n.snippetSpamSuccess}
+ ${500} | ${'DANGER'} | ${i18n.snippetSpamFailure}
+ `(
+ 'renders a "$variant" flash message with "$text" for a request with a "$request" response',
+ async ({ request, variant, text }) => {
+ const submitAsSpamBtn = findButtons().at(2);
+ mock.onPost(reportAbusePath).reply(request);
+ submitAsSpamBtn.trigger('click');
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenLastCalledWith({
+ message: expect.stringContaining(text),
+ type: FLASH_TYPES[variant],
+ });
+ },
+ );
+ });
+
describe('with guest user', () => {
beforeEach(() => {
createComponent({
@@ -258,6 +292,7 @@ describe('Snippet header component', () => {
},
provide: {
reportAbusePath: null,
+ canReportSpam: false,
},
});
});
diff --git a/spec/frontend/test_setup.js b/spec/frontend/test_setup.js
index 4d1b0f54e42..2c8e0fff848 100644
--- a/spec/frontend/test_setup.js
+++ b/spec/frontend/test_setup.js
@@ -6,7 +6,7 @@ import { setGlobalDateToFakeDate } from 'helpers/fake_date';
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import Translate from '~/vue_shared/translate';
-import { getJSONFixture, loadHTMLFixture, setHTMLFixture } from './__helpers__/fixtures';
+import { loadHTMLFixture, setHTMLFixture } from './__helpers__/fixtures';
import { initializeTestTimeout } from './__helpers__/timeout';
import customMatchers from './matchers';
import { setupManualMocks } from './mocks/mocks_helper';
@@ -43,7 +43,6 @@ Vue.use(Translate);
// convenience wrapper for migration from Karma
Object.assign(global, {
- getJSONFixture,
loadFixtures: loadHTMLFixture,
setFixtures: setHTMLFixture,
});
diff --git a/spec/frontend/tracking/get_standard_context_spec.js b/spec/frontend/tracking/get_standard_context_spec.js
index b7bdc56b801..ada914b586c 100644
--- a/spec/frontend/tracking/get_standard_context_spec.js
+++ b/spec/frontend/tracking/get_standard_context_spec.js
@@ -1,5 +1,13 @@
-import { SNOWPLOW_JS_SOURCE } from '~/tracking/constants';
+import { SNOWPLOW_JS_SOURCE, GOOGLE_ANALYTICS_ID_COOKIE_NAME } from '~/tracking/constants';
import getStandardContext from '~/tracking/get_standard_context';
+import { setCookie, removeCookie } from '~/lib/utils/common_utils';
+
+const TEST_GA_ID = 'GA1.2.345678901.234567891';
+const TEST_BASE_DATA = {
+ source: SNOWPLOW_JS_SOURCE,
+ google_analytics_id: '',
+ extra: {},
+};
describe('~/tracking/get_standard_context', () => {
beforeEach(() => {
@@ -10,10 +18,7 @@ describe('~/tracking/get_standard_context', () => {
it('returns default object if called without server context', () => {
expect(getStandardContext()).toStrictEqual({
schema: undefined,
- data: {
- source: SNOWPLOW_JS_SOURCE,
- extra: {},
- },
+ data: TEST_BASE_DATA,
});
});
@@ -28,9 +33,8 @@ describe('~/tracking/get_standard_context', () => {
expect(getStandardContext()).toStrictEqual({
schema: 'iglu:com.gitlab/gitlab_standard',
data: {
+ ...TEST_BASE_DATA,
environment: 'testing',
- source: SNOWPLOW_JS_SOURCE,
- extra: {},
},
});
});
@@ -50,4 +54,15 @@ describe('~/tracking/get_standard_context', () => {
expect(getStandardContext({ extra }).data.extra).toBe(extra);
});
+
+ describe('with Google Analytics cookie present', () => {
+ afterEach(() => {
+ removeCookie(GOOGLE_ANALYTICS_ID_COOKIE_NAME);
+ });
+
+ it('appends Google Analytics ID', () => {
+ setCookie(GOOGLE_ANALYTICS_ID_COOKIE_NAME, TEST_GA_ID);
+ expect(getStandardContext().data.google_analytics_id).toBe(TEST_GA_ID);
+ });
+ });
});
diff --git a/spec/frontend/tracking/tracking_initialization_spec.js b/spec/frontend/tracking/tracking_initialization_spec.js
new file mode 100644
index 00000000000..2b70aacc4cb
--- /dev/null
+++ b/spec/frontend/tracking/tracking_initialization_spec.js
@@ -0,0 +1,140 @@
+import { TRACKING_CONTEXT_SCHEMA } from '~/experimentation/constants';
+import { getExperimentData, getAllExperimentContexts } from '~/experimentation/utils';
+import Tracking, { initUserTracking, initDefaultTrackers } from '~/tracking';
+import getStandardContext from '~/tracking/get_standard_context';
+
+jest.mock('~/experimentation/utils', () => ({
+ getExperimentData: jest.fn(),
+ getAllExperimentContexts: jest.fn(),
+}));
+
+describe('Tracking', () => {
+ let standardContext;
+ let snowplowSpy;
+ let bindDocumentSpy;
+ let trackLoadEventsSpy;
+ let enableFormTracking;
+ let setAnonymousUrlsSpy;
+
+ beforeAll(() => {
+ window.gl = window.gl || {};
+ window.gl.snowplowStandardContext = {
+ schema: 'iglu:com.gitlab/gitlab_standard',
+ data: {
+ environment: 'testing',
+ source: 'unknown',
+ extra: {},
+ },
+ };
+
+ standardContext = getStandardContext();
+ });
+
+ beforeEach(() => {
+ getExperimentData.mockReturnValue(undefined);
+ getAllExperimentContexts.mockReturnValue([]);
+
+ window.snowplow = window.snowplow || (() => {});
+ window.snowplowOptions = {
+ namespace: 'gl_test',
+ hostname: 'app.test.com',
+ cookieDomain: '.test.com',
+ };
+
+ snowplowSpy = jest.spyOn(window, 'snowplow');
+ });
+
+ describe('initUserTracking', () => {
+ it('calls through to get a new tracker with the expected options', () => {
+ initUserTracking();
+ expect(snowplowSpy).toHaveBeenCalledWith('newTracker', 'gl_test', 'app.test.com', {
+ namespace: 'gl_test',
+ hostname: 'app.test.com',
+ cookieDomain: '.test.com',
+ appId: '',
+ userFingerprint: false,
+ respectDoNotTrack: true,
+ forceSecureTracker: true,
+ eventMethod: 'post',
+ contexts: { webPage: true, performanceTiming: true },
+ formTracking: false,
+ linkClickTracking: false,
+ pageUnloadTimer: 10,
+ formTrackingConfig: {
+ fields: { allow: [] },
+ forms: { allow: [] },
+ },
+ });
+ });
+ });
+
+ describe('initDefaultTrackers', () => {
+ beforeEach(() => {
+ bindDocumentSpy = jest.spyOn(Tracking, 'bindDocument').mockImplementation(() => null);
+ trackLoadEventsSpy = jest.spyOn(Tracking, 'trackLoadEvents').mockImplementation(() => null);
+ enableFormTracking = jest
+ .spyOn(Tracking, 'enableFormTracking')
+ .mockImplementation(() => null);
+ setAnonymousUrlsSpy = jest.spyOn(Tracking, 'setAnonymousUrls').mockImplementation(() => null);
+ });
+
+ it('should activate features based on what has been enabled', () => {
+ initDefaultTrackers();
+ expect(snowplowSpy).toHaveBeenCalledWith('enableActivityTracking', 30, 30);
+ expect(snowplowSpy).toHaveBeenCalledWith('trackPageView', null, [standardContext]);
+ expect(snowplowSpy).not.toHaveBeenCalledWith('enableFormTracking');
+ expect(snowplowSpy).not.toHaveBeenCalledWith('enableLinkClickTracking');
+
+ window.snowplowOptions = {
+ ...window.snowplowOptions,
+ formTracking: true,
+ linkClickTracking: true,
+ formTrackingConfig: { forms: { whitelist: ['foo'] }, fields: { whitelist: ['bar'] } },
+ };
+
+ initDefaultTrackers();
+ expect(enableFormTracking).toHaveBeenCalledWith(window.snowplowOptions.formTrackingConfig);
+ expect(snowplowSpy).toHaveBeenCalledWith('enableLinkClickTracking');
+ });
+
+ it('binds the document event handling', () => {
+ initDefaultTrackers();
+ expect(bindDocumentSpy).toHaveBeenCalled();
+ });
+
+ it('tracks page loaded events', () => {
+ initDefaultTrackers();
+ expect(trackLoadEventsSpy).toHaveBeenCalled();
+ });
+
+ it('calls the anonymized URLs method', () => {
+ initDefaultTrackers();
+ expect(setAnonymousUrlsSpy).toHaveBeenCalled();
+ });
+
+ describe('when there are experiment contexts', () => {
+ const experimentContexts = [
+ {
+ schema: TRACKING_CONTEXT_SCHEMA,
+ data: { experiment: 'experiment1', variant: 'control' },
+ },
+ {
+ schema: TRACKING_CONTEXT_SCHEMA,
+ data: { experiment: 'experiment_two', variant: 'candidate' },
+ },
+ ];
+
+ beforeEach(() => {
+ getAllExperimentContexts.mockReturnValue(experimentContexts);
+ });
+
+ it('includes those contexts alongside the standard context', () => {
+ initDefaultTrackers();
+ expect(snowplowSpy).toHaveBeenCalledWith('trackPageView', null, [
+ standardContext,
+ ...experimentContexts,
+ ]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/tracking_spec.js b/spec/frontend/tracking/tracking_spec.js
index 21fed51ff10..b7a2e4f4f51 100644
--- a/spec/frontend/tracking_spec.js
+++ b/spec/frontend/tracking/tracking_spec.js
@@ -8,16 +8,16 @@ import getStandardContext from '~/tracking/get_standard_context';
jest.mock('~/experimentation/utils', () => ({
getExperimentData: jest.fn(),
- getAllExperimentContexts: jest.fn(),
+ getAllExperimentContexts: jest.fn().mockReturnValue([]),
}));
+const TEST_CATEGORY = 'root:index';
+const TEST_ACTION = 'generic';
+const TEST_LABEL = 'button';
+
describe('Tracking', () => {
let standardContext;
let snowplowSpy;
- let bindDocumentSpy;
- let trackLoadEventsSpy;
- let enableFormTracking;
- let setAnonymousUrlsSpy;
beforeAll(() => {
window.gl = window.gl || {};
@@ -30,132 +30,46 @@ describe('Tracking', () => {
extra: {},
},
};
+ window.snowplowOptions = {
+ namespace: 'gl_test',
+ hostname: 'app.test.com',
+ cookieDomain: '.test.com',
+ formTracking: true,
+ linkClickTracking: true,
+ formTrackingConfig: { forms: { allow: ['foo'] }, fields: { allow: ['bar'] } },
+ };
standardContext = getStandardContext();
+ window.snowplow = window.snowplow || (() => {});
+ document.body.dataset.page = TEST_CATEGORY;
+
+ initUserTracking();
+ initDefaultTrackers();
});
beforeEach(() => {
getExperimentData.mockReturnValue(undefined);
getAllExperimentContexts.mockReturnValue([]);
- window.snowplow = window.snowplow || (() => {});
- window.snowplowOptions = {
- namespace: '_namespace_',
- hostname: 'app.gitfoo.com',
- cookieDomain: '.gitfoo.com',
- };
snowplowSpy = jest.spyOn(window, 'snowplow');
});
- describe('initUserTracking', () => {
- it('calls through to get a new tracker with the expected options', () => {
- initUserTracking();
- expect(snowplowSpy).toHaveBeenCalledWith('newTracker', '_namespace_', 'app.gitfoo.com', {
- namespace: '_namespace_',
- hostname: 'app.gitfoo.com',
- cookieDomain: '.gitfoo.com',
- appId: '',
- userFingerprint: false,
- respectDoNotTrack: true,
- forceSecureTracker: true,
- eventMethod: 'post',
- contexts: { webPage: true, performanceTiming: true },
- formTracking: false,
- linkClickTracking: false,
- pageUnloadTimer: 10,
- formTrackingConfig: {
- fields: { allow: [] },
- forms: { allow: [] },
- },
- });
- });
- });
-
- describe('initDefaultTrackers', () => {
- beforeEach(() => {
- bindDocumentSpy = jest.spyOn(Tracking, 'bindDocument').mockImplementation(() => null);
- trackLoadEventsSpy = jest.spyOn(Tracking, 'trackLoadEvents').mockImplementation(() => null);
- enableFormTracking = jest
- .spyOn(Tracking, 'enableFormTracking')
- .mockImplementation(() => null);
- setAnonymousUrlsSpy = jest.spyOn(Tracking, 'setAnonymousUrls').mockImplementation(() => null);
- });
-
- it('should activate features based on what has been enabled', () => {
- initDefaultTrackers();
- expect(snowplowSpy).toHaveBeenCalledWith('enableActivityTracking', 30, 30);
- expect(snowplowSpy).toHaveBeenCalledWith('trackPageView', null, [standardContext]);
- expect(snowplowSpy).not.toHaveBeenCalledWith('enableFormTracking');
- expect(snowplowSpy).not.toHaveBeenCalledWith('enableLinkClickTracking');
-
- window.snowplowOptions = {
- ...window.snowplowOptions,
- formTracking: true,
- linkClickTracking: true,
- formTrackingConfig: { forms: { whitelist: ['foo'] }, fields: { whitelist: ['bar'] } },
- };
-
- initDefaultTrackers();
- expect(enableFormTracking).toHaveBeenCalledWith(window.snowplowOptions.formTrackingConfig);
- expect(snowplowSpy).toHaveBeenCalledWith('enableLinkClickTracking');
- });
-
- it('binds the document event handling', () => {
- initDefaultTrackers();
- expect(bindDocumentSpy).toHaveBeenCalled();
- });
-
- it('tracks page loaded events', () => {
- initDefaultTrackers();
- expect(trackLoadEventsSpy).toHaveBeenCalled();
- });
-
- it('calls the anonymized URLs method', () => {
- initDefaultTrackers();
- expect(setAnonymousUrlsSpy).toHaveBeenCalled();
- });
-
- describe('when there are experiment contexts', () => {
- const experimentContexts = [
- {
- schema: TRACKING_CONTEXT_SCHEMA,
- data: { experiment: 'experiment1', variant: 'control' },
- },
- {
- schema: TRACKING_CONTEXT_SCHEMA,
- data: { experiment: 'experiment_two', variant: 'candidate' },
- },
- ];
-
- beforeEach(() => {
- getAllExperimentContexts.mockReturnValue(experimentContexts);
- });
-
- it('includes those contexts alongside the standard context', () => {
- initDefaultTrackers();
- expect(snowplowSpy).toHaveBeenCalledWith('trackPageView', null, [
- standardContext,
- ...experimentContexts,
- ]);
- });
- });
- });
-
describe('.event', () => {
afterEach(() => {
window.doNotTrack = undefined;
navigator.doNotTrack = undefined;
navigator.msDoNotTrack = undefined;
+ jest.clearAllMocks();
});
it('tracks to snowplow (our current tracking system)', () => {
- Tracking.event('_category_', '_eventName_', { label: '_label_' });
+ Tracking.event(TEST_CATEGORY, TEST_ACTION, { label: TEST_LABEL });
expect(snowplowSpy).toHaveBeenCalledWith(
'trackStructEvent',
- '_category_',
- '_eventName_',
- '_label_',
+ TEST_CATEGORY,
+ TEST_ACTION,
+ TEST_LABEL,
undefined,
undefined,
[standardContext],
@@ -165,12 +79,12 @@ describe('Tracking', () => {
it('allows adding extra data to the default context', () => {
const extra = { foo: 'bar' };
- Tracking.event('_category_', '_eventName_', { extra });
+ Tracking.event(TEST_CATEGORY, TEST_ACTION, { extra });
expect(snowplowSpy).toHaveBeenCalledWith(
'trackStructEvent',
- '_category_',
- '_eventName_',
+ TEST_CATEGORY,
+ TEST_ACTION,
undefined,
undefined,
undefined,
@@ -188,28 +102,28 @@ describe('Tracking', () => {
it('skips tracking if snowplow is unavailable', () => {
window.snowplow = false;
- Tracking.event('_category_', '_eventName_');
+ Tracking.event(TEST_CATEGORY, TEST_ACTION);
expect(snowplowSpy).not.toHaveBeenCalled();
});
it('skips tracking if the user does not want to be tracked (general spec)', () => {
window.doNotTrack = '1';
- Tracking.event('_category_', '_eventName_');
+ Tracking.event(TEST_CATEGORY, TEST_ACTION);
expect(snowplowSpy).not.toHaveBeenCalled();
});
it('skips tracking if the user does not want to be tracked (firefox legacy)', () => {
navigator.doNotTrack = 'yes';
- Tracking.event('_category_', '_eventName_');
+ Tracking.event(TEST_CATEGORY, TEST_ACTION);
expect(snowplowSpy).not.toHaveBeenCalled();
});
it('skips tracking if the user does not want to be tracked (IE legacy)', () => {
navigator.msDoNotTrack = '1';
- Tracking.event('_category_', '_eventName_');
+ Tracking.event(TEST_CATEGORY, TEST_ACTION);
expect(snowplowSpy).not.toHaveBeenCalled();
});
@@ -237,7 +151,7 @@ describe('Tracking', () => {
);
});
- it('does not add empty form whitelist rules', () => {
+ it('does not add empty form allow rules', () => {
Tracking.enableFormTracking({ fields: { allow: ['input-class1'] } });
expect(snowplowSpy).toHaveBeenCalledWith(
@@ -287,7 +201,7 @@ describe('Tracking', () => {
describe('.flushPendingEvents', () => {
it('flushes any pending events', () => {
Tracking.initialized = false;
- Tracking.event('_category_', '_eventName_', { label: '_label_' });
+ Tracking.event(TEST_CATEGORY, TEST_ACTION, { label: TEST_LABEL });
expect(snowplowSpy).not.toHaveBeenCalled();
@@ -295,9 +209,9 @@ describe('Tracking', () => {
expect(snowplowSpy).toHaveBeenCalledWith(
'trackStructEvent',
- '_category_',
- '_eventName_',
- '_label_',
+ TEST_CATEGORY,
+ TEST_ACTION,
+ TEST_LABEL,
undefined,
undefined,
[standardContext],
@@ -332,14 +246,13 @@ describe('Tracking', () => {
});
});
- it('appends the hash/fragment to the pseudonymized URL', () => {
- const hash = 'first-heading';
+ it('does not append the hash/fragment to the pseudonymized URL', () => {
window.gl.snowplowPseudonymizedPageUrl = TEST_HOST;
- window.location.hash = hash;
+ window.location.hash = 'first-heading';
Tracking.setAnonymousUrls();
- expect(snowplowSpy).toHaveBeenCalledWith('setCustomUrl', `${TEST_HOST}#${hash}`);
+ expect(snowplowSpy).toHaveBeenCalledWith('setCustomUrl', TEST_HOST);
});
it('does not set the referrer URL by default', () => {
@@ -409,84 +322,79 @@ describe('Tracking', () => {
});
});
- describe.each`
- term
- ${'event'}
- ${'action'}
- `('tracking interface events with data-track-$term', ({ term }) => {
+ describe('tracking interface events with data-track-action', () => {
let eventSpy;
beforeEach(() => {
eventSpy = jest.spyOn(Tracking, 'event');
- Tracking.bindDocument('_category_'); // only happens once
setHTMLFixture(`
- <input data-track-${term}="click_input1" data-track-label="_label_" value=0 />
- <input data-track-${term}="click_input2" data-track-value=0 value=0/>
- <input type="checkbox" data-track-${term}="toggle_checkbox" value=1 checked/>
- <input class="dropdown" data-track-${term}="toggle_dropdown"/>
- <div data-track-${term}="nested_event"><span class="nested"></span></div>
- <input data-track-bogus="click_bogusinput" data-track-label="_label_" value="_value_"/>
- <input data-track-${term}="click_input3" data-track-experiment="example" value="_value_"/>
- <input data-track-${term}="event_with_extra" data-track-extra='{ "foo": "bar" }' />
- <input data-track-${term}="event_with_invalid_extra" data-track-extra="invalid_json" />
+ <input data-track-action="click_input1" data-track-label="button" value="0" />
+ <input data-track-action="click_input2" data-track-value="0" value="0" />
+ <input type="checkbox" data-track-action="toggle_checkbox" value=1 checked />
+ <input class="dropdown" data-track-action="toggle_dropdown"/>
+ <div data-track-action="nested_event"><span class="nested"></span></div>
+ <input data-track-bogus="click_bogusinput" data-track-label="button" value="1" />
+ <input data-track-action="click_input3" data-track-experiment="example" value="1" />
+ <input data-track-action="event_with_extra" data-track-extra='{ "foo": "bar" }' />
+ <input data-track-action="event_with_invalid_extra" data-track-extra="invalid_json" />
`);
});
- it(`binds to clicks on elements matching [data-track-${term}]`, () => {
- document.querySelector(`[data-track-${term}="click_input1"]`).click();
+ it(`binds to clicks on elements matching [data-track-action]`, () => {
+ document.querySelector(`[data-track-action="click_input1"]`).click();
- expect(eventSpy).toHaveBeenCalledWith('_category_', 'click_input1', {
- label: '_label_',
+ expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'click_input1', {
+ label: TEST_LABEL,
value: '0',
});
});
- it(`does not bind to clicks on elements without [data-track-${term}]`, () => {
+ it(`does not bind to clicks on elements without [data-track-action]`, () => {
document.querySelector('[data-track-bogus="click_bogusinput"]').click();
expect(eventSpy).not.toHaveBeenCalled();
});
it('allows value override with the data-track-value attribute', () => {
- document.querySelector(`[data-track-${term}="click_input2"]`).click();
+ document.querySelector(`[data-track-action="click_input2"]`).click();
- expect(eventSpy).toHaveBeenCalledWith('_category_', 'click_input2', {
+ expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'click_input2', {
value: '0',
});
});
it('handles checkbox values correctly', () => {
- const checkbox = document.querySelector(`[data-track-${term}="toggle_checkbox"]`);
+ const checkbox = document.querySelector(`[data-track-action="toggle_checkbox"]`);
checkbox.click(); // unchecking
- expect(eventSpy).toHaveBeenCalledWith('_category_', 'toggle_checkbox', {
+ expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'toggle_checkbox', {
value: 0,
});
checkbox.click(); // checking
- expect(eventSpy).toHaveBeenCalledWith('_category_', 'toggle_checkbox', {
+ expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'toggle_checkbox', {
value: '1',
});
});
it('handles bootstrap dropdowns', () => {
- const dropdown = document.querySelector(`[data-track-${term}="toggle_dropdown"]`);
+ const dropdown = document.querySelector(`[data-track-action="toggle_dropdown"]`);
dropdown.dispatchEvent(new Event('show.bs.dropdown', { bubbles: true }));
- expect(eventSpy).toHaveBeenCalledWith('_category_', 'toggle_dropdown_show', {});
+ expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'toggle_dropdown_show', {});
dropdown.dispatchEvent(new Event('hide.bs.dropdown', { bubbles: true }));
- expect(eventSpy).toHaveBeenCalledWith('_category_', 'toggle_dropdown_hide', {});
+ expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'toggle_dropdown_hide', {});
});
it('handles nested elements inside an element with tracking', () => {
document.querySelector('span.nested').click();
- expect(eventSpy).toHaveBeenCalledWith('_category_', 'nested_event', {});
+ expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'nested_event', {});
});
it('includes experiment data if linked to an experiment', () => {
@@ -497,54 +405,50 @@ describe('Tracking', () => {
};
getExperimentData.mockReturnValue(mockExperimentData);
- document.querySelector(`[data-track-${term}="click_input3"]`).click();
+ document.querySelector(`[data-track-action="click_input3"]`).click();
- expect(eventSpy).toHaveBeenCalledWith('_category_', 'click_input3', {
- value: '_value_',
+ expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'click_input3', {
+ value: '1',
context: { schema: TRACKING_CONTEXT_SCHEMA, data: mockExperimentData },
});
});
it('supports extra data as JSON', () => {
- document.querySelector(`[data-track-${term}="event_with_extra"]`).click();
+ document.querySelector(`[data-track-action="event_with_extra"]`).click();
- expect(eventSpy).toHaveBeenCalledWith('_category_', 'event_with_extra', {
+ expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'event_with_extra', {
extra: { foo: 'bar' },
});
});
it('ignores extra if provided JSON is invalid', () => {
- document.querySelector(`[data-track-${term}="event_with_invalid_extra"]`).click();
+ document.querySelector(`[data-track-action="event_with_invalid_extra"]`).click();
- expect(eventSpy).toHaveBeenCalledWith('_category_', 'event_with_invalid_extra', {});
+ expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'event_with_invalid_extra', {});
});
});
- describe.each`
- term
- ${'event'}
- ${'action'}
- `('tracking page loaded events with -$term', ({ term }) => {
+ describe('tracking page loaded events with -action', () => {
let eventSpy;
beforeEach(() => {
eventSpy = jest.spyOn(Tracking, 'event');
setHTMLFixture(`
- <div data-track-${term}="click_link" data-track-label="all_nested_links">
- <input data-track-${term}="render" data-track-label="label1" value=1 data-track-property="_property_"/>
- <span data-track-${term}="render" data-track-label="label2" data-track-value=1>
+ <div data-track-action="click_link" data-track-label="all_nested_links">
+ <input data-track-action="render" data-track-label="label1" value=1 data-track-property="_property_" />
+ <span data-track-action="render" data-track-label="label2" data-track-value="1">
<a href="#" id="link">Something</a>
</span>
- <input data-track-${term}="_render_bogus_" data-track-label="label3" value="_value_" data-track-property="_property_"/>
+ <input data-track-action="_render_bogus_" data-track-label="label3" value="_value_" data-track-property="_property_" />
</div>
`);
- Tracking.trackLoadEvents('_category_'); // only happens once
+ Tracking.trackLoadEvents(TEST_CATEGORY);
});
- it(`sends tracking events when [data-track-${term}="render"] is on an element`, () => {
+ it(`sends tracking events when [data-track-action="render"] is on an element`, () => {
expect(eventSpy.mock.calls).toEqual([
[
- '_category_',
+ TEST_CATEGORY,
'render',
{
label: 'label1',
@@ -553,7 +457,7 @@ describe('Tracking', () => {
},
],
[
- '_category_',
+ TEST_CATEGORY,
'render',
{
label: 'label2',
@@ -576,16 +480,16 @@ describe('Tracking', () => {
eventSpy.mockClear();
});
- it(`avoids using ancestor [data-track-${term}="render"] tracking configurations`, () => {
+ it(`avoids using ancestor [data-track-action="render"] tracking configurations`, () => {
link.dispatchEvent(new Event(event, { bubbles: true }));
expect(eventSpy).not.toHaveBeenCalledWith(
- '_category_',
+ TEST_CATEGORY,
`render${actionSuffix}`,
expect.any(Object),
);
expect(eventSpy).toHaveBeenCalledWith(
- '_category_',
+ TEST_CATEGORY,
`click_link${actionSuffix}`,
expect.objectContaining({ label: 'all_nested_links' }),
);
diff --git a/spec/frontend/tracking/utils_spec.js b/spec/frontend/tracking/utils_spec.js
new file mode 100644
index 00000000000..d6f2c5095b4
--- /dev/null
+++ b/spec/frontend/tracking/utils_spec.js
@@ -0,0 +1,99 @@
+import {
+ renameKey,
+ getReferrersCache,
+ addExperimentContext,
+ addReferrersCacheEntry,
+ filterOldReferrersCacheEntries,
+} from '~/tracking/utils';
+import { TRACKING_CONTEXT_SCHEMA } from '~/experimentation/constants';
+import { REFERRER_TTL, URLS_CACHE_STORAGE_KEY } from '~/tracking/constants';
+import { TEST_HOST } from 'helpers/test_constants';
+
+jest.mock('~/experimentation/utils', () => ({
+ getExperimentData: jest.fn().mockReturnValue({}),
+}));
+
+describe('~/tracking/utils', () => {
+ beforeEach(() => {
+ window.gl = window.gl || {};
+ window.gl.snowplowStandardContext = {};
+ });
+
+ describe('addExperimentContext', () => {
+ const options = {
+ category: 'root:index',
+ action: 'generic',
+ };
+
+ it('returns same options if no experiment is provided', () => {
+ expect(addExperimentContext({ options })).toStrictEqual({ options });
+ });
+
+ it('adds experiment if provided', () => {
+ const experiment = 'TEST_EXPERIMENT_NAME';
+
+ expect(addExperimentContext({ experiment, ...options })).toStrictEqual({
+ ...options,
+ context: { data: {}, schema: TRACKING_CONTEXT_SCHEMA },
+ });
+ });
+ });
+
+ describe('renameKey', () => {
+ it('renames a given key', () => {
+ expect(renameKey({ allow: [] }, 'allow', 'permit')).toStrictEqual({ permit: [] });
+ });
+ });
+
+ describe('referrers cache', () => {
+ describe('filterOldReferrersCacheEntries', () => {
+ it('removes entries with old or no timestamp', () => {
+ const now = Date.now();
+ const cache = [{ timestamp: now }, { timestamp: now - REFERRER_TTL }, { referrer: '' }];
+
+ expect(filterOldReferrersCacheEntries(cache)).toStrictEqual([{ timestamp: now }]);
+ });
+ });
+
+ describe('getReferrersCache', () => {
+ beforeEach(() => {
+ localStorage.removeItem(URLS_CACHE_STORAGE_KEY);
+ });
+
+ it('returns an empty array if cache is not found', () => {
+ expect(getReferrersCache()).toHaveLength(0);
+ });
+
+ it('returns an empty array if cache is invalid', () => {
+ localStorage.setItem(URLS_CACHE_STORAGE_KEY, 'Invalid JSON');
+
+ expect(getReferrersCache()).toHaveLength(0);
+ });
+
+ it('returns parsed entries if valid', () => {
+ localStorage.setItem(
+ URLS_CACHE_STORAGE_KEY,
+ JSON.stringify([{ referrer: '', timestamp: Date.now() }]),
+ );
+
+ expect(getReferrersCache()).toHaveLength(1);
+ });
+ });
+
+ describe('addReferrersCacheEntry', () => {
+ it('unshifts entry and adds timestamp', () => {
+ const now = Date.now();
+
+ addReferrersCacheEntry([{ referrer: '', originalUrl: TEST_HOST, timestamp: now }], {
+ referrer: TEST_HOST,
+ });
+
+ const cache = getReferrersCache();
+
+ expect(cache).toHaveLength(2);
+ expect(cache[0].referrer).toBe(TEST_HOST);
+ expect(cache[0].timestamp).toBeDefined();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/users_select/test_helper.js b/spec/frontend/users_select/test_helper.js
index c5adbe9bb09..59edde48eab 100644
--- a/spec/frontend/users_select/test_helper.js
+++ b/spec/frontend/users_select/test_helper.js
@@ -1,6 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import { memoize, cloneDeep } from 'lodash';
-import { getFixture, getJSONFixture } from 'helpers/fixtures';
+import usersFixture from 'test_fixtures/autocomplete/users.json';
+import { getFixture } from 'helpers/fixtures';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import UsersSelect from '~/users_select';
@@ -15,7 +16,7 @@ const getUserSearchHTML = memoize((fixturePath) => {
return el.outerHTML;
});
-const getUsersFixture = memoize(() => getJSONFixture('autocomplete/users.json'));
+const getUsersFixture = () => usersFixture;
export const getUsersFixtureAt = (idx) => getUsersFixture()[idx];
diff --git a/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js b/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js
index ef712ec23a6..c9dea4394f9 100644
--- a/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js
+++ b/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js
@@ -61,9 +61,7 @@ describe('MRWidget approvals summary', () => {
it('render message', () => {
const names = toNounSeriesText(testRulesLeft());
- expect(wrapper.text()).toContain(
- `Requires ${TEST_APPROVALS_LEFT} more approvals from ${names}.`,
- );
+ expect(wrapper.text()).toContain(`Requires ${TEST_APPROVALS_LEFT} approvals from ${names}.`);
});
});
@@ -75,7 +73,9 @@ describe('MRWidget approvals summary', () => {
});
it('renders message', () => {
- expect(wrapper.text()).toContain(`Requires ${TEST_APPROVALS_LEFT} more approvals.`);
+ expect(wrapper.text()).toContain(
+ `Requires ${TEST_APPROVALS_LEFT} approvals from eligible users`,
+ );
});
});
diff --git a/spec/frontend/vue_mr_widget/components/extensions/actions_spec.js b/spec/frontend/vue_mr_widget/components/extensions/actions_spec.js
new file mode 100644
index 00000000000..d5d779d7a34
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/components/extensions/actions_spec.js
@@ -0,0 +1,35 @@
+import { GlButton, GlDropdownItem } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Actions from '~/vue_merge_request_widget/components/extensions/actions.vue';
+
+let wrapper;
+
+function factory(propsData = {}) {
+ wrapper = shallowMount(Actions, {
+ propsData: { ...propsData, widget: 'test' },
+ });
+}
+
+describe('MR widget extension actions', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('tertiaryButtons', () => {
+ it('renders buttons', () => {
+ factory({
+ tertiaryButtons: [{ text: 'hello world', href: 'https://gitlab.com', target: '_blank' }],
+ });
+
+ expect(wrapper.findAllComponents(GlButton)).toHaveLength(1);
+ });
+
+ it('renders tertiary actions in dropdown', () => {
+ factory({
+ tertiaryButtons: [{ text: 'hello world', href: 'https://gitlab.com', target: '_blank' }],
+ });
+
+ expect(wrapper.findAllComponents(GlDropdownItem)).toHaveLength(1);
+ });
+ });
+});
diff --git a/spec/frontend/vue_mr_widget/components/extensions/index_spec.js b/spec/frontend/vue_mr_widget/components/extensions/index_spec.js
index 8f6fe3cd37a..63df63a9b00 100644
--- a/spec/frontend/vue_mr_widget/components/extensions/index_spec.js
+++ b/spec/frontend/vue_mr_widget/components/extensions/index_spec.js
@@ -1,4 +1,7 @@
-import { registerExtension, extensions } from '~/vue_merge_request_widget/components/extensions';
+import {
+ registerExtension,
+ registeredExtensions,
+} from '~/vue_merge_request_widget/components/extensions';
import ExtensionBase from '~/vue_merge_request_widget/components/extensions/base.vue';
describe('MR widget extension registering', () => {
@@ -14,7 +17,7 @@ describe('MR widget extension registering', () => {
},
});
- expect(extensions[0]).toEqual(
+ expect(registeredExtensions.extensions[0]).toEqual(
expect.objectContaining({
extends: ExtensionBase,
name: 'Test',
diff --git a/spec/frontend/vue_mr_widget/components/extensions/status_icon_spec.js b/spec/frontend/vue_mr_widget/components/extensions/status_icon_spec.js
new file mode 100644
index 00000000000..f3aa5bb774f
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/components/extensions/status_icon_spec.js
@@ -0,0 +1,36 @@
+import { GlIcon, GlLoadingIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import StatusIcon from '~/vue_merge_request_widget/components/extensions/status_icon.vue';
+
+let wrapper;
+
+function factory(propsData = {}) {
+ wrapper = shallowMount(StatusIcon, {
+ propsData,
+ });
+}
+
+describe('MR widget extensions status icon', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders loading icon', () => {
+ factory({ name: 'test', isLoading: true, iconName: 'failed' });
+
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ });
+
+ it('renders status icon', () => {
+ factory({ name: 'test', isLoading: false, iconName: 'failed' });
+
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlIcon).props('name')).toBe('status-failed');
+ });
+
+ it('sets aria-label for status icon', () => {
+ factory({ name: 'test', isLoading: false, iconName: 'failed' });
+
+ expect(wrapper.findComponent(GlIcon).props('ariaLabel')).toBe('Failed test');
+ });
+});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
index 5ec719b17d6..efe2bf75c3f 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import axios from '~/lib/utils/axios_utils';
import ArtifactsApp from '~/vue_merge_request_widget/components/artifacts_list_app.vue';
import DeploymentList from '~/vue_merge_request_widget/components/deployment/deployment_list.vue';
@@ -12,12 +13,14 @@ describe('MrWidgetPipelineContainer', () => {
let mock;
const factory = (props = {}) => {
- wrapper = mount(MrWidgetPipelineContainer, {
- propsData: {
- mr: { ...mockStore },
- ...props,
- },
- });
+ wrapper = extendedWrapper(
+ mount(MrWidgetPipelineContainer, {
+ propsData: {
+ mr: { ...mockStore },
+ ...props,
+ },
+ }),
+ );
};
beforeEach(() => {
@@ -30,6 +33,7 @@ describe('MrWidgetPipelineContainer', () => {
});
const findDeploymentList = () => wrapper.findComponent(DeploymentList);
+ const findCIErrorMessage = () => wrapper.findByTestId('ci-error-message');
describe('when pre merge', () => {
beforeEach(() => {
@@ -69,15 +73,21 @@ describe('MrWidgetPipelineContainer', () => {
beforeEach(() => {
factory({
isPostMerge: true,
+ mr: {
+ ...mockStore,
+ pipeline: {},
+ ciStatus: undefined,
+ },
});
});
it('renders pipeline', () => {
expect(wrapper.find(MrWidgetPipeline).exists()).toBe(true);
+ expect(findCIErrorMessage().exists()).toBe(false);
expect(wrapper.find(MrWidgetPipeline).props()).toMatchObject({
pipeline: mockStore.mergePipeline,
pipelineCoverageDelta: mockStore.pipelineCoverageDelta,
- ciStatus: mockStore.ciStatus,
+ ciStatus: mockStore.mergePipeline.details.status.text,
hasCi: mockStore.hasCI,
sourceBranch: mockStore.targetBranch,
sourceBranchLink: mockStore.targetBranch,
@@ -92,7 +102,6 @@ describe('MrWidgetPipelineContainer', () => {
targetBranch: 'Foo<script>alert("XSS")</script>',
},
});
-
expect(wrapper.find(MrWidgetPipeline).props().sourceBranchLink).toBe('Foo');
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
index b31a75f30d3..2ff94a547f4 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import { GlSprintf } from '@gitlab/ui';
import CommitsHeader from '~/vue_merge_request_widget/components/states/commits_header.vue';
describe('Commits header component', () => {
@@ -6,6 +7,9 @@ describe('Commits header component', () => {
const createComponent = (props) => {
wrapper = shallowMount(CommitsHeader, {
+ stubs: {
+ GlSprintf,
+ },
propsData: {
isSquashEnabled: false,
targetBranch: 'main',
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index e41fb815c8d..f0fbb1d5851 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -45,6 +45,8 @@ const createTestMr = (customConfig) => {
preferredAutoMergeStrategy: MWPS_MERGE_STRATEGY,
availableAutoMergeStrategies: [MWPS_MERGE_STRATEGY],
mergeImmediatelyDocsPath: 'path/to/merge/immediately/docs',
+ transitionStateMachine: (transition) => eventHub.$emit('StateMachineValueChanged', transition),
+ translateStateToMachine: () => this.transitionStateMachine(),
};
Object.assign(mr, customConfig.mr);
@@ -304,6 +306,9 @@ describe('ReadyToMerge', () => {
setImmediate(() => {
expect(wrapper.vm.isMakingRequest).toBeTruthy();
expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested');
+ expect(eventHub.$emit).toHaveBeenCalledWith('StateMachineValueChanged', {
+ transition: 'start-auto-merge',
+ });
const params = wrapper.vm.service.merge.mock.calls[0][0];
@@ -341,10 +346,15 @@ describe('ReadyToMerge', () => {
it('should handle merge action accepted case', (done) => {
createComponent();
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
jest.spyOn(wrapper.vm.service, 'merge').mockReturnValue(returnPromise('success'));
jest.spyOn(wrapper.vm, 'initiateMergePolling').mockImplementation(() => {});
wrapper.vm.handleMergeButtonClick();
+ expect(eventHub.$emit).toHaveBeenCalledWith('StateMachineValueChanged', {
+ transition: 'start-merge',
+ });
+
setImmediate(() => {
expect(wrapper.vm.isMakingRequest).toBeTruthy();
expect(wrapper.vm.initiateMergePolling).toHaveBeenCalled();
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js
index 61e44140efc..be15e4df66d 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js
@@ -1,9 +1,9 @@
import Vue from 'vue';
-import createFlash from '~/flash';
import WorkInProgress from '~/vue_merge_request_widget/components/states/work_in_progress.vue';
+import toast from '~/vue_shared/plugins/global_toast';
import eventHub from '~/vue_merge_request_widget/event_hub';
-jest.mock('~/flash');
+jest.mock('~/vue_shared/plugins/global_toast');
const createComponent = () => {
const Component = Vue.extend(WorkInProgress);
@@ -63,10 +63,7 @@ describe('Wip', () => {
setImmediate(() => {
expect(vm.isMakingRequest).toBeTruthy();
expect(eventHub.$emit).toHaveBeenCalledWith('UpdateWidgetData', mrObj);
- expect(createFlash).toHaveBeenCalledWith({
- message: 'Marked as ready. Merging is now allowed.',
- type: 'notice',
- });
+ expect(toast).toHaveBeenCalledWith('Marked as ready. Merging is now allowed.');
done();
});
});
diff --git a/spec/frontend/vue_mr_widget/mock_data.js b/spec/frontend/vue_mr_widget/mock_data.js
index f356f6fb5bf..34a741cf8f2 100644
--- a/spec/frontend/vue_mr_widget/mock_data.js
+++ b/spec/frontend/vue_mr_widget/mock_data.js
@@ -280,7 +280,7 @@ export default {
merge_train_index: 1,
security_reports_docs_path: 'security-reports-docs-path',
sast_comparison_path: '/sast_comparison_path',
- secret_scanning_comparison_path: '/secret_scanning_comparison_path',
+ secret_detection_comparison_path: '/secret_detection_comparison_path',
gitpod_enabled: true,
show_gitpod_button: true,
gitpod_url: 'http://gitpod.localhost',
diff --git a/spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js b/spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js
index bd22183cbea..913d5860b48 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js
+++ b/spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js
@@ -8,11 +8,9 @@ describe('MRWidgetHowToMerge', () => {
function mountComponent({ data = {}, props = {} } = {}) {
wrapper = shallowMount(MrWidgetHowToMergeModal, {
data() {
- return { ...data };
- },
- propsData: {
- ...props,
+ return data;
},
+ propsData: props,
stubs: {},
});
}
@@ -57,4 +55,16 @@ describe('MRWidgetHowToMerge', () => {
mountComponent({ props: { isFork: true } });
expect(findInstructionsFields().at(0).text()).toContain('FETCH_HEAD');
});
+
+ it('escapes the target branch name shell-secure', () => {
+ mountComponent({ props: { targetBranch: '";echo$IFS"you_shouldnt_run_this' } });
+
+ expect(findInstructionsFields().at(1).text()).toContain('\'";echo$IFS"you_shouldnt_run_this\'');
+ });
+
+ it('escapes the source branch name shell-secure', () => {
+ mountComponent({ props: { sourceBranch: 'branch-of-$USER' } });
+
+ expect(findInstructionsFields().at(0).text()).toContain("'branch-of-$USER'");
+ });
});
diff --git a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
index c50cf7cb076..5aba6982886 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
@@ -1,13 +1,19 @@
+import { GlBadge, GlLink, GlIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import { securityReportMergeRequestDownloadPathsQueryResponse } from 'jest/vue_shared/security_reports/mock_data';
import axios from '~/lib/utils/axios_utils';
import { setFaviconOverlay } from '~/lib/utils/favicon';
import notify from '~/lib/utils/notify';
import SmartInterval from '~/smart_interval';
+import {
+ registerExtension,
+ registeredExtensions,
+} from '~/vue_merge_request_widget/components/extensions';
import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/constants';
import eventHub from '~/vue_merge_request_widget/event_hub';
import MrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
@@ -15,6 +21,7 @@ import { stateKey } from '~/vue_merge_request_widget/stores/state_maps';
import securityReportMergeRequestDownloadPathsQuery from '~/vue_shared/security_reports/graphql/queries/security_report_merge_request_download_paths.query.graphql';
import { faviconDataUrl, overlayDataUrl } from '../lib/utils/mock_data';
import mockData from './mock_data';
+import testExtension from './test_extension';
jest.mock('~/smart_interval');
@@ -879,4 +886,48 @@ describe('MrWidgetOptions', () => {
});
});
});
+
+ describe('mock extension', () => {
+ beforeEach(() => {
+ registerExtension(testExtension);
+
+ createComponent();
+ });
+
+ afterEach(() => {
+ registeredExtensions.extensions = [];
+ });
+
+ it('renders collapsed data', async () => {
+ await waitForPromises();
+
+ expect(wrapper.text()).toContain('Test extension summary count: 1');
+ });
+
+ it('renders full data', async () => {
+ await waitForPromises();
+
+ wrapper
+ .find('[data-testid="widget-extension"] [data-testid="toggle-button"]')
+ .trigger('click');
+
+ await Vue.nextTick();
+
+ const collapsedSection = wrapper.find('[data-testid="widget-extension-collapsed-section"]');
+ expect(collapsedSection.exists()).toBe(true);
+ expect(collapsedSection.text()).toContain('Hello world');
+
+ // Renders icon in the row
+ expect(collapsedSection.find(GlIcon).exists()).toBe(true);
+ expect(collapsedSection.find(GlIcon).props('name')).toBe('status-failed');
+
+ // Renders badge in the row
+ expect(collapsedSection.find(GlBadge).exists()).toBe(true);
+ expect(collapsedSection.find(GlBadge).text()).toBe('Closed');
+
+ // Renders a link in the row
+ expect(collapsedSection.find(GlLink).exists()).toBe(true);
+ expect(collapsedSection.find(GlLink).text()).toBe('GitLab.com');
+ });
+ });
});
diff --git a/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js b/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js
index bf0179aa425..febcfcd4019 100644
--- a/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js
+++ b/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js
@@ -162,7 +162,7 @@ describe('MergeRequestStore', () => {
expect(store.securityReportsDocsPath).toBe('security-reports-docs-path');
});
- it.each(['sast_comparison_path', 'secret_scanning_comparison_path'])(
+ it.each(['sast_comparison_path', 'secret_detection_comparison_path'])(
'should set %s path',
(property) => {
// Ensure something is set in the mock data
diff --git a/spec/frontend/vue_mr_widget/test_extension.js b/spec/frontend/vue_mr_widget/test_extension.js
new file mode 100644
index 00000000000..a29a4d2fb46
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/test_extension.js
@@ -0,0 +1,37 @@
+import { EXTENSION_ICONS } from '~/vue_merge_request_widget/constants';
+
+export default {
+ name: 'WidgetTestExtension',
+ props: ['targetProjectFullPath'],
+ computed: {
+ summary({ count, targetProjectFullPath }) {
+ return `Test extension summary count: ${count} & ${targetProjectFullPath}`;
+ },
+ statusIcon({ count }) {
+ return count > 0 ? EXTENSION_ICONS.warning : EXTENSION_ICONS.success;
+ },
+ },
+ methods: {
+ fetchCollapsedData({ targetProjectFullPath }) {
+ return Promise.resolve({ targetProjectFullPath, count: 1 });
+ },
+ fetchFullData() {
+ return Promise.resolve([
+ {
+ id: 1,
+ text: 'Hello world',
+ icon: {
+ name: EXTENSION_ICONS.failed,
+ },
+ badge: {
+ text: 'Closed',
+ },
+ link: {
+ href: 'https://gitlab.com',
+ text: 'GitLab.com',
+ },
+ },
+ ]);
+ },
+ },
+};
diff --git a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
index c7758b0faef..44b4c0398cd 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
@@ -4,12 +4,12 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
<gl-dropdown-stub
category="primary"
clearalltext="Clear all"
+ clearalltextclass="gl-px-5"
headertext=""
hideheaderborder="true"
highlighteditemstitle="Selected"
highlighteditemstitleclass="gl-px-5"
right="true"
- showhighlighteditemstitle="true"
size="medium"
text="Clone"
variant="info"
@@ -35,6 +35,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
<b-form-input-stub
class="gl-form-input"
debounce="0"
+ formatter="[Function]"
readonly="true"
type="text"
value="ssh://foo.bar"
@@ -78,6 +79,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
<b-form-input-stub
class="gl-form-input"
debounce="0"
+ formatter="[Function]"
readonly="true"
type="text"
value="http://foo.bar"
diff --git a/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
index f2ff12b2acd..2b89e36344d 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
@@ -4,12 +4,12 @@ exports[`SplitButton renders actionItems 1`] = `
<gl-dropdown-stub
category="primary"
clearalltext="Clear all"
+ clearalltextclass="gl-px-5"
headertext=""
hideheaderborder="true"
highlighteditemstitle="Selected"
highlighteditemstitleclass="gl-px-5"
menu-class=""
- showhighlighteditemstitle="true"
size="medium"
split="true"
text="professor"
diff --git a/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js b/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js
index c6c351a7f3f..3277aab43f0 100644
--- a/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js
@@ -1,25 +1,16 @@
import { shallowMount } from '@vue/test-utils';
-import waitForPromises from 'helpers/wait_for_promises';
import { HIGHLIGHT_CLASS_NAME } from '~/vue_shared/components/blob_viewers/constants';
import SimpleViewer from '~/vue_shared/components/blob_viewers/simple_viewer.vue';
-import SourceEditor from '~/vue_shared/components/source_editor.vue';
describe('Blob Simple Viewer component', () => {
let wrapper;
const contentMock = `<span id="LC1">First</span>\n<span id="LC2">Second</span>\n<span id="LC3">Third</span>`;
const blobHash = 'foo-bar';
- function createComponent(
- content = contentMock,
- isRawContent = false,
- isRefactorFlagEnabled = false,
- ) {
+ function createComponent(content = contentMock, isRawContent = false) {
wrapper = shallowMount(SimpleViewer, {
provide: {
blobHash,
- glFeatures: {
- refactorBlobViewer: isRefactorFlagEnabled,
- },
},
propsData: {
content,
@@ -94,32 +85,4 @@ describe('Blob Simple Viewer component', () => {
});
});
});
-
- describe('Vue refactoring to use Source Editor', () => {
- const findSourceEditor = () => wrapper.find(SourceEditor);
-
- it.each`
- doesRender | condition | isRawContent | isRefactorFlagEnabled
- ${'Does not'} | ${'rawContent is not specified'} | ${false} | ${true}
- ${'Does not'} | ${'feature flag is disabled is not specified'} | ${true} | ${false}
- ${'Does not'} | ${'both, the FF and rawContent are not specified'} | ${false} | ${false}
- ${'Does'} | ${'both, the FF and rawContent are specified'} | ${true} | ${true}
- `(
- '$doesRender render Source Editor component in readonly mode when $condition',
- async ({ isRawContent, isRefactorFlagEnabled } = {}) => {
- createComponent('raw content', isRawContent, isRefactorFlagEnabled);
- await waitForPromises();
-
- if (isRawContent && isRefactorFlagEnabled) {
- expect(findSourceEditor().exists()).toBe(true);
-
- expect(findSourceEditor().props('value')).toBe('raw content');
- expect(findSourceEditor().props('fileName')).toBe('test.js');
- expect(findSourceEditor().props('editorOptions')).toEqual({ readOnly: true });
- } else {
- expect(findSourceEditor().exists()).toBe(false);
- }
- },
- );
- });
});
diff --git a/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js b/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
index d30f36ec63c..fef50bdaccc 100644
--- a/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
+++ b/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
@@ -111,15 +111,13 @@ describe('ColorPicker', () => {
gon.suggested_label_colors = {};
createComponent(shallowMount);
- expect(description()).toBe('Choose any color');
+ expect(description()).toBe('Enter any color.');
expect(presetColors().exists()).toBe(false);
});
it('shows the suggested colors', () => {
createComponent(shallowMount);
- expect(description()).toBe(
- 'Choose any color. Or you can choose one of the suggested colors below',
- );
+ expect(description()).toBe('Enter any color or choose one of the suggested colors below.');
expect(presetColors()).toHaveLength(4);
});
diff --git a/spec/frontend/vue_shared/components/dismissible_feedback_alert_spec.js b/spec/frontend/vue_shared/components/dismissible_feedback_alert_spec.js
index 175d79dd1c2..194681a6138 100644
--- a/spec/frontend/vue_shared/components/dismissible_feedback_alert_spec.js
+++ b/spec/frontend/vue_shared/components/dismissible_feedback_alert_spec.js
@@ -1,4 +1,4 @@
-import { GlAlert, GlSprintf, GlLink } from '@gitlab/ui';
+import { GlAlert, GlSprintf } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import Component from '~/vue_shared/components/dismissible_feedback_alert.vue';
@@ -8,20 +8,13 @@ describe('Dismissible Feedback Alert', () => {
let wrapper;
- const defaultProps = {
- featureName: 'Dependency List',
- feedbackLink: 'https://gitlab.link',
- };
-
+ const featureName = 'Dependency List';
const STORAGE_DISMISSAL_KEY = 'dependency_list_feedback_dismissed';
- const createComponent = ({ props, shallow } = {}) => {
- const mountFn = shallow ? shallowMount : mount;
-
+ const createComponent = ({ mountFn = shallowMount } = {}) => {
wrapper = mountFn(Component, {
propsData: {
- ...defaultProps,
- ...props,
+ featureName,
},
stubs: {
GlSprintf,
@@ -34,8 +27,8 @@ describe('Dismissible Feedback Alert', () => {
wrapper = null;
});
- const findAlert = () => wrapper.find(GlAlert);
- const findLink = () => wrapper.find(GlLink);
+ const createFullComponent = () => createComponent({ mountFn: mount });
+ const findAlert = () => wrapper.findComponent(GlAlert);
describe('with default', () => {
beforeEach(() => {
@@ -46,17 +39,6 @@ describe('Dismissible Feedback Alert', () => {
expect(findAlert().exists()).toBe(true);
});
- it('contains feature name', () => {
- expect(findAlert().text()).toContain(defaultProps.featureName);
- });
-
- it('contains provided link', () => {
- const link = findLink();
-
- expect(link.attributes('href')).toBe(defaultProps.feedbackLink);
- expect(link.attributes('target')).toBe('_blank');
- });
-
it('should have the storage key set', () => {
expect(wrapper.vm.storageKey).toBe(STORAGE_DISMISSAL_KEY);
});
@@ -65,7 +47,7 @@ describe('Dismissible Feedback Alert', () => {
describe('dismissible', () => {
describe('after dismissal', () => {
beforeEach(() => {
- createComponent({ shallow: false });
+ createFullComponent();
findAlert().vm.$emit('dismiss');
});
@@ -81,7 +63,7 @@ describe('Dismissible Feedback Alert', () => {
describe('already dismissed', () => {
it('should not show the alert once dismissed', async () => {
localStorage.setItem(STORAGE_DISMISSAL_KEY, 'true');
- createComponent({ shallow: false });
+ createFullComponent();
await wrapper.vm.$nextTick();
expect(findAlert().exists()).toBe(false);
diff --git a/spec/frontend/vue_shared/components/dropdown_keyboard_navigation_spec.js b/spec/frontend/vue_shared/components/dropdown_keyboard_navigation_spec.js
new file mode 100644
index 00000000000..996df34f2ff
--- /dev/null
+++ b/spec/frontend/vue_shared/components/dropdown_keyboard_navigation_spec.js
@@ -0,0 +1,141 @@
+import { shallowMount } from '@vue/test-utils';
+import DropdownKeyboardNavigation from '~/vue_shared/components/dropdown_keyboard_navigation.vue';
+import { UP_KEY_CODE, DOWN_KEY_CODE, TAB_KEY_CODE } from '~/lib/utils/keycodes';
+
+const MOCK_INDEX = 0;
+const MOCK_MAX = 10;
+const MOCK_MIN = 0;
+const MOCK_DEFAULT_INDEX = 0;
+
+describe('DropdownKeyboardNavigation', () => {
+ let wrapper;
+
+ const defaultProps = {
+ index: MOCK_INDEX,
+ max: MOCK_MAX,
+ min: MOCK_MIN,
+ defaultIndex: MOCK_DEFAULT_INDEX,
+ };
+
+ const createComponent = (props) => {
+ wrapper = shallowMount(DropdownKeyboardNavigation, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ const helpers = {
+ arrowDown: () => {
+ document.dispatchEvent(new KeyboardEvent('keydown', { keyCode: DOWN_KEY_CODE }));
+ },
+ arrowUp: () => {
+ document.dispatchEvent(new KeyboardEvent('keydown', { keyCode: UP_KEY_CODE }));
+ },
+ tab: () => {
+ document.dispatchEvent(new KeyboardEvent('keydown', { keyCode: TAB_KEY_CODE }));
+ },
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('onInit', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should $emit @change with the default index', async () => {
+ expect(wrapper.emitted('change')[0]).toStrictEqual([MOCK_DEFAULT_INDEX]);
+ });
+
+ it('should $emit @change with the default index when max changes', async () => {
+ wrapper.setProps({ max: 20 });
+ await wrapper.vm.$nextTick();
+ // The first @change`call happens on created() so we test for the second [1]
+ expect(wrapper.emitted('change')[1]).toStrictEqual([MOCK_DEFAULT_INDEX]);
+ });
+ });
+
+ describe('keydown events', () => {
+ let incrementSpy;
+
+ beforeEach(() => {
+ createComponent();
+ incrementSpy = jest.spyOn(wrapper.vm, 'increment');
+ });
+
+ afterEach(() => {
+ incrementSpy.mockRestore();
+ });
+
+ it('onKeydown-Down calls increment(1)', () => {
+ helpers.arrowDown();
+
+ expect(incrementSpy).toHaveBeenCalledWith(1);
+ });
+
+ it('onKeydown-Up calls increment(-1)', () => {
+ helpers.arrowUp();
+
+ expect(incrementSpy).toHaveBeenCalledWith(-1);
+ });
+
+ it('onKeydown-Tab $emits @tab event', () => {
+ helpers.tab();
+
+ expect(wrapper.emitted('tab')).toHaveLength(1);
+ });
+ });
+
+ describe('increment', () => {
+ describe('when max is 0', () => {
+ beforeEach(() => {
+ createComponent({ max: 0 });
+ });
+
+ it('does not $emit any @change events', () => {
+ helpers.arrowDown();
+
+ // The first @change`call happens on created() so we test that we only have 1 call
+ expect(wrapper.emitted('change')).toHaveLength(1);
+ });
+ });
+
+ describe.each`
+ keyboardAction | direction | index | max | min
+ ${helpers.arrowDown} | ${1} | ${10} | ${10} | ${0}
+ ${helpers.arrowUp} | ${-1} | ${0} | ${10} | ${0}
+ `('moving out of bounds', ({ keyboardAction, direction, index, max, min }) => {
+ beforeEach(() => {
+ createComponent({ index, max, min });
+ keyboardAction();
+ });
+
+ it(`in ${direction} direction does not $emit any @change events`, () => {
+ // The first @change`call happens on created() so we test that we only have 1 call
+ expect(wrapper.emitted('change')).toHaveLength(1);
+ });
+ });
+
+ describe.each`
+ keyboardAction | direction | index | max | min
+ ${helpers.arrowDown} | ${1} | ${0} | ${10} | ${0}
+ ${helpers.arrowUp} | ${-1} | ${10} | ${10} | ${0}
+ `('moving in bounds', ({ keyboardAction, direction, index, max, min }) => {
+ beforeEach(() => {
+ createComponent({ index, max, min });
+ keyboardAction();
+ });
+
+ it(`in ${direction} direction $emits @change event with the correct index ${
+ index + direction
+ }`, () => {
+ // The first @change`call happens on created() so we test for the second [1]
+ expect(wrapper.emitted('change')[1]).toStrictEqual([index + direction]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
index 134c6c8b929..ae02c554e13 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
@@ -141,7 +141,62 @@ export const mockEpicToken = {
token: EpicToken,
operators: OPERATOR_IS_ONLY,
idProperty: 'iid',
- fetchEpics: () => Promise.resolve({ data: mockEpics }),
+ fullPath: 'gitlab-org',
+};
+
+export const mockEpicNode1 = {
+ __typename: 'Epic',
+ parent: null,
+ id: 'gid://gitlab/Epic/40',
+ iid: '2',
+ title: 'Marketing epic',
+ description: 'Mock epic description',
+ state: 'opened',
+ startDate: '2017-12-25',
+ dueDate: '2018-02-15',
+ webUrl: 'http://gdk.test:3000/groups/gitlab-org/marketing/-/epics/1',
+ hasChildren: false,
+ hasParent: false,
+ confidential: false,
+};
+
+export const mockEpicNode2 = {
+ __typename: 'Epic',
+ parent: null,
+ id: 'gid://gitlab/Epic/41',
+ iid: '3',
+ title: 'Another marketing',
+ startDate: '2017-12-26',
+ dueDate: '2018-03-10',
+ state: 'opened',
+ webUrl: 'http://gdk.test:3000/groups/gitlab-org/marketing/-/epics/2',
+};
+
+export const mockGroupEpicsQueryResponse = {
+ data: {
+ group: {
+ id: 'gid://gitlab/Group/1',
+ name: 'Gitlab Org',
+ epics: {
+ edges: [
+ {
+ node: {
+ ...mockEpicNode1,
+ },
+ __typename: 'EpicEdge',
+ },
+ {
+ node: {
+ ...mockEpicNode2,
+ },
+ __typename: 'EpicEdge',
+ },
+ ],
+ __typename: 'EpicConnection',
+ },
+ __typename: 'Group',
+ },
+ },
};
export const mockReactionEmojiToken = {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
index d3e1bfef561..14fcffd3c50 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
@@ -57,7 +57,7 @@ function createComponent(options = {}) {
provide: {
portalName: 'fake target',
alignSuggestions: function fakeAlignSuggestions() {},
- suggestionsListClass: 'custom-class',
+ suggestionsListClass: () => 'custom-class',
},
data() {
return { ...data };
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
index eb1dbed52cc..f9ce0338d2f 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
@@ -67,7 +67,7 @@ function createComponent({
provide: {
portalName: 'fake target',
alignSuggestions: jest.fn(),
- suggestionsListClass: 'custom-class',
+ suggestionsListClass: () => 'custom-class',
},
stubs,
slots,
@@ -206,26 +206,50 @@ describe('BaseToken', () => {
describe('events', () => {
let wrapperWithNoStubs;
- beforeEach(() => {
- wrapperWithNoStubs = createComponent({
- stubs: { Portal: true },
- });
- });
-
afterEach(() => {
wrapperWithNoStubs.destroy();
});
- it('emits `fetch-suggestions` event on component after a delay when component emits `input` event', async () => {
- jest.useFakeTimers();
+ describe('when activeToken has been selected', () => {
+ beforeEach(() => {
+ wrapperWithNoStubs = createComponent({
+ props: {
+ ...mockProps,
+ getActiveTokenValue: () => ({ title: '' }),
+ suggestionsLoading: true,
+ },
+ stubs: { Portal: true },
+ });
+ });
+ it('does not emit `fetch-suggestions` event on component after a delay when component emits `input` event', async () => {
+ jest.useFakeTimers();
- wrapperWithNoStubs.find(GlFilteredSearchToken).vm.$emit('input', { data: 'foo' });
- await wrapperWithNoStubs.vm.$nextTick();
+ wrapperWithNoStubs.find(GlFilteredSearchToken).vm.$emit('input', { data: 'foo' });
+ await wrapperWithNoStubs.vm.$nextTick();
- jest.runAllTimers();
+ jest.runAllTimers();
- expect(wrapperWithNoStubs.emitted('fetch-suggestions')).toBeTruthy();
- expect(wrapperWithNoStubs.emitted('fetch-suggestions')[2]).toEqual(['foo']);
+ expect(wrapperWithNoStubs.emitted('fetch-suggestions')).toEqual([['']]);
+ });
+ });
+
+ describe('when activeToken has not been selected', () => {
+ beforeEach(() => {
+ wrapperWithNoStubs = createComponent({
+ stubs: { Portal: true },
+ });
+ });
+ it('emits `fetch-suggestions` event on component after a delay when component emits `input` event', async () => {
+ jest.useFakeTimers();
+
+ wrapperWithNoStubs.find(GlFilteredSearchToken).vm.$emit('input', { data: 'foo' });
+ await wrapperWithNoStubs.vm.$nextTick();
+
+ jest.runAllTimers();
+
+ expect(wrapperWithNoStubs.emitted('fetch-suggestions')).toBeTruthy();
+ expect(wrapperWithNoStubs.emitted('fetch-suggestions')[2]).toEqual(['foo']);
+ });
});
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
index 09eac636cae..f3e8b2d0c1b 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
@@ -42,7 +42,7 @@ function createComponent(options = {}) {
provide: {
portalName: 'fake target',
alignSuggestions: function fakeAlignSuggestions() {},
- suggestionsListClass: 'custom-class',
+ suggestionsListClass: () => 'custom-class',
},
stubs,
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
index c2d61fd9f05..36071c900df 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
@@ -48,7 +48,7 @@ function createComponent(options = {}) {
provide: {
portalName: 'fake target',
alignSuggestions: function fakeAlignSuggestions() {},
- suggestionsListClass: 'custom-class',
+ suggestionsListClass: () => 'custom-class',
},
stubs,
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js
index 68ed46fc3a2..6ee5d50d396 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js
@@ -1,15 +1,21 @@
-import { GlFilteredSearchToken, GlFilteredSearchTokenSegment } from '@gitlab/ui';
+import { GlFilteredSearchTokenSegment } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
+import searchEpicsQuery from '~/vue_shared/components/filtered_search_bar/queries/search_epics.query.graphql';
import EpicToken from '~/vue_shared/components/filtered_search_bar/tokens/epic_token.vue';
+import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
-import { mockEpicToken, mockEpics } from '../mock_data';
+import { mockEpicToken, mockEpics, mockGroupEpicsQueryResponse } from '../mock_data';
jest.mock('~/flash');
+Vue.use(VueApollo);
const defaultStubs = {
Portal: true,
@@ -21,31 +27,39 @@ const defaultStubs = {
},
};
-function createComponent(options = {}) {
- const {
- config = mockEpicToken,
- value = { data: '' },
- active = false,
- stubs = defaultStubs,
- } = options;
- return mount(EpicToken, {
- propsData: {
- config,
- value,
- active,
- },
- provide: {
- portalName: 'fake target',
- alignSuggestions: function fakeAlignSuggestions() {},
- suggestionsListClass: 'custom-class',
- },
- stubs,
- });
-}
-
describe('EpicToken', () => {
let mock;
let wrapper;
+ let fakeApollo;
+
+ const findBaseToken = () => wrapper.findComponent(BaseToken);
+
+ function createComponent(
+ options = {},
+ epicsQueryHandler = jest.fn().mockResolvedValue(mockGroupEpicsQueryResponse),
+ ) {
+ fakeApollo = createMockApollo([[searchEpicsQuery, epicsQueryHandler]]);
+ const {
+ config = mockEpicToken,
+ value = { data: '' },
+ active = false,
+ stubs = defaultStubs,
+ } = options;
+ return mount(EpicToken, {
+ apolloProvider: fakeApollo,
+ propsData: {
+ config,
+ value,
+ active,
+ },
+ provide: {
+ portalName: 'fake target',
+ alignSuggestions: function fakeAlignSuggestions() {},
+ suggestionsListClass: 'custom-class',
+ },
+ stubs,
+ });
+ }
beforeEach(() => {
mock = new MockAdapter(axios);
@@ -71,23 +85,20 @@ describe('EpicToken', () => {
describe('methods', () => {
describe('fetchEpicsBySearchTerm', () => {
- it('calls `config.fetchEpics` with provided searchTerm param', () => {
- jest.spyOn(wrapper.vm.config, 'fetchEpics');
+ it('calls fetchEpics with provided searchTerm param', () => {
+ jest.spyOn(wrapper.vm, 'fetchEpics');
- wrapper.vm.fetchEpicsBySearchTerm({ search: 'foo' });
+ findBaseToken().vm.$emit('fetch-suggestions', 'foo');
- expect(wrapper.vm.config.fetchEpics).toHaveBeenCalledWith({
- epicPath: '',
- search: 'foo',
- });
+ expect(wrapper.vm.fetchEpics).toHaveBeenCalledWith('foo');
});
it('sets response to `epics` when request is successful', async () => {
- jest.spyOn(wrapper.vm.config, 'fetchEpics').mockResolvedValue({
+ jest.spyOn(wrapper.vm, 'fetchEpics').mockResolvedValue({
data: mockEpics,
});
- wrapper.vm.fetchEpicsBySearchTerm({});
+ findBaseToken().vm.$emit('fetch-suggestions');
await waitForPromises();
@@ -95,9 +106,9 @@ describe('EpicToken', () => {
});
it('calls `createFlash` with flash error message when request fails', async () => {
- jest.spyOn(wrapper.vm.config, 'fetchEpics').mockRejectedValue({});
+ jest.spyOn(wrapper.vm, 'fetchEpics').mockRejectedValue({});
- wrapper.vm.fetchEpicsBySearchTerm({ search: 'foo' });
+ findBaseToken().vm.$emit('fetch-suggestions', 'foo');
await waitForPromises();
@@ -107,9 +118,9 @@ describe('EpicToken', () => {
});
it('sets `loading` to false when request completes', async () => {
- jest.spyOn(wrapper.vm.config, 'fetchEpics').mockRejectedValue({});
+ jest.spyOn(wrapper.vm, 'fetchEpics').mockRejectedValue({});
- wrapper.vm.fetchEpicsBySearchTerm({ search: 'foo' });
+ findBaseToken().vm.$emit('fetch-suggestions', 'foo');
await waitForPromises();
@@ -123,15 +134,15 @@ describe('EpicToken', () => {
beforeEach(async () => {
wrapper = createComponent({
- value: { data: `${mockEpics[0].group_full_path}::&${mockEpics[0].iid}` },
+ value: { data: `${mockEpics[0].title}::&${mockEpics[0].iid}` },
data: { epics: mockEpics },
});
await wrapper.vm.$nextTick();
});
- it('renders gl-filtered-search-token component', () => {
- expect(wrapper.find(GlFilteredSearchToken).exists()).toBe(true);
+ it('renders BaseToken component', () => {
+ expect(findBaseToken().exists()).toBe(true);
});
it('renders token item when value is selected', () => {
@@ -142,9 +153,9 @@ describe('EpicToken', () => {
});
it.each`
- value | valueType | tokenValueString
- ${`${mockEpics[0].group_full_path}::&${mockEpics[0].iid}`} | ${'string'} | ${`${mockEpics[0].title}::&${mockEpics[0].iid}`}
- ${`${mockEpics[1].group_full_path}::&${mockEpics[1].iid}`} | ${'number'} | ${`${mockEpics[1].title}::&${mockEpics[1].iid}`}
+ value | valueType | tokenValueString
+ ${`${mockEpics[0].title}::&${mockEpics[0].iid}`} | ${'string'} | ${`${mockEpics[0].title}::&${mockEpics[0].iid}`}
+ ${`${mockEpics[1].title}::&${mockEpics[1].iid}`} | ${'number'} | ${`${mockEpics[1].title}::&${mockEpics[1].iid}`}
`('renders token item when selection is a $valueType', async ({ value, tokenValueString }) => {
wrapper.setProps({
value: { data: value },
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js
index a609aaa1c4e..af90ee93543 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js
@@ -21,7 +21,7 @@ describe('IterationToken', () => {
provide: {
portalName: 'fake target',
alignSuggestions: function fakeAlignSuggestions() {},
- suggestionsListClass: 'custom-class',
+ suggestionsListClass: () => 'custom-class',
},
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
index a348344b9dd..f55fb2836e3 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
@@ -48,7 +48,7 @@ function createComponent(options = {}) {
provide: {
portalName: 'fake target',
alignSuggestions: function fakeAlignSuggestions() {},
- suggestionsListClass: 'custom-class',
+ suggestionsListClass: () => 'custom-class',
},
stubs,
listeners,
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
index bfb593bf82d..936841651d1 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
@@ -48,7 +48,7 @@ function createComponent(options = {}) {
provide: {
portalName: 'fake target',
alignSuggestions: function fakeAlignSuggestions() {},
- suggestionsListClass: 'custom-class',
+ suggestionsListClass: () => 'custom-class',
},
stubs,
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js
index e788c742736..4277899f8db 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js
@@ -19,7 +19,7 @@ describe('WeightToken', () => {
provide: {
portalName: 'fake target',
alignSuggestions: function fakeAlignSuggestions() {},
- suggestionsListClass: 'custom-class',
+ suggestionsListClass: () => 'custom-class',
},
});
diff --git a/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js b/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
index 2658fa4a706..f74b9b37197 100644
--- a/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
+++ b/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
@@ -94,10 +94,6 @@ describe('IssueAssigneesComponent', () => {
expect(vm.avatarUrlTitle(mockAssigneesList[0])).toBe('Assigned to Terrell Graham');
});
- it('renders component root element with class `issue-assignees`', () => {
- expect(wrapper.element.classList.contains('issue-assignees')).toBe(true);
- });
-
it('renders assignee', () => {
const data = findAvatars().wrappers.map((x) => ({
...x.props(),
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
index ba2450b56c9..9bc2aad1895 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
@@ -60,7 +60,7 @@ describe('Suggestion Diff component', () => {
expect(findHelpButton().exists()).toBe(true);
});
- it('renders apply suggestion and add to batch buttons', () => {
+ it('renders add to batch button when more than 1 suggestion', () => {
createComponent({
suggestionsCount: 2,
});
@@ -68,8 +68,7 @@ describe('Suggestion Diff component', () => {
const applyBtn = findApplyButton();
const addToBatchBtn = findAddToBatchButton();
- expect(applyBtn.exists()).toBe(true);
- expect(applyBtn.html().includes('Apply suggestion')).toBe(true);
+ expect(applyBtn.exists()).toBe(false);
expect(addToBatchBtn.exists()).toBe(true);
expect(addToBatchBtn.html().includes('Add suggestion to batch')).toBe(true);
@@ -85,7 +84,7 @@ describe('Suggestion Diff component', () => {
describe('when apply suggestion is clicked', () => {
beforeEach(() => {
- createComponent();
+ createComponent({ batchSuggestionsCount: 0 });
findApplyButton().vm.$emit('apply');
});
@@ -140,11 +139,11 @@ describe('Suggestion Diff component', () => {
describe('apply suggestions is clicked', () => {
it('emits applyBatch', () => {
- createComponent({ isBatched: true });
+ createComponent({ isBatched: true, batchSuggestionsCount: 2 });
- findApplyBatchButton().vm.$emit('click');
+ findApplyButton().vm.$emit('apply');
- expect(wrapper.emitted().applyBatch).toEqual([[]]);
+ expect(wrapper.emitted().applyBatch).toEqual([[undefined]]);
});
});
@@ -155,23 +154,24 @@ describe('Suggestion Diff component', () => {
isBatched: true,
});
- const applyBatchBtn = findApplyBatchButton();
+ const applyBatchBtn = findApplyButton();
const removeFromBatchBtn = findRemoveFromBatchButton();
expect(removeFromBatchBtn.exists()).toBe(true);
expect(removeFromBatchBtn.html().includes('Remove from batch')).toBe(true);
expect(applyBatchBtn.exists()).toBe(true);
- expect(applyBatchBtn.html().includes('Apply suggestions')).toBe(true);
+ expect(applyBatchBtn.html().includes('Apply suggestion')).toBe(true);
expect(applyBatchBtn.html().includes(String('9'))).toBe(true);
});
it('hides add to batch and apply buttons', () => {
createComponent({
isBatched: true,
+ batchSuggestionsCount: 9,
});
- expect(findApplyButton().exists()).toBe(false);
+ expect(findApplyButton().exists()).toBe(true);
expect(findAddToBatchButton().exists()).toBe(false);
});
@@ -215,9 +215,8 @@ describe('Suggestion Diff component', () => {
});
it('disables apply suggestion and hides add to batch button', () => {
- expect(findApplyButton().exists()).toBe(true);
+ expect(findApplyButton().exists()).toBe(false);
expect(findAddToBatchButton().exists()).toBe(false);
- expect(findApplyButton().attributes('disabled')).toBe('true');
});
});
@@ -225,7 +224,7 @@ describe('Suggestion Diff component', () => {
const findTooltip = () => getBinding(findApplyButton().element, 'gl-tooltip');
it('renders correct tooltip message when button is applicable', () => {
- createComponent();
+ createComponent({ batchSuggestionsCount: 0 });
const tooltip = findTooltip();
expect(tooltip.modifiers.viewport).toBe(true);
@@ -234,7 +233,7 @@ describe('Suggestion Diff component', () => {
it('renders the inapplicable reason in the tooltip when button is not applicable', () => {
const inapplicableReason = 'lorem';
- createComponent({ canApply: false, inapplicableReason });
+ createComponent({ canApply: false, inapplicableReason, batchSuggestionsCount: 0 });
const tooltip = findTooltip();
expect(tooltip.modifiers.viewport).toBe(true);
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js
index 5bd6bda2d2c..af27e953776 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js
@@ -77,7 +77,7 @@ describe('Suggestion Diff component', () => {
it.each`
event | childArgs | args
${'apply'} | ${['test-event']} | ${[{ callback: 'test-event', suggestionId }]}
- ${'applyBatch'} | ${[]} | ${[]}
+ ${'applyBatch'} | ${['test-event']} | ${['test-event']}
${'addToBatch'} | ${[]} | ${[suggestionId]}
${'removeFromBatch'} | ${[]} | ${[suggestionId]}
`('emits $event event on sugestion diff header $event', ({ event, childArgs, args }) => {
diff --git a/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js b/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js
index ab028ea52b7..1ed7844b395 100644
--- a/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js
+++ b/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js
@@ -1,4 +1,6 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
+// eslint-disable-next-line import/no-deprecated
+import { getJSONFixture } from 'helpers/fixtures';
import { trimText } from 'helpers/text_helper';
import ProjectAvatar from '~/vue_shared/components/deprecated_project_avatar/default.vue';
import ProjectListItem from '~/vue_shared/components/project_selector/project_list_item.vue';
@@ -11,6 +13,7 @@ describe('ProjectListItem component', () => {
let vm;
let options;
+ // eslint-disable-next-line import/no-deprecated
const project = getJSONFixture('static/projects.json')[0];
beforeEach(() => {
diff --git a/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js b/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js
index 06b00a8e196..1f97d3ff3fa 100644
--- a/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js
+++ b/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js
@@ -2,6 +2,8 @@ import { GlSearchBoxByType, GlInfiniteScroll } from '@gitlab/ui';
import { mount, createLocalVue } from '@vue/test-utils';
import { head } from 'lodash';
import Vue from 'vue';
+// eslint-disable-next-line import/no-deprecated
+import { getJSONFixture } from 'helpers/fixtures';
import { trimText } from 'helpers/text_helper';
import ProjectListItem from '~/vue_shared/components/project_selector/project_list_item.vue';
import ProjectSelector from '~/vue_shared/components/project_selector/project_selector.vue';
@@ -11,6 +13,7 @@ const localVue = createLocalVue();
describe('ProjectSelector component', () => {
let wrapper;
let vm;
+ // eslint-disable-next-line import/no-deprecated
const allProjects = getJSONFixture('static/projects.json');
const searchResults = allProjects.slice(0, 5);
let selected = [];
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
index 14e0c8a2278..d9b7cd5afa2 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
@@ -157,9 +157,9 @@ describe('LabelsSelect Mutations', () => {
beforeEach(() => {
labels = [
- { id: 1, title: 'scoped::test', set: true },
- { id: 2, set: false, title: 'scoped::one' },
- { id: 3, title: '' },
+ { id: 1, title: 'scoped' },
+ { id: 2, title: 'scoped::one', set: false },
+ { id: 3, title: 'scoped::test', set: true },
{ id: 4, title: '' },
];
});
@@ -189,9 +189,9 @@ describe('LabelsSelect Mutations', () => {
});
expect(state.labels).toEqual([
- { id: 1, title: 'scoped::test', set: false },
- { id: 2, set: true, title: 'scoped::one', touched: true },
- { id: 3, title: '' },
+ { id: 1, title: 'scoped' },
+ { id: 2, title: 'scoped::one', set: true, touched: true },
+ { id: 3, title: 'scoped::test', set: false },
{ id: 4, title: '' },
]);
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js
index 843298a1406..8931584e12c 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js
@@ -5,13 +5,14 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
+import { IssuableType } from '~/issue_show/constants';
+import { labelsQueries } from '~/sidebar/constants';
import DropdownContentsCreateView from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view.vue';
import createLabelMutation from '~/vue_shared/components/sidebar/labels_select_widget/graphql/create_label.mutation.graphql';
-import projectLabelsQuery from '~/vue_shared/components/sidebar/labels_select_widget/graphql/project_labels.query.graphql';
import {
mockSuggestedColors,
createLabelSuccessfulResponse,
- labelsQueryResponse,
+ workspaceLabelsQueryResponse,
} from './mock_data';
jest.mock('~/flash');
@@ -47,11 +48,14 @@ describe('DropdownContentsCreateView', () => {
findAllColors().at(0).vm.$emit('click', new Event('mouseclick'));
};
- const createComponent = ({ mutationHandler = createLabelSuccessHandler } = {}) => {
+ const createComponent = ({
+ mutationHandler = createLabelSuccessHandler,
+ issuableType = IssuableType.Issue,
+ } = {}) => {
const mockApollo = createMockApollo([[createLabelMutation, mutationHandler]]);
mockApollo.clients.defaultClient.cache.writeQuery({
- query: projectLabelsQuery,
- data: labelsQueryResponse.data,
+ query: labelsQueries[issuableType].workspaceQuery,
+ data: workspaceLabelsQueryResponse.data,
variables: {
fullPath: '',
searchTerm: '',
@@ -61,6 +65,10 @@ describe('DropdownContentsCreateView', () => {
wrapper = shallowMount(DropdownContentsCreateView, {
localVue,
apolloProvider: mockApollo,
+ propsData: {
+ issuableType,
+ fullPath: '',
+ },
});
};
@@ -135,15 +143,6 @@ describe('DropdownContentsCreateView', () => {
expect(findCreateButton().props('disabled')).toBe(false);
});
- it('calls a mutation with correct parameters on Create button click', () => {
- findCreateButton().vm.$emit('click');
- expect(createLabelSuccessHandler).toHaveBeenCalledWith({
- color: '#009966',
- projectPath: '',
- title: 'Test title',
- });
- });
-
it('renders a loader spinner after Create button click', async () => {
findCreateButton().vm.$emit('click');
await nextTick();
@@ -162,6 +161,30 @@ describe('DropdownContentsCreateView', () => {
});
});
+ it('calls a mutation with `projectPath` variable on the issue', () => {
+ createComponent();
+ fillLabelAttributes();
+ findCreateButton().vm.$emit('click');
+
+ expect(createLabelSuccessHandler).toHaveBeenCalledWith({
+ color: '#009966',
+ projectPath: '',
+ title: 'Test title',
+ });
+ });
+
+ it('calls a mutation with `groupPath` variable on the epic', () => {
+ createComponent({ issuableType: IssuableType.Epic });
+ fillLabelAttributes();
+ findCreateButton().vm.$emit('click');
+
+ expect(createLabelSuccessHandler).toHaveBeenCalledWith({
+ color: '#009966',
+ groupPath: '',
+ title: 'Test title',
+ });
+ });
+
it('calls createFlash is mutation has a user-recoverable error', async () => {
createComponent({ mutationHandler: createLabelUserRecoverableErrorHandler });
fillLabelAttributes();
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js
index 537bbc8e71e..fac3331a2b8 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js
@@ -1,36 +1,43 @@
-import { GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
+import {
+ GlLoadingIcon,
+ GlSearchBoxByType,
+ GlDropdownItem,
+ GlIntersectionObserver,
+} from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
+import { IssuableType } from '~/issue_show/constants';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import { DropdownVariant } from '~/vue_shared/components/sidebar/labels_select_widget/constants';
import DropdownContentsLabelsView from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view.vue';
import projectLabelsQuery from '~/vue_shared/components/sidebar/labels_select_widget/graphql/project_labels.query.graphql';
import LabelItem from '~/vue_shared/components/sidebar/labels_select_widget/label_item.vue';
-import { mockConfig, labelsQueryResponse } from './mock_data';
+import { mockConfig, workspaceLabelsQueryResponse } from './mock_data';
jest.mock('~/flash');
const localVue = createLocalVue();
localVue.use(VueApollo);
-const selectedLabels = [
+const localSelectedLabels = [
{
- id: 28,
- title: 'Bug',
- description: 'Label for bugs',
- color: '#FF0000',
- textColor: '#FFFFFF',
+ color: '#2f7b2e',
+ description: null,
+ id: 'gid://gitlab/ProjectLabel/2',
+ title: 'Label2',
},
];
describe('DropdownContentsLabelsView', () => {
let wrapper;
- const successfulQueryHandler = jest.fn().mockResolvedValue(labelsQueryResponse);
+ const successfulQueryHandler = jest.fn().mockResolvedValue(workspaceLabelsQueryResponse);
+
+ const findFirstLabel = () => wrapper.findAllComponents(GlDropdownItem).at(0);
const createComponent = ({
initialState = mockConfig,
@@ -43,14 +50,13 @@ describe('DropdownContentsLabelsView', () => {
localVue,
apolloProvider: mockApollo,
provide: {
- projectPath: 'test',
- iid: 1,
variant: DropdownVariant.Sidebar,
...injected,
},
propsData: {
...initialState,
- selectedLabels,
+ localSelectedLabels,
+ issuableType: IssuableType.Issue,
},
stubs: {
GlSearchBoxByType,
@@ -65,23 +71,31 @@ describe('DropdownContentsLabelsView', () => {
const findSearchInput = () => wrapper.findComponent(GlSearchBoxByType);
const findLabels = () => wrapper.findAllComponents(LabelItem);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findObserver = () => wrapper.findComponent(GlIntersectionObserver);
const findLabelsList = () => wrapper.find('[data-testid="labels-list"]');
const findNoResultsMessage = () => wrapper.find('[data-testid="no-results"]');
+ async function makeObserverAppear() {
+ await findObserver().vm.$emit('appear');
+ }
+
describe('when loading labels', () => {
it('renders disabled search input field', async () => {
createComponent();
+ await makeObserverAppear();
expect(findSearchInput().props('disabled')).toBe(true);
});
it('renders loading icon', async () => {
createComponent();
+ await makeObserverAppear();
expect(findLoadingIcon().exists()).toBe(true);
});
it('does not render labels list', async () => {
createComponent();
+ await makeObserverAppear();
expect(findLabelsList().exists()).toBe(false);
});
});
@@ -89,6 +103,7 @@ describe('DropdownContentsLabelsView', () => {
describe('when labels are loaded', () => {
beforeEach(async () => {
createComponent();
+ await makeObserverAppear();
await waitForPromises();
});
@@ -118,6 +133,7 @@ describe('DropdownContentsLabelsView', () => {
},
}),
});
+ await makeObserverAppear();
findSearchInput().vm.$emit('input', '123');
await waitForPromises();
await nextTick();
@@ -127,8 +143,26 @@ describe('DropdownContentsLabelsView', () => {
it('calls `createFlash` when fetching labels failed', async () => {
createComponent({ queryHandler: jest.fn().mockRejectedValue('Houston, we have a problem!') });
+ await makeObserverAppear();
jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
await waitForPromises();
+
expect(createFlash).toHaveBeenCalled();
});
+
+ it('emits an `input` event on label click', async () => {
+ createComponent();
+ await makeObserverAppear();
+ await waitForPromises();
+ findFirstLabel().trigger('click');
+
+ expect(wrapper.emitted('input')[0][0]).toEqual(expect.arrayContaining(localSelectedLabels));
+ });
+
+ it('does not trigger query when component did not appear', () => {
+ createComponent();
+ expect(findLoadingIcon().exists()).toBe(false);
+ expect(findLabelsList().exists()).toBe(false);
+ expect(successfulQueryHandler).not.toHaveBeenCalled();
+ });
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_spec.js
index a1b40a891ec..36704ac5ef3 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_spec.js
@@ -1,6 +1,5 @@
-import { GlDropdown } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-
+import { nextTick } from 'vue';
import { DropdownVariant } from '~/vue_shared/components/sidebar/labels_select_widget/constants';
import DropdownContents from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_contents.vue';
import DropdownContentsCreateView from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view.vue';
@@ -8,10 +7,26 @@ import DropdownContentsLabelsView from '~/vue_shared/components/sidebar/labels_s
import { mockLabels } from './mock_data';
+const showDropdown = jest.fn();
+
+const GlDropdownStub = {
+ template: `
+ <div data-testid="dropdown">
+ <slot name="header"></slot>
+ <slot></slot>
+ <slot name="footer"></slot>
+ </div>
+ `,
+ methods: {
+ show: showDropdown,
+ hide: jest.fn(),
+ },
+};
+
describe('DropdownContent', () => {
let wrapper;
- const createComponent = ({ props = {}, injected = {} } = {}) => {
+ const createComponent = ({ props = {}, injected = {}, data = {} } = {}) => {
wrapper = shallowMount(DropdownContents, {
propsData: {
labelsCreateTitle: 'test',
@@ -22,38 +37,112 @@ describe('DropdownContent', () => {
footerManageLabelTitle: 'manage',
dropdownButtonText: 'Labels',
variant: 'sidebar',
+ issuableType: 'issue',
+ fullPath: 'test',
...props,
},
+ data() {
+ return {
+ ...data,
+ };
+ },
provide: {
allowLabelCreate: true,
labelsManagePath: 'foo/bar',
...injected,
},
stubs: {
- GlDropdown,
+ GlDropdown: GlDropdownStub,
},
});
};
- beforeEach(() => {
- createComponent();
- });
-
afterEach(() => {
wrapper.destroy();
});
+ const findCreateView = () => wrapper.findComponent(DropdownContentsCreateView);
+ const findLabelsView = () => wrapper.findComponent(DropdownContentsLabelsView);
+ const findDropdown = () => wrapper.findComponent(GlDropdownStub);
+
const findDropdownFooter = () => wrapper.find('[data-testid="dropdown-footer"]');
+ const findDropdownHeader = () => wrapper.find('[data-testid="dropdown-header"]');
const findCreateLabelButton = () => wrapper.find('[data-testid="create-label-button"]');
const findGoBackButton = () => wrapper.find('[data-testid="go-back-button"]');
+ it('calls dropdown `show` method on `isVisible` prop change', async () => {
+ createComponent();
+ await wrapper.setProps({
+ isVisible: true,
+ });
+
+ expect(findDropdown().emitted('show')).toBeUndefined();
+ });
+
+ it('does not emit `setLabels` event on dropdown hide if labels did not change', () => {
+ createComponent();
+ findDropdown().vm.$emit('hide');
+
+ expect(wrapper.emitted('setLabels')).toBeUndefined();
+ });
+
+ it('emits `setLabels` event on dropdown hide if labels changed on non-sidebar widget', async () => {
+ createComponent({ props: { variant: DropdownVariant.Standalone } });
+ const updatedLabel = {
+ id: 28,
+ title: 'Bug',
+ description: 'Label for bugs',
+ color: '#FF0000',
+ textColor: '#FFFFFF',
+ };
+ findLabelsView().vm.$emit('input', [updatedLabel]);
+ await nextTick();
+ findDropdown().vm.$emit('hide');
+
+ expect(wrapper.emitted('setLabels')).toEqual([[[updatedLabel]]]);
+ });
+
+ it('emits `setLabels` event on visibility change if labels changed on sidebar widget', async () => {
+ createComponent({ props: { variant: DropdownVariant.Standalone, isVisible: true } });
+ const updatedLabel = {
+ id: 28,
+ title: 'Bug',
+ description: 'Label for bugs',
+ color: '#FF0000',
+ textColor: '#FFFFFF',
+ };
+ findLabelsView().vm.$emit('input', [updatedLabel]);
+ wrapper.setProps({ isVisible: false });
+ await nextTick();
+
+ expect(wrapper.emitted('setLabels')).toEqual([[[updatedLabel]]]);
+ });
+
+ it('does not render header on standalone variant', () => {
+ createComponent({ props: { variant: DropdownVariant.Standalone } });
+
+ expect(findDropdownHeader().exists()).toBe(false);
+ });
+
+ it('renders header on embedded variant', () => {
+ createComponent({ props: { variant: DropdownVariant.Embedded } });
+
+ expect(findDropdownHeader().exists()).toBe(true);
+ });
+
+ it('renders header on sidebar variant', () => {
+ createComponent();
+
+ expect(findDropdownHeader().exists()).toBe(true);
+ });
+
describe('Create view', () => {
beforeEach(() => {
- wrapper.vm.toggleDropdownContentsCreateView();
+ createComponent({ data: { showDropdownContentsCreateView: true } });
});
it('renders create view when `showDropdownContentsCreateView` prop is `true`', () => {
- expect(wrapper.findComponent(DropdownContentsCreateView).exists()).toBe(true);
+ expect(findCreateView().exists()).toBe(true);
});
it('does not render footer', () => {
@@ -67,11 +156,31 @@ describe('DropdownContent', () => {
it('renders go back button', () => {
expect(findGoBackButton().exists()).toBe(true);
});
+
+ it('changes the view to Labels view on back button click', async () => {
+ findGoBackButton().vm.$emit('click', new MouseEvent('click'));
+ await nextTick();
+
+ expect(findCreateView().exists()).toBe(false);
+ expect(findLabelsView().exists()).toBe(true);
+ });
+
+ it('changes the view to Labels view on `hideCreateView` event', async () => {
+ findCreateView().vm.$emit('hideCreateView');
+ await nextTick();
+
+ expect(findCreateView().exists()).toBe(false);
+ expect(findLabelsView().exists()).toBe(true);
+ });
});
describe('Labels view', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
it('renders labels view when `showDropdownContentsCreateView` when `showDropdownContentsCreateView` prop is `false`', () => {
- expect(wrapper.findComponent(DropdownContentsLabelsView).exists()).toBe(true);
+ expect(findLabelsView().exists()).toBe(true);
});
it('renders footer on sidebar dropdown', () => {
@@ -109,19 +218,12 @@ describe('DropdownContent', () => {
expect(findCreateLabelButton().exists()).toBe(true);
});
- it('triggers `toggleDropdownContent` method on create label button click', () => {
- jest.spyOn(wrapper.vm, 'toggleDropdownContent').mockImplementation(() => {});
+ it('changes the view to Create on create label button click', async () => {
findCreateLabelButton().trigger('click');
- expect(wrapper.vm.toggleDropdownContent).toHaveBeenCalled();
+ await nextTick();
+ expect(findLabelsView().exists()).toBe(false);
});
});
});
-
- describe('template', () => {
- it('renders component container element with classes `gl-w-full gl-mt-2` and no styles', () => {
- expect(wrapper.attributes('class')).toContain('gl-w-full gl-mt-2');
- expect(wrapper.attributes('style')).toBeUndefined();
- });
- });
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js
index a18511fa21d..b5441d711a5 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js
@@ -1,28 +1,55 @@
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import { IssuableType } from '~/issue_show/constants';
import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
import DropdownContents from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_contents.vue';
import DropdownValue from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_value.vue';
-import DropdownValueCollapsed from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_value_collapsed.vue';
+import issueLabelsQuery from '~/vue_shared/components/sidebar/labels_select_widget/graphql/issue_labels.query.graphql';
import LabelsSelectRoot from '~/vue_shared/components/sidebar/labels_select_widget/labels_select_root.vue';
+import { mockConfig, issuableLabelsQueryResponse } from './mock_data';
-import { mockConfig } from './mock_data';
+jest.mock('~/flash');
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+const successfulQueryHandler = jest.fn().mockResolvedValue(issuableLabelsQueryResponse);
+const errorQueryHandler = jest.fn().mockRejectedValue('Houston, we have a problem');
describe('LabelsSelectRoot', () => {
let wrapper;
- const createComponent = (config = mockConfig, slots = {}) => {
+ const findSidebarEditableItem = () => wrapper.findComponent(SidebarEditableItem);
+ const findDropdownValue = () => wrapper.findComponent(DropdownValue);
+ const findDropdownContents = () => wrapper.findComponent(DropdownContents);
+
+ const createComponent = ({
+ config = mockConfig,
+ slots = {},
+ queryHandler = successfulQueryHandler,
+ } = {}) => {
+ const mockApollo = createMockApollo([[issueLabelsQuery, queryHandler]]);
+
wrapper = shallowMount(LabelsSelectRoot, {
slots,
- propsData: config,
+ apolloProvider: mockApollo,
+ localVue,
+ propsData: {
+ ...config,
+ issuableType: IssuableType.Issue,
+ },
stubs: {
- DropdownContents,
SidebarEditableItem,
},
provide: {
- iid: '1',
- projectPath: 'test',
canUpdate: true,
allowLabelEdit: true,
+ allowLabelCreate: true,
+ labelsManagePath: 'test',
},
});
};
@@ -42,33 +69,63 @@ describe('LabelsSelectRoot', () => {
${'embedded'} | ${'is-embedded'}
`(
'renders component root element with CSS class `$cssClass` when `state.variant` is "$variant"',
- ({ variant, cssClass }) => {
+ async ({ variant, cssClass }) => {
createComponent({
- ...mockConfig,
- variant,
+ config: { ...mockConfig, variant },
});
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.classes()).toContain(cssClass);
- });
+ await nextTick();
+ expect(wrapper.classes()).toContain(cssClass);
},
);
- it('renders `dropdown-value-collapsed` component when `allowLabelCreate` prop is `true`', async () => {
- createComponent();
- await wrapper.vm.$nextTick;
- expect(wrapper.find(DropdownValueCollapsed).exists()).toBe(true);
- });
+ describe('if dropdown variant is `sidebar`', () => {
+ it('renders sidebar editable item', () => {
+ createComponent();
+ expect(findSidebarEditableItem().exists()).toBe(true);
+ });
+
+ it('passes true `loading` prop to sidebar editable item when loading labels', () => {
+ createComponent();
+ expect(findSidebarEditableItem().props('loading')).toBe(true);
+ });
- it('renders `dropdown-value` component', async () => {
- createComponent(mockConfig, {
- default: 'None',
+ describe('when labels are fetched successfully', () => {
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('passes true `loading` prop to sidebar editable item', () => {
+ expect(findSidebarEditableItem().props('loading')).toBe(false);
+ });
+
+ it('renders dropdown value component when query labels is resolved', () => {
+ expect(findDropdownValue().exists()).toBe(true);
+ expect(findDropdownValue().props('selectedLabels')).toEqual(
+ issuableLabelsQueryResponse.data.workspace.issuable.labels.nodes,
+ );
+ });
+
+ it('emits `onLabelRemove` event on dropdown value label remove event', () => {
+ const label = { id: 'gid://gitlab/ProjectLabel/1' };
+ findDropdownValue().vm.$emit('onLabelRemove', label);
+ expect(wrapper.emitted('onLabelRemove')).toEqual([[label]]);
+ });
});
- await wrapper.vm.$nextTick;
- const valueComp = wrapper.find(DropdownValue);
+ it('creates flash with error message when query is rejected', async () => {
+ createComponent({ queryHandler: errorQueryHandler });
+ await waitForPromises();
+ expect(createFlash).toHaveBeenCalledWith({ message: 'Error fetching labels.' });
+ });
+ });
+
+ it('emits `updateSelectedLabels` event on dropdown contents `setLabels` event', async () => {
+ const label = { id: 'gid://gitlab/ProjectLabel/1' };
+ createComponent();
- expect(valueComp.exists()).toBe(true);
- expect(valueComp.text()).toBe('None');
+ findDropdownContents().vm.$emit('setLabels', [label]);
+ expect(wrapper.emitted('updateSelectedLabels')).toEqual([[[label]]]);
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/mock_data.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/mock_data.js
index fceaabec2d0..23a457848d9 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/mock_data.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/mock_data.js
@@ -34,6 +34,8 @@ export const mockLabels = [
];
export const mockConfig = {
+ iid: '1',
+ fullPath: 'test',
allowMultiselect: true,
labelsListTitle: 'Assign labels',
labelsCreateTitle: 'Create label',
@@ -86,7 +88,7 @@ export const createLabelSuccessfulResponse = {
},
};
-export const labelsQueryResponse = {
+export const workspaceLabelsQueryResponse = {
data: {
workspace: {
labels: {
@@ -108,3 +110,23 @@ export const labelsQueryResponse = {
},
},
};
+
+export const issuableLabelsQueryResponse = {
+ data: {
+ workspace: {
+ issuable: {
+ id: '1',
+ labels: {
+ nodes: [
+ {
+ color: '#330066',
+ description: null,
+ id: 'gid://gitlab/ProjectLabel/1',
+ title: 'Label1',
+ },
+ ],
+ },
+ },
+ },
+ },
+};
diff --git a/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap b/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap
index af4fa462cbf..0f1e118d44c 100644
--- a/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap
+++ b/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap
@@ -45,6 +45,7 @@ exports[`Upload dropzone component correctly overrides description and drop mess
>
<div
class="mw-50 gl-text-center"
+ style="display: none;"
>
<h3
class=""
@@ -61,7 +62,6 @@ exports[`Upload dropzone component correctly overrides description and drop mess
<div
class="mw-50 gl-text-center"
- style="display: none;"
>
<h3
class=""
@@ -146,7 +146,6 @@ exports[`Upload dropzone component when dragging renders correct template when d
<div
class="mw-50 gl-text-center"
- style=""
>
<h3
class=""
@@ -231,7 +230,6 @@ exports[`Upload dropzone component when dragging renders correct template when d
<div
class="mw-50 gl-text-center"
- style=""
>
<h3
class=""
@@ -299,6 +297,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
>
<div
class="mw-50 gl-text-center"
+ style=""
>
<h3
class=""
@@ -383,6 +382,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
>
<div
class="mw-50 gl-text-center"
+ style=""
>
<h3
class=""
@@ -467,6 +467,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
>
<div
class="mw-50 gl-text-center"
+ style=""
>
<h3
class=""
@@ -551,6 +552,7 @@ exports[`Upload dropzone component when no slot provided renders default dropzon
>
<div
class="mw-50 gl-text-center"
+ style="display: none;"
>
<h3
class=""
@@ -567,7 +569,6 @@ exports[`Upload dropzone component when no slot provided renders default dropzon
<div
class="mw-50 gl-text-center"
- style="display: none;"
>
<h3
class=""
@@ -603,6 +604,7 @@ exports[`Upload dropzone component when slot provided renders dropzone with slot
>
<div
class="mw-50 gl-text-center"
+ style="display: none;"
>
<h3
class=""
@@ -619,7 +621,6 @@ exports[`Upload dropzone component when slot provided renders dropzone with slot
<div
class="mw-50 gl-text-center"
- style="display: none;"
>
<h3
class=""
diff --git a/spec/frontend/vue_shared/components/user_deletion_obstacles/user_deletion_obstacles_list_spec.js b/spec/frontend/vue_shared/components/user_deletion_obstacles/user_deletion_obstacles_list_spec.js
new file mode 100644
index 00000000000..a92f058f311
--- /dev/null
+++ b/spec/frontend/vue_shared/components/user_deletion_obstacles/user_deletion_obstacles_list_spec.js
@@ -0,0 +1,116 @@
+import { GlLink, GlSprintf } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { OBSTACLE_TYPES } from '~/vue_shared/components/user_deletion_obstacles/constants';
+import UserDeletionObstaclesList from '~/vue_shared/components/user_deletion_obstacles/user_deletion_obstacles_list.vue';
+
+const mockSchedules = [
+ {
+ type: OBSTACLE_TYPES.oncallSchedules,
+ name: 'Schedule 1',
+ url: 'http://gitlab.com/gitlab-org/gitlab-shell/-/oncall_schedules',
+ projectName: 'Shell',
+ projectUrl: 'http://gitlab.com/gitlab-org/gitlab-shell/',
+ },
+ {
+ type: OBSTACLE_TYPES.oncallSchedules,
+ name: 'Schedule 2',
+ url: 'http://gitlab.com/gitlab-org/gitlab-ui/-/oncall_schedules',
+ projectName: 'UI',
+ projectUrl: 'http://gitlab.com/gitlab-org/gitlab-ui/',
+ },
+];
+const mockPolicies = [
+ {
+ type: OBSTACLE_TYPES.escalationPolicies,
+ name: 'Policy 1',
+ url: 'http://gitlab.com/gitlab-org/gitlab-ui/-/escalation-policies',
+ projectName: 'UI',
+ projectUrl: 'http://gitlab.com/gitlab-org/gitlab-ui/',
+ },
+];
+const mockObstacles = mockSchedules.concat(mockPolicies);
+
+const userName = "O'User";
+
+describe('User deletion obstacles list', () => {
+ let wrapper;
+
+ function createComponent(props) {
+ wrapper = extendedWrapper(
+ shallowMount(UserDeletionObstaclesList, {
+ propsData: {
+ obstacles: mockObstacles,
+ userName,
+ ...props,
+ },
+ stubs: {
+ GlSprintf,
+ },
+ }),
+ );
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findLinks = () => wrapper.findAllComponents(GlLink);
+ const findTitle = () => wrapper.findByTestId('title');
+ const findFooter = () => wrapper.findByTestId('footer');
+ const findObstacles = () => wrapper.findByTestId('obstacles-list');
+
+ describe.each`
+ isCurrentUser | titleText | footerText
+ ${true} | ${'You are currently a part of:'} | ${'Removing yourself may put your on-call team at risk of missing a notification.'}
+ ${false} | ${`User ${userName} is currently part of:`} | ${'Removing this user may put their on-call team at risk of missing a notification.'}
+ `('when current user', ({ isCurrentUser, titleText, footerText }) => {
+ it(`${isCurrentUser ? 'is' : 'is not'} a part of on-call management`, async () => {
+ createComponent({
+ isCurrentUser,
+ });
+
+ expect(findTitle().text()).toBe(titleText);
+ expect(findFooter().text()).toBe(footerText);
+ });
+ });
+
+ describe.each(mockObstacles)(
+ 'renders all obstacles',
+ ({ type, name, url, projectName, projectUrl }) => {
+ it(`includes the project name and link for ${name}`, () => {
+ createComponent({ obstacles: [{ type, name, url, projectName, projectUrl }] });
+ const msg = findObstacles().text();
+
+ expect(msg).toContain(`in Project ${projectName}`);
+ expect(findLinks().at(1).attributes('href')).toBe(projectUrl);
+ });
+ },
+ );
+
+ describe.each(mockSchedules)(
+ 'renders on-call schedules',
+ ({ type, name, url, projectName, projectUrl }) => {
+ it(`includes the schedule name and link for ${name}`, () => {
+ createComponent({ obstacles: [{ type, name, url, projectName, projectUrl }] });
+ const msg = findObstacles().text();
+
+ expect(msg).toContain(`On-call schedule ${name}`);
+ expect(findLinks().at(0).attributes('href')).toBe(url);
+ });
+ },
+ );
+
+ describe.each(mockPolicies)(
+ 'renders escalation policies',
+ ({ type, name, url, projectName, projectUrl }) => {
+ it(`includes the policy name and link for ${name}`, () => {
+ createComponent({ obstacles: [{ type, name, url, projectName, projectUrl }] });
+ const msg = findObstacles().text();
+
+ expect(msg).toContain(`Escalation policy ${name}`);
+ expect(findLinks().at(0).attributes('href')).toBe(url);
+ });
+ },
+ );
+});
diff --git a/spec/frontend/vue_shared/components/user_deletion_obstacles/utils_spec.js b/spec/frontend/vue_shared/components/user_deletion_obstacles/utils_spec.js
new file mode 100644
index 00000000000..99f739098f7
--- /dev/null
+++ b/spec/frontend/vue_shared/components/user_deletion_obstacles/utils_spec.js
@@ -0,0 +1,43 @@
+import { OBSTACLE_TYPES } from '~/vue_shared/components/user_deletion_obstacles/constants';
+import { parseUserDeletionObstacles } from '~/vue_shared/components/user_deletion_obstacles/utils';
+
+describe('parseUserDeletionObstacles', () => {
+ const mockObstacles = [{ name: 'Obstacle' }];
+ const expectedSchedule = { name: 'Obstacle', type: OBSTACLE_TYPES.oncallSchedules };
+ const expectedPolicy = { name: 'Obstacle', type: OBSTACLE_TYPES.escalationPolicies };
+
+ it('is undefined when user is not available', () => {
+ expect(parseUserDeletionObstacles()).toHaveLength(0);
+ });
+
+ it('is empty when obstacles are not available for user', () => {
+ expect(parseUserDeletionObstacles({})).toHaveLength(0);
+ });
+
+ it('is empty when user has no obstacles to deletion', () => {
+ const input = { oncallSchedules: [], escalationPolicies: [] };
+
+ expect(parseUserDeletionObstacles(input)).toHaveLength(0);
+ });
+
+ it('returns obstacles with type when user is part of on-call schedules', () => {
+ const input = { oncallSchedules: mockObstacles, escalationPolicies: [] };
+ const expectedOutput = [expectedSchedule];
+
+ expect(parseUserDeletionObstacles(input)).toEqual(expectedOutput);
+ });
+
+ it('returns obstacles with type when user is part of escalation policies', () => {
+ const input = { oncallSchedules: [], escalationPolicies: mockObstacles };
+ const expectedOutput = [expectedPolicy];
+
+ expect(parseUserDeletionObstacles(input)).toEqual(expectedOutput);
+ });
+
+ it('returns obstacles with type when user have every obstacle type', () => {
+ const input = { oncallSchedules: mockObstacles, escalationPolicies: mockObstacles };
+ const expectedOutput = [expectedSchedule, expectedPolicy];
+
+ expect(parseUserDeletionObstacles(input)).toEqual(expectedOutput);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
index 926223e0670..09633daf587 100644
--- a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
+++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
@@ -9,6 +9,7 @@ const DEFAULT_PROPS = {
username: 'root',
name: 'Administrator',
location: 'Vienna',
+ localTime: '2:30 PM',
bot: false,
bio: null,
workInformation: null,
@@ -31,10 +32,11 @@ describe('User Popover Component', () => {
wrapper.destroy();
});
- const findUserStatus = () => wrapper.find('.js-user-status');
+ const findUserStatus = () => wrapper.findByTestId('user-popover-status');
const findTarget = () => document.querySelector('.js-user-link');
const findUserName = () => wrapper.find(UserNameWithStatus);
const findSecurityBotDocsLink = () => wrapper.findByTestId('user-popover-bot-docs-link');
+ const findUserLocalTime = () => wrapper.findByTestId('user-popover-local-time');
const createWrapper = (props = {}, options = {}) => {
wrapper = mountExtended(UserPopover, {
@@ -71,7 +73,6 @@ describe('User Popover Component', () => {
expect(wrapper.text()).toContain(DEFAULT_PROPS.user.name);
expect(wrapper.text()).toContain(DEFAULT_PROPS.user.username);
- expect(wrapper.text()).toContain(DEFAULT_PROPS.user.location);
});
it('shows icon for location', () => {
@@ -164,6 +165,25 @@ describe('User Popover Component', () => {
});
});
+ describe('local time', () => {
+ it('should show local time when it is available', () => {
+ createWrapper();
+
+ expect(findUserLocalTime().exists()).toBe(true);
+ });
+
+ it('should not show local time when it is not available', () => {
+ const user = {
+ ...DEFAULT_PROPS.user,
+ localTime: null,
+ };
+
+ createWrapper({ user });
+
+ expect(findUserLocalTime().exists()).toBe(false);
+ });
+ });
+
describe('status data', () => {
it('should show only message', () => {
const user = { ...DEFAULT_PROPS.user, status: { message_html: 'Hello World' } };
@@ -256,5 +276,11 @@ describe('User Popover Component', () => {
const securityBotDocsLink = findSecurityBotDocsLink();
expect(securityBotDocsLink.text()).toBe('Learn more about %<>\';"');
});
+
+ it('does not display local time', () => {
+ createWrapper({ user: SECURITY_BOT_USER });
+
+ expect(findUserLocalTime().exists()).toBe(false);
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/web_ide_link_spec.js b/spec/frontend/vue_shared/components/web_ide_link_spec.js
index 5fe4eeb6061..92938b2717f 100644
--- a/spec/frontend/vue_shared/components/web_ide_link_spec.js
+++ b/spec/frontend/vue_shared/components/web_ide_link_spec.js
@@ -160,4 +160,26 @@ describe('Web IDE link component', () => {
expect(findLocalStorageSync().props('value')).toBe(ACTION_GITPOD.key);
});
});
+
+ describe('edit actions', () => {
+ it.each([
+ {
+ props: { showWebIdeButton: true, showEditButton: false },
+ expectedEventPayload: 'ide',
+ },
+ {
+ props: { showWebIdeButton: false, showEditButton: true },
+ expectedEventPayload: 'simple',
+ },
+ ])(
+ 'emits the correct event when an action handler is called',
+ async ({ props, expectedEventPayload }) => {
+ createComponent({ ...props, needsToFork: true });
+
+ findActionsButton().props('actions')[0].handle();
+
+ expect(wrapper.emitted('edit')).toEqual([[expectedEventPayload]]);
+ },
+ );
+ });
});
diff --git a/spec/frontend/vue_shared/directives/validation_spec.js b/spec/frontend/vue_shared/directives/validation_spec.js
index 51ee73cabde..dcd3a44a6fc 100644
--- a/spec/frontend/vue_shared/directives/validation_spec.js
+++ b/spec/frontend/vue_shared/directives/validation_spec.js
@@ -4,11 +4,13 @@ import validation, { initForm } from '~/vue_shared/directives/validation';
describe('validation directive', () => {
let wrapper;
- const createComponentFactory = ({ inputAttributes, template, data }) => {
- const defaultInputAttributes = {
- type: 'text',
- required: true,
- };
+ const createComponentFactory = (options) => {
+ const {
+ inputAttributes = { type: 'text', required: true },
+ template,
+ data,
+ feedbackMap = {},
+ } = options;
const defaultTemplate = `
<form>
@@ -18,11 +20,11 @@ describe('validation directive', () => {
const component = {
directives: {
- validation: validation(),
+ validation: validation(feedbackMap),
},
data() {
return {
- attributes: inputAttributes || defaultInputAttributes,
+ attributes: inputAttributes,
...data,
};
},
@@ -32,8 +34,10 @@ describe('validation directive', () => {
wrapper = shallowMount(component, { attachTo: document.body });
};
- const createComponent = ({ inputAttributes, showValidation, template } = {}) =>
- createComponentFactory({
+ const createComponent = (options = {}) => {
+ const { inputAttributes, showValidation, template, feedbackMap } = options;
+
+ return createComponentFactory({
inputAttributes,
data: {
showValidation,
@@ -48,10 +52,14 @@ describe('validation directive', () => {
},
},
template,
+ feedbackMap,
});
+ };
+
+ const createComponentWithInitForm = (options = {}) => {
+ const { inputAttributes, feedbackMap } = options;
- const createComponentWithInitForm = ({ inputAttributes } = {}) =>
- createComponentFactory({
+ return createComponentFactory({
inputAttributes,
data: {
form: initForm({
@@ -68,7 +76,9 @@ describe('validation directive', () => {
<input v-validation:[form.showValidation] name="exampleField" v-bind="attributes" />
</form>
`,
+ feedbackMap,
});
+ };
afterEach(() => {
wrapper.destroy();
@@ -209,6 +219,111 @@ describe('validation directive', () => {
});
});
+ describe('with custom feedbackMap', () => {
+ const customMessage = 'Please fill out the name field.';
+ const template = `
+ <form>
+ <div v-validation:[showValidation]>
+ <input name="exampleField" v-bind="attributes" />
+ </div>
+ </form>
+ `;
+ beforeEach(() => {
+ const feedbackMap = {
+ valueMissing: {
+ isInvalid: (el) => el.validity?.valueMissing,
+ message: customMessage,
+ },
+ };
+
+ createComponent({
+ template,
+ inputAttributes: {
+ required: true,
+ },
+ feedbackMap,
+ });
+ });
+
+ describe('with invalid value', () => {
+ beforeEach(() => {
+ setValueAndTriggerValidation('');
+ });
+
+ it('should set correct field state', () => {
+ expect(getFormData().fields.exampleField).toEqual({
+ state: false,
+ feedback: customMessage,
+ });
+ });
+ });
+
+ describe('with valid value', () => {
+ beforeEach(() => {
+ setValueAndTriggerValidation('hello');
+ });
+
+ it('set the correct state', () => {
+ expect(getFormData().fields.exampleField).toEqual({
+ state: true,
+ feedback: '',
+ });
+ });
+ });
+ });
+
+ describe('with validation-message present on the element', () => {
+ const customMessage = 'The name field is required.';
+ const template = `
+ <form>
+ <div v-validation:[showValidation]>
+ <input name="exampleField" v-bind="attributes" validation-message="${customMessage}" />
+ </div>
+ </form>
+ `;
+ beforeEach(() => {
+ const feedbackMap = {
+ valueMissing: {
+ isInvalid: (el) => el.validity?.valueMissing,
+ },
+ };
+
+ createComponent({
+ template,
+ inputAttributes: {
+ required: true,
+ },
+ feedbackMap,
+ });
+ });
+
+ describe('with invalid value', () => {
+ beforeEach(() => {
+ setValueAndTriggerValidation('');
+ });
+
+ it('should set correct field state', () => {
+ expect(getFormData().fields.exampleField).toEqual({
+ state: false,
+ feedback: customMessage,
+ });
+ });
+ });
+
+ describe('with valid value', () => {
+ beforeEach(() => {
+ setValueAndTriggerValidation('hello');
+ });
+
+ it('set the correct state', () => {
+ expect(getFormData().fields.exampleField).toEqual({
+ state: true,
+ feedback: '',
+ });
+ });
+ });
+ });
+
describe('component using initForm', () => {
it('sets the form fields correctly', () => {
createComponentWithInitForm();
diff --git a/spec/frontend/vue_shared/oncall_schedules_list_spec.js b/spec/frontend/vue_shared/oncall_schedules_list_spec.js
deleted file mode 100644
index f83a5187b8b..00000000000
--- a/spec/frontend/vue_shared/oncall_schedules_list_spec.js
+++ /dev/null
@@ -1,87 +0,0 @@
-import { GlLink, GlSprintf } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import OncallSchedulesList from '~/vue_shared/components/oncall_schedules_list.vue';
-
-const mockSchedules = [
- {
- name: 'Schedule 1',
- scheduleUrl: 'http://gitlab.com/gitlab-org/gitlab-shell/-/oncall_schedules',
- projectName: 'Shell',
- projectUrl: 'http://gitlab.com/gitlab-org/gitlab-shell/',
- },
- {
- name: 'Schedule 2',
- scheduleUrl: 'http://gitlab.com/gitlab-org/gitlab-ui/-/oncall_schedules',
- projectName: 'UI',
- projectUrl: 'http://gitlab.com/gitlab-org/gitlab-ui/',
- },
-];
-
-const userName = "O'User";
-
-describe('On-call schedules list', () => {
- let wrapper;
-
- function createComponent(props) {
- wrapper = extendedWrapper(
- shallowMount(OncallSchedulesList, {
- propsData: {
- schedules: mockSchedules,
- userName,
- ...props,
- },
- stubs: {
- GlSprintf,
- },
- }),
- );
- }
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- const findLinks = () => wrapper.findAllComponents(GlLink);
- const findTitle = () => wrapper.findByTestId('title');
- const findFooter = () => wrapper.findByTestId('footer');
- const findSchedules = () => wrapper.findByTestId('schedules-list');
-
- describe.each`
- isCurrentUser | titleText | footerText
- ${true} | ${'You are currently a part of:'} | ${'Removing yourself may put your on-call team at risk of missing a notification.'}
- ${false} | ${`User ${userName} is currently part of:`} | ${'Removing this user may put their on-call team at risk of missing a notification.'}
- `('when current user ', ({ isCurrentUser, titleText, footerText }) => {
- it(`${isCurrentUser ? 'is' : 'is not'} a part of on-call schedule`, async () => {
- createComponent({
- isCurrentUser,
- });
-
- expect(findTitle().text()).toBe(titleText);
- expect(findFooter().text()).toBe(footerText);
- });
- });
-
- describe.each(mockSchedules)(
- 'renders each on-call schedule data',
- ({ name, scheduleUrl, projectName, projectUrl }) => {
- beforeEach(() => {
- createComponent({ schedules: [{ name, scheduleUrl, projectName, projectUrl }] });
- });
-
- it(`renders schedule ${name}'s name and link`, () => {
- const msg = findSchedules().text();
-
- expect(msg).toContain(`On-call schedule ${name}`);
- expect(findLinks().at(0).attributes('href')).toBe(scheduleUrl);
- });
-
- it(`renders project ${projectName}'s name and link`, () => {
- const msg = findSchedules().text();
-
- expect(msg).toContain(`in Project ${projectName}`);
- expect(findLinks().at(1).attributes('href')).toBe(projectUrl);
- });
- },
- );
-});
diff --git a/spec/frontend/vue_shared/security_reports/mock_data.js b/spec/frontend/vue_shared/security_reports/mock_data.js
index 06631710509..cdaeec78e47 100644
--- a/spec/frontend/vue_shared/security_reports/mock_data.js
+++ b/spec/frontend/vue_shared/security_reports/mock_data.js
@@ -314,7 +314,7 @@ export const sastDiffSuccessMock = {
head_report_created_at: '2020-01-10T10:00:00.000Z',
};
-export const secretScanningDiffSuccessMock = {
+export const secretDetectionDiffSuccessMock = {
added: [mockFindings[0], mockFindings[1]],
fixed: [mockFindings[2]],
base_report_created_at: '2020-01-01T10:00:00.000Z',
diff --git a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
index 4d579fa61df..68a97103d3a 100644
--- a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
+++ b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
@@ -12,7 +12,7 @@ import {
securityReportMergeRequestDownloadPathsQueryNoArtifactsResponse,
securityReportMergeRequestDownloadPathsQueryResponse,
sastDiffSuccessMock,
- secretScanningDiffSuccessMock,
+ secretDetectionDiffSuccessMock,
} from 'jest/vue_shared/security_reports/mock_data';
import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
@@ -31,7 +31,7 @@ Vue.use(VueApollo);
Vue.use(Vuex);
const SAST_COMPARISON_PATH = '/sast.json';
-const SECRET_SCANNING_COMPARISON_PATH = '/secret_detection.json';
+const SECRET_DETECTION_COMPARISON_PATH = '/secret_detection.json';
describe('Security reports app', () => {
let wrapper;
@@ -175,12 +175,12 @@ describe('Security reports app', () => {
const SAST_SUCCESS_MESSAGE =
'Security scanning detected 1 potential vulnerability 1 Critical 0 High and 0 Others';
- const SECRET_SCANNING_SUCCESS_MESSAGE =
+ const SECRET_DETECTION_SUCCESS_MESSAGE =
'Security scanning detected 2 potential vulnerabilities 1 Critical 1 High and 0 Others';
describe.each`
- reportType | pathProp | path | successResponse | successMessage
- ${REPORT_TYPE_SAST} | ${'sastComparisonPath'} | ${SAST_COMPARISON_PATH} | ${sastDiffSuccessMock} | ${SAST_SUCCESS_MESSAGE}
- ${REPORT_TYPE_SECRET_DETECTION} | ${'secretScanningComparisonPath'} | ${SECRET_SCANNING_COMPARISON_PATH} | ${secretScanningDiffSuccessMock} | ${SECRET_SCANNING_SUCCESS_MESSAGE}
+ reportType | pathProp | path | successResponse | successMessage
+ ${REPORT_TYPE_SAST} | ${'sastComparisonPath'} | ${SAST_COMPARISON_PATH} | ${sastDiffSuccessMock} | ${SAST_SUCCESS_MESSAGE}
+ ${REPORT_TYPE_SECRET_DETECTION} | ${'secretDetectionComparisonPath'} | ${SECRET_DETECTION_COMPARISON_PATH} | ${secretDetectionDiffSuccessMock} | ${SECRET_DETECTION_SUCCESS_MESSAGE}
`(
'given a $pathProp and $reportType artifact',
({ pathProp, path, successResponse, successMessage }) => {
diff --git a/spec/frontend/vue_shared/security_reports/store/getters_spec.js b/spec/frontend/vue_shared/security_reports/store/getters_spec.js
index 97746c7c38b..bcc8955ba02 100644
--- a/spec/frontend/vue_shared/security_reports/store/getters_spec.js
+++ b/spec/frontend/vue_shared/security_reports/store/getters_spec.js
@@ -8,7 +8,7 @@ import {
summaryCounts,
} from '~/vue_shared/security_reports/store/getters';
import createSastState from '~/vue_shared/security_reports/store/modules/sast/state';
-import createSecretScanningState from '~/vue_shared/security_reports/store/modules/secret_detection/state';
+import createSecretDetectionState from '~/vue_shared/security_reports/store/modules/secret_detection/state';
import createState from '~/vue_shared/security_reports/store/state';
import { groupedTextBuilder } from '~/vue_shared/security_reports/store/utils';
import { CRITICAL, HIGH, LOW } from '~/vulnerabilities/constants';
@@ -21,7 +21,7 @@ describe('Security reports getters', () => {
beforeEach(() => {
state = createState();
state.sast = createSastState();
- state.secretDetection = createSecretScanningState();
+ state.secretDetection = createSecretDetectionState();
});
describe('summaryCounts', () => {
diff --git a/spec/frontend_integration/fixture_generators.yml b/spec/frontend_integration/fixture_generators.yml
new file mode 100644
index 00000000000..1f6ff85352d
--- /dev/null
+++ b/spec/frontend_integration/fixture_generators.yml
@@ -0,0 +1,5 @@
+- spec/frontend/fixtures/api_projects.rb
+- spec/frontend/fixtures/api_merge_requests.rb
+- spec/frontend/fixtures/projects_json.rb
+- spec/frontend/fixtures/merge_requests_diffs.rb
+- spec/frontend/fixtures/raw.rb
diff --git a/spec/frontend_integration/test_helpers/mock_server/graphql.js b/spec/frontend_integration/test_helpers/mock_server/graphql.js
index 27396842523..d4ee7c02839 100644
--- a/spec/frontend_integration/test_helpers/mock_server/graphql.js
+++ b/spec/frontend_integration/test_helpers/mock_server/graphql.js
@@ -1,9 +1,7 @@
import { buildSchema, graphql } from 'graphql';
import { memoize } from 'lodash';
-// The graphql schema is dynamically generated in CI
-// during the `graphql-schema-dump` job.
-// eslint-disable-next-line global-require, import/no-unresolved
+// eslint-disable-next-line global-require
const getGraphqlSchema = () => require('../../../../tmp/tests/graphql/gitlab_schema.graphql');
const graphqlResolvers = {
diff --git a/spec/graphql/mutations/clusters/agent_tokens/create_spec.rb b/spec/graphql/mutations/clusters/agent_tokens/create_spec.rb
new file mode 100644
index 00000000000..fc025c8e3d3
--- /dev/null
+++ b/spec/graphql/mutations/clusters/agent_tokens/create_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Clusters::AgentTokens::Create do
+ subject(:mutation) { described_class.new(object: nil, context: context, field: nil) }
+
+ let_it_be(:cluster_agent) { create(:cluster_agent) }
+ let_it_be(:user) { create(:user) }
+
+ let(:context) do
+ GraphQL::Query::Context.new(
+ query: OpenStruct.new(schema: nil),
+ values: { current_user: user },
+ object: nil
+ )
+ end
+
+ specify { expect(described_class).to require_graphql_authorizations(:create_cluster) }
+
+ describe '#resolve' do
+ let(:description) { 'new token!' }
+ let(:name) { 'new name' }
+
+ subject { mutation.resolve(cluster_agent_id: cluster_agent.to_global_id, description: description, name: name) }
+
+ context 'without token permissions' do
+ it 'raises an error if the resource is not accessible to the user' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'with user permissions' do
+ before do
+ cluster_agent.project.add_maintainer(user)
+ end
+
+ it 'creates a new token', :aggregate_failures do
+ expect { subject }.to change { ::Clusters::AgentToken.count }.by(1)
+ expect(subject[:errors]).to eq([])
+ end
+
+ it 'returns token information', :aggregate_failures do
+ token = subject[:token]
+
+ expect(subject[:secret]).not_to be_nil
+ expect(token.created_by_user).to eq(user)
+ expect(token.description).to eq(description)
+ expect(token.name).to eq(name)
+ end
+
+ context 'invalid params' do
+ subject { mutation.resolve(cluster_agent_id: cluster_agent.id) }
+
+ it 'generates an error message when id invalid', :aggregate_failures do
+ expect { subject }.to raise_error(::GraphQL::CoercionError)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/clusters/agent_tokens/delete_spec.rb b/spec/graphql/mutations/clusters/agent_tokens/delete_spec.rb
new file mode 100644
index 00000000000..5cdbc0f6d72
--- /dev/null
+++ b/spec/graphql/mutations/clusters/agent_tokens/delete_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Clusters::AgentTokens::Delete do
+ let(:token) { create(:cluster_agent_token) }
+ let(:user) { create(:user) }
+
+ let(:mutation) do
+ described_class.new(
+ object: double,
+ context: { current_user: user },
+ field: double
+ )
+ end
+
+ it { expect(described_class.graphql_name).to eq('ClusterAgentTokenDelete') }
+ it { expect(described_class).to require_graphql_authorizations(:admin_cluster) }
+
+ describe '#resolve' do
+ let(:global_id) { token.to_global_id }
+
+ subject { mutation.resolve(id: global_id) }
+
+ context 'without user permissions' do
+ it 'fails to delete the cluster agent', :aggregate_failures do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ expect { token.reload }.not_to raise_error
+ end
+ end
+
+ context 'with user permissions' do
+ before do
+ token.agent.project.add_maintainer(user)
+ end
+
+ it 'deletes a cluster agent', :aggregate_failures do
+ expect { subject }.to change { ::Clusters::AgentToken.count }.by(-1)
+ expect { token.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'with invalid params' do
+ let(:global_id) { token.id }
+
+ it 'raises an error if the cluster agent id is invalid', :aggregate_failures do
+ expect { subject }.to raise_error(::GraphQL::CoercionError)
+ expect { token.reload }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/clusters/agents/create_spec.rb b/spec/graphql/mutations/clusters/agents/create_spec.rb
new file mode 100644
index 00000000000..c80b6f6cdad
--- /dev/null
+++ b/spec/graphql/mutations/clusters/agents/create_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Clusters::Agents::Create do
+ subject(:mutation) { described_class.new(object: nil, context: context, field: nil) }
+
+ let(:project) { create(:project, :public, :repository) }
+ let(:user) { create(:user) }
+ let(:context) do
+ GraphQL::Query::Context.new(
+ query: OpenStruct.new(schema: nil),
+ values: { current_user: user },
+ object: nil
+ )
+ end
+
+ specify { expect(described_class).to require_graphql_authorizations(:create_cluster) }
+
+ describe '#resolve' do
+ subject { mutation.resolve(project_path: project.full_path, name: 'test-agent') }
+
+ context 'without project permissions' do
+ it 'raises an error if the resource is not accessible to the user' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'with user permissions' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'creates a new clusters_agent', :aggregate_failures do
+ expect { subject }.to change { ::Clusters::Agent.count }.by(1)
+ expect(subject[:cluster_agent].name).to eq('test-agent')
+ expect(subject[:errors]).to eq([])
+ end
+
+ context 'invalid params' do
+ subject { mutation.resolve(project_path: project.full_path, name: '@bad_name!') }
+
+ it 'generates an error message when name is invalid', :aggregate_failures do
+ expect(subject[:clusters_agent]).to be_nil
+ expect(subject[:errors]).to eq(["Name can contain only lowercase letters, digits, and '-', but cannot start or end with '-'"])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/clusters/agents/delete_spec.rb b/spec/graphql/mutations/clusters/agents/delete_spec.rb
new file mode 100644
index 00000000000..0aabf53391a
--- /dev/null
+++ b/spec/graphql/mutations/clusters/agents/delete_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Clusters::Agents::Delete do
+ subject(:mutation) { described_class.new(object: nil, context: context, field: nil) }
+
+ let(:cluster_agent) { create(:cluster_agent) }
+ let(:project) { cluster_agent.project }
+ let(:user) { create(:user) }
+ let(:context) do
+ GraphQL::Query::Context.new(
+ query: OpenStruct.new(schema: nil),
+ values: { current_user: user },
+ object: nil
+ )
+ end
+
+ specify { expect(described_class).to require_graphql_authorizations(:admin_cluster) }
+
+ describe '#resolve' do
+ subject { mutation.resolve(id: cluster_agent.to_global_id) }
+
+ context 'without user permissions' do
+ it 'fails to delete the cluster agent', :aggregate_failures do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ expect { cluster_agent.reload }.not_to raise_error
+ end
+ end
+
+ context 'with user permissions' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'deletes a cluster agent', :aggregate_failures do
+ expect { subject }.to change { ::Clusters::Agent.count }.by(-1)
+ expect { cluster_agent.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'with invalid params' do
+ subject { mutation.resolve(id: cluster_agent.id) }
+
+ it 'raises an error if the cluster agent id is invalid', :aggregate_failures do
+ expect { subject }.to raise_error(::GraphQL::CoercionError)
+ expect { cluster_agent.reload }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/customer_relations/contacts/create_spec.rb b/spec/graphql/mutations/customer_relations/contacts/create_spec.rb
new file mode 100644
index 00000000000..21a1aa2741a
--- /dev/null
+++ b/spec/graphql/mutations/customer_relations/contacts/create_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::CustomerRelations::Contacts::Create do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ let(:not_found_or_does_not_belong) { 'The specified organization was not found or does not belong to this group' }
+ let(:valid_params) do
+ attributes_for(:contact,
+ group: group,
+ description: 'Managing Director'
+ )
+ end
+
+ describe '#resolve' do
+ subject(:resolve_mutation) do
+ described_class.new(object: nil, context: { current_user: user }, field: nil).resolve(
+ **valid_params,
+ group_id: group.to_global_id
+ )
+ end
+
+ context 'when the user does not have permission' do
+ before do
+ group.add_reporter(user)
+ end
+
+ it 'raises an error' do
+ expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ .with_message("The resource that you are attempting to access does not exist or you don't have permission to perform this action")
+ end
+ end
+
+ context 'when the user has permission' do
+ before_all do
+ group.add_developer(user)
+ end
+
+ context 'when the feature is disabled' do
+ before do
+ stub_feature_flags(customer_relations: false)
+ end
+
+ it 'raises an error' do
+ expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ .with_message('Feature disabled')
+ end
+ end
+
+ context 'when the params are invalid' do
+ it 'returns the validation error' do
+ valid_params[:first_name] = nil
+
+ expect(resolve_mutation[:errors]).to match_array(["First name can't be blank"])
+ end
+ end
+
+ context 'when attaching to an organization' do
+ context 'when all ok' do
+ before do
+ organization = create(:organization, group: group)
+ valid_params[:organization_id] = organization.to_global_id
+ end
+
+ it 'creates contact with correct values' do
+ expect(resolve_mutation[:contact].organization).to be_present
+ end
+ end
+
+ context 'when organization_id is invalid' do
+ before do
+ valid_params[:organization_id] = "gid://gitlab/CustomerRelations::Organization/#{non_existing_record_id}"
+ end
+
+ it 'returns the relevant error' do
+ expect(resolve_mutation[:errors]).to match_array([not_found_or_does_not_belong])
+ end
+ end
+
+      context 'when organization belongs to a different group' do
+ before do
+ organization = create(:organization)
+ valid_params[:organization_id] = organization.to_global_id
+ end
+
+ it 'returns the relevant error' do
+ expect(resolve_mutation[:errors]).to match_array([not_found_or_does_not_belong])
+ end
+ end
+ end
+
+ it 'creates contact with correct values' do
+ expect(resolve_mutation[:contact]).to have_attributes(valid_params)
+ end
+ end
+ end
+
+ specify { expect(described_class).to require_graphql_authorizations(:admin_contact) }
+end
diff --git a/spec/graphql/mutations/customer_relations/contacts/update_spec.rb b/spec/graphql/mutations/customer_relations/contacts/update_spec.rb
new file mode 100644
index 00000000000..93bc6f53cf9
--- /dev/null
+++ b/spec/graphql/mutations/customer_relations/contacts/update_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::CustomerRelations::Contacts::Update do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ let(:first_name) { 'Lionel' }
+ let(:last_name) { 'Smith' }
+ let(:email) { 'ls@gitlab.com' }
+ let(:description) { 'VIP' }
+ let(:does_not_exist_or_no_permission) { "The resource that you are attempting to access does not exist or you don't have permission to perform this action" }
+ let(:contact) { create(:contact, group: group) }
+ let(:attributes) do
+ {
+ id: contact.to_global_id,
+ first_name: first_name,
+ last_name: last_name,
+ email: email,
+ description: description
+ }
+ end
+
+ describe '#resolve' do
+ subject(:resolve_mutation) do
+ described_class.new(object: nil, context: { current_user: user }, field: nil).resolve(
+ attributes
+ )
+ end
+
+ context 'when the user does not have permission to update a contact' do
+ before do
+ group.add_reporter(user)
+ end
+
+ it 'raises an error' do
+ expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ .with_message(does_not_exist_or_no_permission)
+ end
+ end
+
+ context 'when the contact does not exist' do
+ it 'raises an error' do
+ attributes[:id] = "gid://gitlab/CustomerRelations::Contact/#{non_existing_record_id}"
+
+ expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ .with_message(does_not_exist_or_no_permission)
+ end
+ end
+
+ context 'when the user has permission to update a contact' do
+ before_all do
+ group.add_developer(user)
+ end
+
+ it 'updates the organization with correct values' do
+ expect(resolve_mutation[:contact]).to have_attributes(attributes)
+ end
+
+ context 'when the feature is disabled' do
+ before do
+ stub_feature_flags(customer_relations: false)
+ end
+
+ it 'raises an error' do
+ expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ .with_message('Feature disabled')
+ end
+ end
+ end
+ end
+
+ specify { expect(described_class).to require_graphql_authorizations(:admin_contact) }
+end
diff --git a/spec/graphql/mutations/customer_relations/organizations/create_spec.rb b/spec/graphql/mutations/customer_relations/organizations/create_spec.rb
index ab430b9240b..738a8d724ab 100644
--- a/spec/graphql/mutations/customer_relations/organizations/create_spec.rb
+++ b/spec/graphql/mutations/customer_relations/organizations/create_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Mutations::CustomerRelations::Organizations::Create do
let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
let(:valid_params) do
attributes_for(:organization,
@@ -23,22 +24,19 @@ RSpec.describe Mutations::CustomerRelations::Organizations::Create do
end
context 'when the user does not have permission' do
- let_it_be(:group) { create(:group) }
-
before do
- group.add_guest(user)
+ group.add_reporter(user)
end
it 'raises an error' do
expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ .with_message("The resource that you are attempting to access does not exist or you don't have permission to perform this action")
end
end
context 'when the user has permission' do
- let_it_be(:group) { create(:group) }
-
before_all do
- group.add_reporter(user)
+ group.add_developer(user)
end
context 'when the feature is disabled' do
@@ -48,6 +46,7 @@ RSpec.describe Mutations::CustomerRelations::Organizations::Create do
it 'raises an error' do
expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ .with_message('Feature disabled')
end
end
diff --git a/spec/graphql/mutations/customer_relations/organizations/update_spec.rb b/spec/graphql/mutations/customer_relations/organizations/update_spec.rb
index f5aa6c00301..0bc6f184fe3 100644
--- a/spec/graphql/mutations/customer_relations/organizations/update_spec.rb
+++ b/spec/graphql/mutations/customer_relations/organizations/update_spec.rb
@@ -4,10 +4,12 @@ require 'spec_helper'
RSpec.describe Mutations::CustomerRelations::Organizations::Update do
let_it_be(:user) { create(:user) }
- let_it_be(:name) { 'GitLab' }
- let_it_be(:default_rate) { 1000.to_f }
- let_it_be(:description) { 'VIP' }
+ let_it_be(:group) { create(:group) }
+ let(:name) { 'GitLab' }
+ let(:default_rate) { 1000.to_f }
+ let(:description) { 'VIP' }
+ let(:does_not_exist_or_no_permission) { "The resource that you are attempting to access does not exist or you don't have permission to perform this action" }
let(:organization) { create(:organization, group: group) }
let(:attributes) do
{
@@ -26,32 +28,28 @@ RSpec.describe Mutations::CustomerRelations::Organizations::Update do
end
context 'when the user does not have permission to update an organization' do
- let_it_be(:group) { create(:group) }
-
before do
- group.add_guest(user)
+ group.add_reporter(user)
end
it 'raises an error' do
expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ .with_message(does_not_exist_or_no_permission)
end
end
context 'when the organization does not exist' do
- let_it_be(:group) { create(:group) }
-
it 'raises an error' do
- attributes[:id] = 'gid://gitlab/CustomerRelations::Organization/999'
+ attributes[:id] = "gid://gitlab/CustomerRelations::Organization/#{non_existing_record_id}"
expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ .with_message(does_not_exist_or_no_permission)
end
end
context 'when the user has permission to update an organization' do
- let_it_be(:group) { create(:group) }
-
before_all do
- group.add_reporter(user)
+ group.add_developer(user)
end
it 'updates the organization with correct values' do
@@ -65,6 +63,7 @@ RSpec.describe Mutations::CustomerRelations::Organizations::Update do
it 'raises an error' do
expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ .with_message('Feature disabled')
end
end
end
diff --git a/spec/graphql/mutations/dependency_proxy/group_settings/update_spec.rb b/spec/graphql/mutations/dependency_proxy/group_settings/update_spec.rb
new file mode 100644
index 00000000000..35d3224d5ba
--- /dev/null
+++ b/spec/graphql/mutations/dependency_proxy/group_settings/update_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::DependencyProxy::GroupSettings::Update do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be_with_reload(:group_settings) { create(:dependency_proxy_group_setting, group: group) }
+ let_it_be(:user) { create(:user) }
+
+ let(:params) { { group_path: group.full_path, enabled: false } }
+
+ specify { expect(described_class).to require_graphql_authorizations(:admin_dependency_proxy) }
+
+ describe '#resolve' do
+ subject { described_class.new(object: group, context: { current_user: user }, field: nil).resolve(**params) }
+
+ shared_examples 'updating the dependency proxy group settings' do
+ it_behaves_like 'updating the dependency proxy group settings attributes',
+ from: { enabled: true },
+ to: { enabled: false }
+
+      it 'returns the dependency proxy settings with no errors' do
+ expect(subject).to eq(
+ dependency_proxy_setting: group_settings,
+ errors: []
+ )
+ end
+ end
+
+ shared_examples 'denying access to dependency proxy group settings' do
+ it 'raises Gitlab::Graphql::Errors::ResourceNotAvailable' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'updating the dependency proxy group settings'
+ :developer | 'updating the dependency proxy group settings'
+ :reporter | 'denying access to dependency proxy group settings'
+ :guest | 'denying access to dependency proxy group settings'
+ :anonymous | 'denying access to dependency proxy group settings'
+ end
+
+ with_them do
+ before do
+ stub_config(dependency_proxy: { enabled: true })
+ group.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+end
diff --git a/spec/graphql/mutations/groups/update_spec.rb b/spec/graphql/mutations/groups/update_spec.rb
index 2118134e8e6..620c9d6ee91 100644
--- a/spec/graphql/mutations/groups/update_spec.rb
+++ b/spec/graphql/mutations/groups/update_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Mutations::Groups::Update do
RSpec.shared_examples 'updating the group shared runners setting' do
it 'updates the group shared runners setting' do
expect { subject }
- .to change { group.reload.shared_runners_setting }.from('enabled').to('disabled_and_unoverridable')
+ .to change { group.reload.shared_runners_setting }.from('enabled').to(Namespace::SR_DISABLED_AND_UNOVERRIDABLE)
end
it 'returns no errors' do
@@ -51,7 +51,7 @@ RSpec.describe Mutations::Groups::Update do
context 'changing shared runners setting' do
let_it_be(:params) do
{ full_path: group.full_path,
- shared_runners_setting: 'disabled_and_unoverridable' }
+ shared_runners_setting: Namespace::SR_DISABLED_AND_UNOVERRIDABLE }
end
where(:user_role, :shared_examples_name) do
diff --git a/spec/graphql/mutations/issues/create_spec.rb b/spec/graphql/mutations/issues/create_spec.rb
index 0e7ef0e55b9..825d04ff827 100644
--- a/spec/graphql/mutations/issues/create_spec.rb
+++ b/spec/graphql/mutations/issues/create_spec.rb
@@ -53,7 +53,11 @@ RSpec.describe Mutations::Issues::Create do
stub_spam_services
end
- subject { mutation.resolve(**mutation_params) }
+ def resolve
+ mutation.resolve(**mutation_params)
+ end
+
+ subject { resolve }
context 'when the user does not have permission to create an issue' do
it 'raises an error' do
@@ -61,6 +65,15 @@ RSpec.describe Mutations::Issues::Create do
end
end
+ context 'when the user has exceeded the rate limit' do
+ it 'raises an error' do
+ allow(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
+ project.add_developer(user)
+
+ expect { resolve }.to raise_error(RateLimitedService::RateLimitedError, _('This endpoint has been requested too many times. Try again later.'))
+ end
+ end
+
context 'when the user can create an issue' do
context 'when creating an issue a developer' do
before do
diff --git a/spec/graphql/resolvers/board_list_issues_resolver_spec.rb b/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
index 26040f4ec1a..53d2c8a853c 100644
--- a/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
@@ -31,12 +31,11 @@ RSpec.describe Resolvers::BoardListIssuesResolver do
end.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
end
- it 'returns issues in the correct order with non-nil relative positions', :aggregate_failures do
+ it 'returns the issues in the correct order' do
# by relative_position and then ID
result = resolve_board_list_issues
- expect(result.map(&:id)).to eq [issue3.id, issue1.id, issue2.id, issue4.id]
- expect(result.map(&:relative_position)).not_to include(nil)
+ expect(result.map(&:id)).to eq [issue1.id, issue3.id, issue2.id, issue4.id]
end
it 'finds only issues matching filters' do
@@ -57,6 +56,13 @@ RSpec.describe Resolvers::BoardListIssuesResolver do
expect(result).to match_array([issue1])
end
+ it 'filters issues by negated issue type' do
+ incident = create(:incident, project: project, labels: [label], relative_position: 15)
+ result = resolve_board_list_issues(args: { filters: { not: { types: ['issue'] } } })
+
+ expect(result).to contain_exactly(incident)
+ end
+
it 'raises an exception if both assignee_username and assignee_wildcard_id are present' do
expect do
resolve_board_list_issues(args: { filters: { assignee_username: ['username'], assignee_wildcard_id: 'NONE' } })
@@ -112,6 +118,6 @@ RSpec.describe Resolvers::BoardListIssuesResolver do
end
def resolve_board_list_issues(args: {}, current_user: user)
- resolve(described_class, obj: list, args: args, ctx: { current_user: current_user }).items
+ resolve(described_class, obj: list, args: args, ctx: { current_user: current_user })
end
end
diff --git a/spec/graphql/resolvers/board_list_resolver_spec.rb b/spec/graphql/resolvers/board_list_resolver_spec.rb
new file mode 100644
index 00000000000..5cf9e4b14ea
--- /dev/null
+++ b/spec/graphql/resolvers/board_list_resolver_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::BoardListResolver do
+ include GraphqlHelpers
+ include Gitlab::Graphql::Laziness
+
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:unauth_user) { create(:user) }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:group_label) { create(:group_label, group: group, name: 'Development') }
+ let_it_be(:board) { create(:board, resource_parent: group) }
+ let_it_be(:label_list) { create(:list, board: board, label: group_label) }
+
+ describe '#resolve' do
+ subject { resolve_board_list(args: { id: global_id_of(label_list) }, current_user: current_user) }
+
+ context 'with unauthorized user' do
+ let(:current_user) { unauth_user }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when authorized' do
+ let(:current_user) { guest }
+
+ before do
+ group.add_guest(guest)
+ end
+
+ it { is_expected.to eq label_list }
+ end
+ end
+
+ def resolve_board_list(args: {}, current_user: user)
+ force(resolve(described_class, obj: nil, args: args, ctx: { current_user: current_user }))
+ end
+end
diff --git a/spec/graphql/resolvers/clusters/agent_tokens_resolver_spec.rb b/spec/graphql/resolvers/clusters/agent_tokens_resolver_spec.rb
new file mode 100644
index 00000000000..6b8b88928d8
--- /dev/null
+++ b/spec/graphql/resolvers/clusters/agent_tokens_resolver_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Clusters::AgentTokensResolver do
+ include GraphqlHelpers
+
+ it { expect(described_class.type).to eq(Types::Clusters::AgentTokenType) }
+ it { expect(described_class.null).to be_truthy }
+
+ describe '#resolve' do
+ let(:agent) { create(:cluster_agent) }
+ let(:user) { create(:user, maintainer_projects: [agent.project]) }
+ let(:ctx) { Hash(current_user: user) }
+
+ let!(:matching_token1) { create(:cluster_agent_token, agent: agent, last_used_at: 5.days.ago) }
+ let!(:matching_token2) { create(:cluster_agent_token, agent: agent, last_used_at: 2.days.ago) }
+ let!(:other_token) { create(:cluster_agent_token) }
+
+ subject { resolve(described_class, obj: agent, ctx: ctx) }
+
+ it 'returns tokens associated with the agent, ordered by last_used_at' do
+ expect(subject).to eq([matching_token2, matching_token1])
+ end
+
+ context 'user does not have permission' do
+ let(:user) { create(:user, developer_projects: [agent.project]) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/clusters/agents_resolver_spec.rb b/spec/graphql/resolvers/clusters/agents_resolver_spec.rb
new file mode 100644
index 00000000000..70f40748e1d
--- /dev/null
+++ b/spec/graphql/resolvers/clusters/agents_resolver_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Clusters::AgentsResolver do
+ include GraphqlHelpers
+
+ specify do
+ expect(described_class).to have_nullable_graphql_type(Types::Clusters::AgentType.connection_type)
+ end
+
+ specify do
+ expect(described_class.field_options).to include(extras: include(:lookahead))
+ end
+
+ describe '#resolve' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:maintainer) { create(:user, maintainer_projects: [project]) }
+ let_it_be(:developer) { create(:user, developer_projects: [project]) }
+ let_it_be(:agents) { create_list(:cluster_agent, 2, project: project) }
+
+ let(:ctx) { { current_user: current_user } }
+
+ subject { resolve_agents }
+
+ context 'the current user has access to clusters' do
+ let(:current_user) { maintainer }
+
+ it 'finds all agents' do
+ expect(subject).to match_array(agents)
+ end
+ end
+
+ context 'the current user does not have access to clusters' do
+ let(:current_user) { developer }
+
+ it 'returns an empty result' do
+ expect(subject).to be_empty
+ end
+ end
+ end
+
+ def resolve_agents(args = {})
+ resolve(described_class, obj: project, ctx: ctx, lookahead: positive_lookahead, args: args)
+ end
+end
+
+RSpec.describe Resolvers::Clusters::AgentsResolver.single do
+ it { expect(described_class).to be < Resolvers::Clusters::AgentsResolver }
+
+ describe '.field_options' do
+ subject { described_class.field_options }
+
+ specify do
+ expect(subject).to include(
+ type: ::Types::Clusters::AgentType,
+ null: true,
+ extras: [:lookahead]
+ )
+ end
+ end
+
+ describe 'arguments' do
+ subject { described_class.arguments[argument] }
+
+ describe 'name' do
+ let(:argument) { 'name' }
+
+ it do
+ expect(subject).to be_present
+ expect(subject.type).to be_kind_of GraphQL::Schema::NonNull
+ expect(subject.type.unwrap).to eq GraphQL::Types::String
+ expect(subject.description).to be_present
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/issues_resolver_spec.rb b/spec/graphql/resolvers/issues_resolver_spec.rb
index e992b2b04ae..9897e697009 100644
--- a/spec/graphql/resolvers/issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/issues_resolver_spec.rb
@@ -26,7 +26,14 @@ RSpec.describe Resolvers::IssuesResolver do
expect(described_class).to have_nullable_graphql_type(Types::IssueType.connection_type)
end
+ shared_context 'filtering for confidential issues' do
+ let_it_be(:confidential_issue1) { create(:issue, project: project, confidential: true) }
+ let_it_be(:confidential_issue2) { create(:issue, project: other_project, confidential: true) }
+ end
+
context "with a project" do
+ let(:obj) { project }
+
before_all do
project.add_developer(current_user)
project.add_reporter(reporter)
@@ -222,6 +229,42 @@ RSpec.describe Resolvers::IssuesResolver do
end
end
+ context 'confidential issues' do
+ include_context 'filtering for confidential issues'
+
+ context "when user is allowed to view confidential issues" do
+ it 'returns all viewable issues by default' do
+ expect(resolve_issues).to contain_exactly(issue1, issue2, confidential_issue1)
+ end
+
+ it 'returns only the non-confidential issues for the project when filter is set to false' do
+ expect(resolve_issues({ confidential: false })).to contain_exactly(issue1, issue2)
+ end
+
+ it "returns only the confidential issues for the project when filter is set to true" do
+ expect(resolve_issues({ confidential: true })).to contain_exactly(confidential_issue1)
+ end
+ end
+
+ context "when user is not allowed to see confidential issues" do
+ before do
+ project.add_guest(current_user)
+ end
+
+ it 'returns all viewable issues by default' do
+ expect(resolve_issues).to contain_exactly(issue1, issue2)
+ end
+
+ it 'does not return the confidential issues when filter is set to false' do
+ expect(resolve_issues({ confidential: false })).to contain_exactly(issue1, issue2)
+ end
+
+ it 'does not return the confidential issues when filter is set to true' do
+ expect(resolve_issues({ confidential: true })).to be_empty
+ end
+ end
+ end
+
context 'when searching issues' do
it 'returns correct issues' do
expect(resolve_issues(search: 'foo')).to contain_exactly(issue2)
@@ -236,6 +279,36 @@ RSpec.describe Resolvers::IssuesResolver do
resolve_issues(search: 'foo')
end
+
+ context 'with anonymous user' do
+ let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:public_issue) { create(:issue, project: public_project, title: 'Test issue') }
+
+ context 'with disable_anonymous_search enabled' do
+ before do
+ stub_feature_flags(disable_anonymous_search: true)
+ end
+
+ it 'returns an error' do
+ error_message = "User must be authenticated to include the `search` argument."
+
+ expect { resolve(described_class, obj: public_project, args: { search: 'test' }, ctx: { current_user: nil }) }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError, error_message)
+ end
+ end
+
+ context 'with disable_anonymous_search disabled' do
+ before do
+ stub_feature_flags(disable_anonymous_search: false)
+ end
+
+ it 'returns correct issues' do
+ expect(
+ resolve(described_class, obj: public_project, args: { search: 'test' }, ctx: { current_user: nil })
+ ).to contain_exactly(public_issue)
+ end
+ end
+ end
end
describe 'filters by negated params' do
@@ -260,6 +333,10 @@ RSpec.describe Resolvers::IssuesResolver do
expect(resolve_issues(not: { assignee_id: [assignee.id] })).to contain_exactly(issue1)
end
+ it 'returns issues without the specified issue_type' do
+ expect(resolve_issues(not: { types: ['issue'] })).to contain_exactly(issue1)
+ end
+
context 'when filtering by negated author' do
let_it_be(:issue_by_reporter) { create(:issue, author: reporter, project: project, state: :opened) }
@@ -304,7 +381,7 @@ RSpec.describe Resolvers::IssuesResolver do
let_it_be(:relative_issue4) { create(:issue, project: project, relative_position: nil) }
it 'sorts issues ascending' do
- expect(resolve_issues(sort: :relative_position_asc).to_a).to eq [relative_issue3, relative_issue1, relative_issue4, relative_issue2]
+ expect(resolve_issues(sort: :relative_position_asc).to_a).to eq [relative_issue3, relative_issue1, relative_issue2, relative_issue4]
end
end
@@ -485,26 +562,72 @@ RSpec.describe Resolvers::IssuesResolver do
end
context "with a group" do
+ let(:obj) { group }
+
before do
group.add_developer(current_user)
end
describe '#resolve' do
it 'finds all group issues' do
- result = resolve(described_class, obj: group, ctx: { current_user: current_user })
+ expect(resolve_issues).to contain_exactly(issue1, issue2, issue3)
+ end
+
+ it 'returns issues without the specified issue_type' do
+ expect(resolve_issues({ not: { types: ['issue'] } })).to contain_exactly(issue1)
+ end
+
+ context "confidential issues" do
+ include_context 'filtering for confidential issues'
+
+ context "when user is allowed to view confidential issues" do
+ it 'returns all viewable issues by default' do
+ expect(resolve_issues).to contain_exactly(issue1, issue2, issue3, confidential_issue1, confidential_issue2)
+ end
+
+ context 'filtering for confidential issues' do
+ it 'returns only the non-confidential issues for the group when filter is set to false' do
+ expect(resolve_issues({ confidential: false })).to contain_exactly(issue1, issue2, issue3)
+ end
- expect(result).to contain_exactly(issue1, issue2, issue3)
+ it "returns only the confidential issues for the group when filter is set to true" do
+ expect(resolve_issues({ confidential: true })).to contain_exactly(confidential_issue1, confidential_issue2)
+ end
+ end
+ end
+
+ context "when user is not allowed to see confidential issues" do
+ before do
+ group.add_guest(current_user)
+ end
+
+ it 'returns all viewable issues by default' do
+ expect(resolve_issues).to contain_exactly(issue1, issue2, issue3)
+ end
+
+ context 'filtering for confidential issues' do
+ it 'does not return the confidential issues when filter is set to false' do
+ expect(resolve_issues({ confidential: false })).to contain_exactly(issue1, issue2, issue3)
+ end
+
+ it 'does not return the confidential issues when filter is set to true' do
+ expect(resolve_issues({ confidential: true })).to be_empty
+ end
+ end
+ end
end
end
end
context "when passing a non existent, batch loaded project" do
- let(:project) do
+ let!(:project) do
BatchLoader::GraphQL.for("non-existent-path").batch do |_fake_paths, loader, _|
loader.call("non-existent-path", nil)
end
end
+ let(:obj) { project }
+
it "returns nil without breaking" do
expect(resolve_issues(iids: ["don't", "break"])).to be_empty
end
@@ -525,6 +648,6 @@ RSpec.describe Resolvers::IssuesResolver do
end
def resolve_issues(args = {}, context = { current_user: current_user })
- resolve(described_class, obj: project, args: args, ctx: context)
+ resolve(described_class, obj: obj, args: args, ctx: context)
end
end
diff --git a/spec/graphql/resolvers/kas/agent_configurations_resolver_spec.rb b/spec/graphql/resolvers/kas/agent_configurations_resolver_spec.rb
new file mode 100644
index 00000000000..bdb1ced46ae
--- /dev/null
+++ b/spec/graphql/resolvers/kas/agent_configurations_resolver_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Kas::AgentConfigurationsResolver do
+ include GraphqlHelpers
+
+ it { expect(described_class.type).to eq(Types::Kas::AgentConfigurationType) }
+ it { expect(described_class.null).to be_truthy }
+ it { expect(described_class.field_options).to include(calls_gitaly: true) }
+
+ describe '#resolve' do
+ let_it_be(:project) { create(:project) }
+
+ let(:user) { create(:user, maintainer_projects: [project]) }
+ let(:ctx) { Hash(current_user: user) }
+
+ let(:agent1) { double }
+ let(:agent2) { double }
+ let(:kas_client) { instance_double(Gitlab::Kas::Client, list_agent_config_files: [agent1, agent2]) }
+
+ subject { resolve(described_class, obj: project, ctx: ctx) }
+
+ before do
+ allow(Gitlab::Kas::Client).to receive(:new).and_return(kas_client)
+ end
+
+ it 'returns agents configured for the project' do
+ expect(subject).to contain_exactly(agent1, agent2)
+ end
+
+ context 'an error is returned from the KAS client' do
+ before do
+ allow(kas_client).to receive(:list_agent_config_files).and_raise(GRPC::DeadlineExceeded)
+ end
+
+ it 'raises a graphql error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable, 'GRPC::DeadlineExceeded')
+ end
+ end
+
+ context 'user does not have permission' do
+ let(:user) { create(:user) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/kas/agent_connections_resolver_spec.rb b/spec/graphql/resolvers/kas/agent_connections_resolver_spec.rb
new file mode 100644
index 00000000000..fe6509bcb3c
--- /dev/null
+++ b/spec/graphql/resolvers/kas/agent_connections_resolver_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Kas::AgentConnectionsResolver do
+ include GraphqlHelpers
+
+ it { expect(described_class.type).to eq(Types::Kas::AgentConnectionType) }
+ it { expect(described_class.null).to be_truthy }
+
+ describe '#resolve' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:agent1) { create(:cluster_agent, project: project) }
+ let_it_be(:agent2) { create(:cluster_agent, project: project) }
+
+ let(:user) { create(:user, maintainer_projects: [project]) }
+ let(:ctx) { Hash(current_user: user) }
+
+ let(:connection1) { double(agent_id: agent1.id) }
+ let(:connection2) { double(agent_id: agent1.id) }
+ let(:connection3) { double(agent_id: agent2.id) }
+ let(:connected_agents) { [connection1, connection2, connection3] }
+ let(:kas_client) { instance_double(Gitlab::Kas::Client, get_connected_agents: connected_agents) }
+
+ subject do
+ batch_sync do
+ resolve(described_class, obj: agent1, ctx: ctx)
+ end
+ end
+
+ before do
+ allow(Gitlab::Kas::Client).to receive(:new).and_return(kas_client)
+ end
+
+ it 'returns active connections for the agent' do
+ expect(subject).to contain_exactly(connection1, connection2)
+ end
+
+ it 'queries KAS once when multiple agents are requested' do
+ expect(kas_client).to receive(:get_connected_agents).once
+
+ response = batch_sync do
+ resolve(described_class, obj: agent1, ctx: ctx)
+ resolve(described_class, obj: agent2, ctx: ctx)
+ end
+
+ expect(response).to contain_exactly(connection3)
+ end
+
+ context 'an error is returned from the KAS client' do
+ before do
+ allow(kas_client).to receive(:get_connected_agents).and_raise(GRPC::DeadlineExceeded)
+ end
+
+ it 'raises a graphql error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable, 'GRPC::DeadlineExceeded')
+ end
+ end
+
+ context 'user does not have permission' do
+ let(:user) { create(:user) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/project_pipelines_resolver_spec.rb b/spec/graphql/resolvers/project_pipelines_resolver_spec.rb
index c7c00f54c0c..51a63e66b93 100644
--- a/spec/graphql/resolvers/project_pipelines_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_pipelines_resolver_spec.rb
@@ -11,15 +11,23 @@ RSpec.describe Resolvers::ProjectPipelinesResolver do
let(:current_user) { create(:user) }
- before do
- project.add_developer(current_user)
+ context 'when the user does have access' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'resolves only MRs for the passed merge request' do
+ expect(resolve_pipelines).to contain_exactly(pipeline)
+ end
end
- def resolve_pipelines
- resolve(described_class, obj: project, ctx: { current_user: current_user })
+ context 'when the user does not have access' do
+ it 'does not return pipeline data' do
+ expect(resolve_pipelines).to be_empty
+ end
end
- it 'resolves only MRs for the passed merge request' do
- expect(resolve_pipelines).to contain_exactly(pipeline)
+ def resolve_pipelines
+ resolve(described_class, obj: project, ctx: { current_user: current_user })
end
end
diff --git a/spec/graphql/types/base_field_spec.rb b/spec/graphql/types/base_field_spec.rb
index 82efd618e38..31d07f701e8 100644
--- a/spec/graphql/types/base_field_spec.rb
+++ b/spec/graphql/types/base_field_spec.rb
@@ -154,6 +154,17 @@ RSpec.describe Types::BaseField do
end
end
+ describe '#resolve' do
+ context "late_extensions is given" do
+ it 'registers the late extensions after the regular extensions' do
+ extension_class = Class.new(GraphQL::Schema::Field::ConnectionExtension)
+ field = described_class.new(name: 'test', type: GraphQL::Types::String.connection_type, null: true, late_extensions: [extension_class])
+
+ expect(field.extensions.last.class).to be(extension_class)
+ end
+ end
+ end
+
describe '#description' do
context 'feature flag given' do
let(:field) { described_class.new(name: 'test', type: GraphQL::Types::String, feature_flag: flag, null: false, description: 'Test description.') }
diff --git a/spec/graphql/types/board_list_type_spec.rb b/spec/graphql/types/board_list_type_spec.rb
index 7976936fc1f..d78d87c57bd 100644
--- a/spec/graphql/types/board_list_type_spec.rb
+++ b/spec/graphql/types/board_list_type_spec.rb
@@ -3,11 +3,36 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['BoardList'] do
+ include GraphqlHelpers
+ include Gitlab::Graphql::Laziness
+
specify { expect(described_class.graphql_name).to eq('BoardList') }
it 'has specific fields' do
- expected_fields = %w[id list_type position label issues_count issues]
+ expected_fields = %w[id title list_type position label issues_count issues]
expect(described_class).to include_graphql_fields(*expected_fields)
end
+
+ describe 'issues field' do
+ subject { described_class.fields['issues'] }
+
+ it 'has a correct extension' do
+ is_expected.to have_graphql_extension(Gitlab::Graphql::Board::IssuesConnectionExtension)
+ end
+ end
+
+ describe 'title' do
+ subject(:field) { described_class.fields['title'] }
+
+ it 'preloads the label association' do
+ a, b, c = create_list(:list, 3).map { _1.class.find(_1.id) }
+
+ baseline = ActiveRecord::QueryRecorder.new { force(resolve_field(field, a)) }
+
+ expect do
+ [resolve_field(field, b), resolve_field(field, c)].each { force _1 }
+ end.not_to exceed_query_limit(baseline)
+ end
+ end
end
diff --git a/spec/graphql/types/ci/pipeline_type_spec.rb b/spec/graphql/types/ci/pipeline_type_spec.rb
index 9ba4252bcd5..8c849114cf6 100644
--- a/spec/graphql/types/ci/pipeline_type_spec.rb
+++ b/spec/graphql/types/ci/pipeline_type_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Types::Ci::PipelineType do
]
if Gitlab.ee?
- expected_fields += %w[security_report_summary security_report_findings code_quality_reports]
+ expected_fields += %w[security_report_summary security_report_findings code_quality_reports dast_profile]
end
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/ci/runner_type_spec.rb b/spec/graphql/types/ci/runner_type_spec.rb
index cff4c459d79..cf8650a4a03 100644
--- a/spec/graphql/types/ci/runner_type_spec.rb
+++ b/spec/graphql/types/ci/runner_type_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe GitlabSchema.types['CiRunner'] do
expected_fields = %w[
id description contacted_at maximum_timeout access_level active status
version short_sha revision locked run_untagged ip_address runner_type tag_list
- project_count job_count
+ project_count job_count admin_url user_permissions
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/clusters/agent_token_type_spec.rb b/spec/graphql/types/clusters/agent_token_type_spec.rb
new file mode 100644
index 00000000000..c872d201fd9
--- /dev/null
+++ b/spec/graphql/types/clusters/agent_token_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['ClusterAgentToken'] do
+ let(:fields) { %i[cluster_agent created_at created_by_user description id last_used_at name] }
+
+ it { expect(described_class.graphql_name).to eq('ClusterAgentToken') }
+
+ it { expect(described_class).to require_graphql_authorizations(:admin_cluster) }
+
+ it { expect(described_class).to have_graphql_fields(fields) }
+end
diff --git a/spec/graphql/types/clusters/agent_type_spec.rb b/spec/graphql/types/clusters/agent_type_spec.rb
new file mode 100644
index 00000000000..4b4b601b230
--- /dev/null
+++ b/spec/graphql/types/clusters/agent_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['ClusterAgent'] do
+ let(:fields) { %i[created_at created_by_user id name project updated_at tokens web_path connections] }
+
+ it { expect(described_class.graphql_name).to eq('ClusterAgent') }
+
+ it { expect(described_class).to require_graphql_authorizations(:admin_cluster) }
+
+ it { expect(described_class).to have_graphql_fields(fields) }
+end
diff --git a/spec/graphql/types/container_expiration_policy_older_than_enum_spec.rb b/spec/graphql/types/container_expiration_policy_older_than_enum_spec.rb
index 72ab605f2e6..1989b87a28f 100644
--- a/spec/graphql/types/container_expiration_policy_older_than_enum_spec.rb
+++ b/spec/graphql/types/container_expiration_policy_older_than_enum_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['ContainerExpirationPolicyOlderThanEnum'] do
- let_it_be(:expected_values) { %w[SEVEN_DAYS FOURTEEN_DAYS THIRTY_DAYS NINETY_DAYS] }
+ let_it_be(:expected_values) { %w[SEVEN_DAYS FOURTEEN_DAYS THIRTY_DAYS SIXTY_DAYS NINETY_DAYS] }
it_behaves_like 'exposing container expiration policy option', :older_than
end
diff --git a/spec/graphql/types/error_tracking/sentry_detailed_error_type_spec.rb b/spec/graphql/types/error_tracking/sentry_detailed_error_type_spec.rb
index 8723c212486..09746750adc 100644
--- a/spec/graphql/types/error_tracking/sentry_detailed_error_type_spec.rb
+++ b/spec/graphql/types/error_tracking/sentry_detailed_error_type_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe GitlabSchema.types['SentryDetailedError'] do
it 'exposes the expected fields' do
expected_fields = %i[
id
+ integrated
sentryId
title
type
diff --git a/spec/graphql/types/issue_type_spec.rb b/spec/graphql/types/issue_type_spec.rb
index 559f347810b..c0a0fdf3b0b 100644
--- a/spec/graphql/types/issue_type_spec.rb
+++ b/spec/graphql/types/issue_type_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe GitlabSchema.types['Issue'] do
confidential hidden discussion_locked upvotes downvotes merge_requests_count user_notes_count user_discussions_count web_path web_url relative_position
emails_disabled subscribed time_estimate total_time_spent human_time_estimate human_total_time_spent closed_at created_at updated_at task_completion_status
design_collection alert_management_alert severity current_user_todos moved moved_to
- create_note_email timelogs project_id]
+ create_note_email timelogs project_id customer_relations_contacts]
fields.each do |field_name|
expect(described_class).to have_graphql_field(field_name)
diff --git a/spec/graphql/types/kas/agent_configuration_type_spec.rb b/spec/graphql/types/kas/agent_configuration_type_spec.rb
new file mode 100644
index 00000000000..e6cccfa56d2
--- /dev/null
+++ b/spec/graphql/types/kas/agent_configuration_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['AgentConfiguration'] do
+ let(:fields) { %i[agent_name] }
+
+ it { expect(described_class.graphql_name).to eq('AgentConfiguration') }
+ it { expect(described_class.description).to eq('Configuration details for an Agent') }
+ it { expect(described_class).to have_graphql_fields(fields) }
+end
diff --git a/spec/graphql/types/kas/agent_connection_type_spec.rb b/spec/graphql/types/kas/agent_connection_type_spec.rb
new file mode 100644
index 00000000000..0990d02af11
--- /dev/null
+++ b/spec/graphql/types/kas/agent_connection_type_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Kas::AgentConnectionType do
+ include GraphqlHelpers
+
+ let(:fields) { %i[connected_at connection_id metadata] }
+
+ it { expect(described_class.graphql_name).to eq('ConnectedAgent') }
+ it { expect(described_class.description).to eq('Connection details for an Agent') }
+ it { expect(described_class).to have_graphql_fields(fields) }
+
+ describe '#connected_at' do
+ let(:connected_at) { double(Google::Protobuf::Timestamp, seconds: 123456, nanos: 654321) }
+ let(:object) { double(Gitlab::Agent::AgentTracker::ConnectedAgentInfo, connected_at: connected_at) }
+
+ it 'converts the seconds value to a timestamp' do
+ expect(resolve_field(:connected_at, object)).to eq(Time.at(connected_at.seconds))
+ end
+ end
+end
diff --git a/spec/graphql/types/kas/agent_metadata_type_spec.rb b/spec/graphql/types/kas/agent_metadata_type_spec.rb
new file mode 100644
index 00000000000..ebc12ebb72a
--- /dev/null
+++ b/spec/graphql/types/kas/agent_metadata_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Kas::AgentMetadataType do
+ include GraphqlHelpers
+
+ let(:fields) { %i[version commit pod_namespace pod_name] }
+
+ it { expect(described_class.graphql_name).to eq('AgentMetadata') }
+ it { expect(described_class.description).to eq('Information about a connected Agent') }
+ it { expect(described_class).to have_graphql_fields(fields) }
+end
diff --git a/spec/graphql/types/packages/nuget/metadatum_type_spec.rb b/spec/graphql/types/packages/nuget/metadatum_type_spec.rb
index e5baa7522e4..94a1dbaee43 100644
--- a/spec/graphql/types/packages/nuget/metadatum_type_spec.rb
+++ b/spec/graphql/types/packages/nuget/metadatum_type_spec.rb
@@ -10,4 +10,10 @@ RSpec.describe GitlabSchema.types['NugetMetadata'] do
expect(described_class).to include_graphql_fields(*expected_fields)
end
+
+ %w[projectUrl licenseUrl iconUrl].each do |optional_field|
+ it "#{optional_field} can be null" do
+ expect(described_class.fields[optional_field].type).to be_nullable
+ end
+ end
end
diff --git a/spec/graphql/types/packages/package_type_spec.rb b/spec/graphql/types/packages/package_type_spec.rb
index 07573044abb..3267c765dc7 100644
--- a/spec/graphql/types/packages/package_type_spec.rb
+++ b/spec/graphql/types/packages/package_type_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe GitlabSchema.types['Package'] do
created_at updated_at
project
tags pipelines metadata versions
- status
+ status can_destroy
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/permission_types/ci/runner_spec.rb b/spec/graphql/types/permission_types/ci/runner_spec.rb
new file mode 100644
index 00000000000..e5fbbb346e4
--- /dev/null
+++ b/spec/graphql/types/permission_types/ci/runner_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::PermissionTypes::Ci::Runner do
+ it do
+ expected_permissions = [
+ :read_runner, :update_runner, :delete_runner
+ ]
+
+ expected_permissions.each do |permission|
+ expect(described_class).to have_graphql_field(permission)
+ end
+ end
+end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index d825bd7ebd4..45a718683be 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -33,6 +33,7 @@ RSpec.describe GitlabSchema.types['Project'] do
issue_status_counts terraform_states alert_management_integrations
container_repositories container_repositories_count
pipeline_analytics squash_read_only sast_ci_configuration
+ cluster_agent cluster_agents agent_configurations
ci_template timelogs
]
@@ -186,7 +187,7 @@ RSpec.describe GitlabSchema.types['Project'] do
expect(analyzer['enabled']).to eq(true)
end
- context "with guest user" do
+ context 'with guest user' do
before do
project.add_guest(user)
end
@@ -194,7 +195,7 @@ RSpec.describe GitlabSchema.types['Project'] do
context 'when project is private' do
let(:project) { create(:project, :private, :repository) }
- it "returns no configuration" do
+ it 'returns no configuration' do
secure_analyzers_prefix = subject.dig('data', 'project', 'sastCiConfiguration')
expect(secure_analyzers_prefix).to be_nil
end
@@ -214,7 +215,7 @@ RSpec.describe GitlabSchema.types['Project'] do
end
end
- context "with non-member user" do
+ context 'with non-member user', :sidekiq_inline do
before do
project.team.truncate
end
@@ -222,7 +223,7 @@ RSpec.describe GitlabSchema.types['Project'] do
context 'when project is private' do
let(:project) { create(:project, :private, :repository) }
- it "returns no configuration" do
+ it 'returns no configuration' do
secure_analyzers_prefix = subject.dig('data', 'project', 'sastCiConfiguration')
expect(secure_analyzers_prefix).to be_nil
end
@@ -240,7 +241,7 @@ RSpec.describe GitlabSchema.types['Project'] do
end
context 'when repository is accessible only by team members' do
- it "returns no configuration" do
+ it 'returns no configuration' do
project.project_feature.update!(
merge_requests_access_level: ProjectFeature::DISABLED,
builds_access_level: ProjectFeature::DISABLED,
@@ -458,4 +459,137 @@ RSpec.describe GitlabSchema.types['Project'] do
it { is_expected.to have_graphql_type(Types::Ci::JobTokenScopeType) }
it { is_expected.to have_graphql_resolver(Resolvers::Ci::JobTokenScopeResolver) }
end
+
+ describe 'agent_configurations' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ agentConfigurations {
+ nodes {
+ agentName
+ }
+ }
+ }
+ }
+ )
+ end
+
+ let(:agent_name) { 'example-agent-name' }
+ let(:kas_client) { instance_double(Gitlab::Kas::Client, list_agent_config_files: [double(agent_name: agent_name)]) }
+
+ subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
+
+ before do
+ project.add_maintainer(user)
+ allow(Gitlab::Kas::Client).to receive(:new).and_return(kas_client)
+ end
+
+ it 'returns configured agents' do
+ agents = subject.dig('data', 'project', 'agentConfigurations', 'nodes')
+
+ expect(agents.count).to eq(1)
+ expect(agents.first['agentName']).to eq(agent_name)
+ end
+ end
+
+ describe 'cluster_agents' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:cluster_agent) { create(:cluster_agent, project: project, name: 'agent-name') }
+ let_it_be(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ clusterAgents {
+ count
+ nodes {
+ id
+ name
+ createdAt
+ updatedAt
+
+ project {
+ id
+ }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'returns associated cluster agents' do
+ agents = subject.dig('data', 'project', 'clusterAgents', 'nodes')
+
+ expect(agents.count).to be(1)
+ expect(agents.first['id']).to eq(cluster_agent.to_global_id.to_s)
+ expect(agents.first['name']).to eq('agent-name')
+ expect(agents.first['createdAt']).to be_present
+ expect(agents.first['updatedAt']).to be_present
+ expect(agents.first['project']['id']).to eq(project.to_global_id.to_s)
+ end
+
+ it 'returns count of cluster agents' do
+ count = subject.dig('data', 'project', 'clusterAgents', 'count')
+
+ expect(count).to be(project.cluster_agents.size)
+ end
+ end
+
+ describe 'cluster_agent' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:cluster_agent) { create(:cluster_agent, project: project, name: 'agent-name') }
+ let_it_be(:agent_token) { create(:cluster_agent_token, agent: cluster_agent) }
+ let_it_be(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ clusterAgent(name: "#{cluster_agent.name}") {
+ id
+
+ tokens {
+ count
+ nodes {
+ id
+ }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'returns associated cluster agents' do
+ agent = subject.dig('data', 'project', 'clusterAgent')
+ tokens = agent.dig('tokens', 'nodes')
+
+ expect(agent['id']).to eq(cluster_agent.to_global_id.to_s)
+
+ expect(tokens.count).to be(1)
+ expect(tokens.first['id']).to eq(agent_token.to_global_id.to_s)
+ end
+
+ it 'returns count of agent tokens' do
+ agent = subject.dig('data', 'project', 'clusterAgent')
+ count = agent.dig('tokens', 'count')
+
+ expect(cluster_agent.agent_tokens.size).to be(count)
+ end
+ end
end
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index 6a43867f1fe..14ef03a64f9 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -27,6 +27,7 @@ RSpec.describe GitlabSchema.types['Query'] do
runner
runners
timelogs
+ board_list
]
expect(described_class).to have_graphql_fields(*expected_fields).at_least
@@ -136,4 +137,14 @@ RSpec.describe GitlabSchema.types['Query'] do
is_expected.to have_graphql_resolver(Resolvers::TimelogResolver)
end
end
+
+ describe 'boardList field' do
+ subject { described_class.fields['boardList'] }
+
+ it 'finds a board list by its gid' do
+ is_expected.to have_graphql_arguments(:id, :issue_filters)
+ is_expected.to have_graphql_type(Types::BoardListType)
+ is_expected.to have_graphql_resolver(Resolvers::BoardListResolver)
+ end
+ end
end
diff --git a/spec/helpers/application_settings_helper_spec.rb b/spec/helpers/application_settings_helper_spec.rb
index ef5f6931d02..3c2ac954fe5 100644
--- a/spec/helpers/application_settings_helper_spec.rb
+++ b/spec/helpers/application_settings_helper_spec.rb
@@ -158,26 +158,6 @@ RSpec.describe ApplicationSettingsHelper do
end
end
- describe '.show_documentation_base_url_field?' do
- subject { helper.show_documentation_base_url_field? }
-
- before do
- stub_feature_flags(help_page_documentation_redirect: feature_flag)
- end
-
- context 'when feature flag is enabled' do
- let(:feature_flag) { true }
-
- it { is_expected.to eq(true) }
- end
-
- context 'when feature flag is disabled' do
- let(:feature_flag) { false }
-
- it { is_expected.to eq(false) }
- end
- end
-
describe '.valid_runner_registrars' do
subject { helper.valid_runner_registrars }
@@ -271,18 +251,6 @@ RSpec.describe ApplicationSettingsHelper do
expect(pending_user_count).to eq 1
end
end
-
- context 'when the new_user_signups_cap is not present' do
- let(:user_cap) { nil }
-
- it { is_expected.to eq 0 }
-
- it 'does not query users unnecessarily' do
- expect(User).not_to receive(:blocked_pending_approval)
-
- pending_user_count
- end
- end
end
describe '#sidekiq_job_limiter_modes_for_select' do
diff --git a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb
index 047a6ca0b7d..7190f2fcd4a 100644
--- a/spec/helpers/avatars_helper_spec.rb
+++ b/spec/helpers/avatars_helper_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe AvatarsHelper do
let_it_be(:user) { create(:user) }
- describe '#project_icon & #group_icon' do
+ describe '#project_icon, #group_icon, #topic_icon' do
shared_examples 'resource with a default avatar' do |source_type|
it 'returns a default avatar div' do
expect(public_send("#{source_type}_icon", *helper_args))
@@ -71,6 +71,18 @@ RSpec.describe AvatarsHelper do
let(:helper_args) { [resource] }
end
end
+
+ context 'when providing a topic' do
+ it_behaves_like 'resource with a default avatar', 'topic' do
+ let(:resource) { create(:topic, name: 'foo') }
+ let(:helper_args) { [resource] }
+ end
+
+ it_behaves_like 'resource with a custom avatar', 'topic' do
+ let(:resource) { create(:topic, avatar: File.open(uploaded_image_temp_path)) }
+ let(:helper_args) { [resource] }
+ end
+ end
end
describe '#avatar_icon_for' do
diff --git a/spec/helpers/ci/runners_helper_spec.rb b/spec/helpers/ci/runners_helper_spec.rb
index 0f15f8be0a9..49ea2ac8d3b 100644
--- a/spec/helpers/ci/runners_helper_spec.rb
+++ b/spec/helpers/ci/runners_helper_spec.rb
@@ -83,7 +83,7 @@ RSpec.describe Ci::RunnersHelper do
data = group_shared_runners_settings_data(group)
expect(data[:update_path]).to eq("/api/v4/groups/#{group.id}")
- expect(data[:shared_runners_availability]).to eq('disabled_and_unoverridable')
+ expect(data[:shared_runners_availability]).to eq(Namespace::SR_DISABLED_AND_UNOVERRIDABLE)
expect(data[:parent_shared_runners_availability]).to eq('enabled')
end
end
@@ -137,16 +137,15 @@ RSpec.describe Ci::RunnersHelper do
using RSpec::Parameterized::TableSyntax
where(:shared_runners_setting, :is_disabled_and_unoverridable) do
- 'enabled' | "false"
- 'disabled_with_override' | "false"
- 'disabled_and_unoverridable' | "true"
+ :shared_runners_enabled | "false"
+ :disabled_with_override | "false"
+ :disabled_and_unoverridable | "true"
end
with_them do
it 'returns the override runner status for project with group' do
- group = create(:group)
- project = create(:project, group: group)
- allow(group).to receive(:shared_runners_setting).and_return(shared_runners_setting)
+ group = create(:group, shared_runners_setting)
+ project = create(:project, group: group, shared_runners_enabled: false)
data = helper.toggle_shared_runners_settings_data(project)
expect(data[:is_disabled_and_unoverridable]).to eq(is_disabled_and_unoverridable)
diff --git a/spec/helpers/container_expiration_policies_helper_spec.rb b/spec/helpers/container_expiration_policies_helper_spec.rb
index 7ad3804e3a9..acb6b017d2c 100644
--- a/spec/helpers/container_expiration_policies_helper_spec.rb
+++ b/spec/helpers/container_expiration_policies_helper_spec.rb
@@ -40,6 +40,7 @@ RSpec.describe ContainerExpirationPoliciesHelper do
{ key: '7d', label: '7 days until tags are automatically removed' },
{ key: '14d', label: '14 days until tags are automatically removed' },
{ key: '30d', label: '30 days until tags are automatically removed' },
+ { key: '60d', label: '60 days until tags are automatically removed' },
{ key: '90d', label: '90 days until tags are automatically removed', default: true }
]
diff --git a/spec/helpers/feature_flags_helper_spec.rb b/spec/helpers/feature_flags_helper_spec.rb
index 9a080736595..228459277ca 100644
--- a/spec/helpers/feature_flags_helper_spec.rb
+++ b/spec/helpers/feature_flags_helper_spec.rb
@@ -3,10 +3,20 @@
require 'spec_helper'
RSpec.describe FeatureFlagsHelper do
+ include Devise::Test::ControllerHelpers
+
let_it_be(:project) { create(:project) }
let_it_be(:feature_flag) { create(:operations_feature_flag, project: project) }
let_it_be(:user) { create(:user) }
+ before do
+ allow(helper).to receive(:can?).and_return(true)
+ allow(helper).to receive(:current_user).and_return(user)
+
+ self.instance_variable_set(:@project, project)
+ self.instance_variable_set(:@feature_flag, feature_flag)
+ end
+
describe '#unleash_api_url' do
subject { helper.unleash_api_url(project) }
@@ -18,4 +28,17 @@ RSpec.describe FeatureFlagsHelper do
it { is_expected.not_to be_empty }
end
+
+ describe '#edit_feature_flag_data' do
+ subject { helper.edit_feature_flag_data }
+
+ it 'contains all the data needed to edit feature flags' do
+ is_expected.to include(endpoint: "/#{project.full_path}/-/feature_flags/#{feature_flag.iid}",
+ project_id: project.id,
+ feature_flags_path: "/#{project.full_path}/-/feature_flags",
+ environments_endpoint: "/#{project.full_path}/-/environments/search.json",
+ strategy_type_docs_page_path: "/help/operations/feature_flags#feature-flag-strategies",
+ environments_scope_docs_path: "/help/ci/environments/index.md#scope-environments-with-specs")
+ end
+ end
end
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index 825d5236b5d..4d647696130 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -146,7 +146,7 @@ RSpec.describe GroupsHelper do
let(:possible_help_texts) do
{
- default_help: "This setting will be applied to all subgroups unless overridden by a group owner",
+ default_help: "Applied to all subgroups unless overridden by a group owner.",
ancestor_locked_but_you_can_override: %r{This setting is applied on <a .+>.+</a>\. You can override the setting or .+},
ancestor_locked_so_ask_the_owner: /This setting is applied on .+\. To share projects in this group with another group, ask the owner to override the setting or remove the share with group lock from .+/,
ancestor_locked_and_has_been_overridden: /This setting is applied on .+ and has been overridden on this subgroup/
diff --git a/spec/helpers/hooks_helper_spec.rb b/spec/helpers/hooks_helper_spec.rb
index 92e082c4974..3b23d705790 100644
--- a/spec/helpers/hooks_helper_spec.rb
+++ b/spec/helpers/hooks_helper_spec.rb
@@ -6,9 +6,10 @@ RSpec.describe HooksHelper do
let(:project) { create(:project) }
let(:project_hook) { create(:project_hook, project: project) }
let(:system_hook) { create(:system_hook) }
- let(:trigger) { 'push_events' }
describe '#link_to_test_hook' do
+ let(:trigger) { 'push_events' }
+
it 'returns project namespaced link' do
expect(helper.link_to_test_hook(project_hook, trigger))
.to include("href=\"#{test_project_hook_path(project, project_hook, trigger: trigger)}\"")
@@ -19,4 +20,24 @@ RSpec.describe HooksHelper do
.to include("href=\"#{test_admin_hook_path(system_hook, trigger: trigger)}\"")
end
end
+
+ describe '#hook_log_path' do
+ context 'with a project hook' do
+ let(:web_hook_log) { create(:web_hook_log, web_hook: project_hook) }
+
+ it 'returns project-namespaced link' do
+ expect(helper.hook_log_path(project_hook, web_hook_log))
+ .to eq(web_hook_log.present.details_path)
+ end
+ end
+
+ context 'with a system hook' do
+ let(:web_hook_log) { create(:web_hook_log, web_hook: system_hook) }
+
+ it 'returns admin-namespaced link' do
+ expect(helper.hook_log_path(system_hook, web_hook_log))
+ .to eq(admin_hook_hook_log_path(system_hook, web_hook_log))
+ end
+ end
+ end
end
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index 3eb3c73cfcc..30049745433 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -133,13 +133,13 @@ RSpec.describe IssuablesHelper do
it 'returns navigation with badges' do
expect(helper.issuables_state_counter_text(:issues, :opened, true))
- .to eq('<span>Open</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm">42</span>')
+ .to eq('<span>Open</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm gl-display-none gl-sm-display-inline-flex">42</span>')
expect(helper.issuables_state_counter_text(:issues, :closed, true))
- .to eq('<span>Closed</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm">42</span>')
+ .to eq('<span>Closed</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm gl-display-none gl-sm-display-inline-flex">42</span>')
expect(helper.issuables_state_counter_text(:merge_requests, :merged, true))
- .to eq('<span>Merged</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm">42</span>')
+ .to eq('<span>Merged</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm gl-display-none gl-sm-display-inline-flex">42</span>')
expect(helper.issuables_state_counter_text(:merge_requests, :all, true))
- .to eq('<span>All</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm">42</span>')
+ .to eq('<span>All</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm gl-display-none gl-sm-display-inline-flex">42</span>')
end
end
@@ -176,7 +176,7 @@ RSpec.describe IssuablesHelper do
it 'returns complete count' do
expect(helper.issuables_state_counter_text(:issues, :opened, true))
- .to eq('<span>Open</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm">1,100</span>')
+ .to eq('<span>Open</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm gl-display-none gl-sm-display-inline-flex">1,100</span>')
end
end
@@ -187,7 +187,7 @@ RSpec.describe IssuablesHelper do
it 'returns truncated count' do
expect(helper.issuables_state_counter_text(:issues, :opened, true))
- .to eq('<span>Open</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm">1.1k</span>')
+ .to eq('<span>Open</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm gl-display-none gl-sm-display-inline-flex">1.1k</span>')
end
end
end
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index f5f26d306fb..850051c7875 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -354,6 +354,7 @@ RSpec.describe IssuesHelper do
let(:group) { create(:group) }
let(:current_user) { double.as_null_object }
let(:issues) { [] }
+ let(:projects) { [] }
it 'returns expected result' do
allow(helper).to receive(:current_user).and_return(current_user)
@@ -367,13 +368,14 @@ RSpec.describe IssuesHelper do
empty_state_svg_path: '#',
full_path: group.full_path,
has_any_issues: issues.to_a.any?.to_s,
+ has_any_projects: any_projects?(projects).to_s,
is_signed_in: current_user.present?.to_s,
jira_integration_path: help_page_url('integration/jira/issues', anchor: 'view-jira-issues'),
rss_path: '#',
sign_in_path: new_user_session_path
}
- expect(helper.group_issues_list_data(group, current_user, issues)).to include(expected)
+ expect(helper.group_issues_list_data(group, current_user, issues, projects)).to include(expected)
end
end
diff --git a/spec/helpers/one_trust_helper_spec.rb b/spec/helpers/one_trust_helper_spec.rb
new file mode 100644
index 00000000000..85c38885304
--- /dev/null
+++ b/spec/helpers/one_trust_helper_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe OneTrustHelper do
+ describe '#one_trust_enabled?' do
+ let(:user) { nil }
+
+ before do
+ stub_config(extra: { one_trust_id: SecureRandom.uuid })
+ allow(helper).to receive(:current_user).and_return(user)
+ end
+
+ subject(:one_trust_enabled?) { helper.one_trust_enabled? }
+
+ context 'with ecomm_instrumentation feature flag disabled' do
+ before do
+ stub_feature_flags(ecomm_instrumentation: false)
+ end
+
+ context 'when id is set and a user is set' do
+ let(:user) { instance_double('User') }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'with ecomm_instrumentation feature flag enabled' do
+ context 'when current user is set' do
+ let(:user) { instance_double('User') }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when no id is set' do
+ before do
+ stub_config(extra: {})
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when id is set and no user is set' do
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+end
diff --git a/spec/helpers/packages_helper_spec.rb b/spec/helpers/packages_helper_spec.rb
index 06c6cccd488..2af572850da 100644
--- a/spec/helpers/packages_helper_spec.rb
+++ b/spec/helpers/packages_helper_spec.rb
@@ -260,4 +260,34 @@ RSpec.describe PackagesHelper do
end
end
end
+
+ describe '#packages_list_data' do
+ let_it_be(:resource) { project }
+ let_it_be(:type) { 'project' }
+
+ let(:expected_result) do
+ {
+ resource_id: resource.id,
+ full_path: resource.full_path,
+ page_type: type
+ }
+ end
+
+ subject(:result) { helper.packages_list_data(type, resource) }
+
+ context 'at a project level' do
+ it 'populates presenter data' do
+ expect(result).to match(hash_including(expected_result))
+ end
+ end
+
+ context 'at a group level' do
+ let_it_be(:resource) { create(:group) }
+ let_it_be(:type) { 'group' }
+
+ it 'populates presenter data' do
+ expect(result).to match(hash_including(expected_result))
+ end
+ end
+ end
end
diff --git a/spec/helpers/projects/cluster_agents_helper_spec.rb b/spec/helpers/projects/cluster_agents_helper_spec.rb
new file mode 100644
index 00000000000..2935a74586b
--- /dev/null
+++ b/spec/helpers/projects/cluster_agents_helper_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ClusterAgentsHelper do
+ describe '#js_cluster_agent_details_data' do
+ let_it_be(:project) { create(:project) }
+
+ let(:agent_name) { 'agent-name' }
+
+ subject { helper.js_cluster_agent_details_data(agent_name, project) }
+
+ it 'returns name' do
+ expect(subject[:agent_name]).to eq(agent_name)
+ end
+
+ it 'returns project path' do
+ expect(subject[:project_path]).to eq(project.full_path)
+ end
+ end
+end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 85b572d3f68..1100f4a3ad5 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -314,13 +314,13 @@ RSpec.describe ProjectsHelper do
end
it 'returns image tag for member avatar' do
- expect(helper).to receive(:image_tag).with(expected, { width: 16, class: %w[avatar avatar-inline s16], alt: "", "data-src" => anything })
+ expect(helper).to receive(:image_tag).with(expected, { width: 16, class: %w[avatar avatar-inline s16], alt: "" })
helper.link_to_member_avatar(user)
end
it 'returns image tag with avatar class' do
- expect(helper).to receive(:image_tag).with(expected, { width: 16, class: %w[avatar avatar-inline s16 any-avatar-class], alt: "", "data-src" => anything })
+ expect(helper).to receive(:image_tag).with(expected, { width: 16, class: %w[avatar avatar-inline s16 any-avatar-class], alt: "" })
helper.link_to_member_avatar(user, avatar_class: "any-avatar-class")
end
@@ -904,6 +904,14 @@ RSpec.describe ProjectsHelper do
it { is_expected.to be_falsey }
end
+
+ context 'the :show_terraform_banner feature flag is disabled' do
+ before do
+ stub_feature_flags(show_terraform_banner: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
end
end
diff --git a/spec/helpers/routing/pseudonymization_helper_spec.rb b/spec/helpers/routing/pseudonymization_helper_spec.rb
index 10563502555..a28a86d1f53 100644
--- a/spec/helpers/routing/pseudonymization_helper_spec.rb
+++ b/spec/helpers/routing/pseudonymization_helper_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe ::Routing::PseudonymizationHelper do
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:subproject) { create(:project, group: subgroup) }
let_it_be(:issue) { create(:issue, project: project) }
let(:merge_request) { create(:merge_request, source_project: project) }
@@ -56,16 +57,16 @@ RSpec.describe ::Routing::PseudonymizationHelper do
end
context 'with controller for groups with subgroups and project' do
- let(:masked_url) { "http://test.host/namespace:#{subgroup.id}/project:#{project.id}"}
+ let(:masked_url) { "http://test.host/namespace:#{subgroup.id}/project:#{subproject.id}"}
before do
allow(helper).to receive(:group).and_return(subgroup)
- allow(helper.project).to receive(:namespace).and_return(subgroup)
+ allow(helper).to receive(:project).and_return(subproject)
allow(Rails.application.routes).to receive(:recognize_path).and_return({
controller: 'projects',
action: 'show',
namespace_id: subgroup.name,
- id: project.name
+ id: subproject.name
})
end
@@ -129,6 +130,29 @@ RSpec.describe ::Routing::PseudonymizationHelper do
end
end
+ describe 'when it raises exception' do
+ context 'calls error tracking' do
+ before do
+ controller.request.path = '/dashboard/issues'
+ controller.request.query_string = 'assignee_username=root'
+ allow(Rails.application.routes).to receive(:recognize_path).and_return({
+ controller: 'dashboard',
+ action: 'issues'
+ })
+ end
+
+ it 'sends error to sentry and returns nil' do
+ allow(helper).to receive(:mask_params).with(anything).and_raise(ActionController::RoutingError, 'Some routing error')
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ ActionController::RoutingError,
+ url: '/dashboard/issues?assignee_username=root').and_call_original
+
+ expect(helper.masked_page_url).to be_nil
+ end
+ end
+ end
+
describe 'when feature flag is disabled' do
before do
stub_feature_flags(mask_page_urls: false)
diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb
index 7b2334ab79e..9e870658870 100644
--- a/spec/helpers/search_helper_spec.rb
+++ b/spec/helpers/search_helper_spec.rb
@@ -248,13 +248,13 @@ RSpec.describe SearchHelper do
it 'uses the correct singular label' do
collection = Kaminari.paginate_array([:foo]).page(1).per(10)
- expect(search_entries_info(collection, scope, 'foo')).to eq("Showing 1 #{label} for<span>&nbsp;<code>foo</code>&nbsp;</span>")
+ expect(search_entries_info(collection, scope, 'foo')).to eq("Showing 1 #{label} for <span>&nbsp;<code>foo</code>&nbsp;</span>")
end
it 'uses the correct plural label' do
collection = Kaminari.paginate_array([:foo] * 23).page(1).per(10)
- expect(search_entries_info(collection, scope, 'foo')).to eq("Showing 1 - 10 of 23 #{label.pluralize} for<span>&nbsp;<code>foo</code>&nbsp;</span>")
+ expect(search_entries_info(collection, scope, 'foo')).to eq("Showing 1 - 10 of 23 #{label.pluralize} for <span>&nbsp;<code>foo</code>&nbsp;</span>")
end
end
diff --git a/spec/helpers/startupjs_helper_spec.rb b/spec/helpers/startupjs_helper_spec.rb
index 6d61c38d4a5..8d429b59291 100644
--- a/spec/helpers/startupjs_helper_spec.rb
+++ b/spec/helpers/startupjs_helper_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe StartupjsHelper do
+ using RSpec::Parameterized::TableSyntax
+
describe '#page_startup_graphql_calls' do
let(:query_location) { 'repository/path_last_commit' }
let(:query_content) do
@@ -17,4 +19,24 @@ RSpec.describe StartupjsHelper do
expect(startup_graphql_calls).to include({ query: query_content, variables: { ref: 'foo' } })
end
end
+
+ describe '#page_startup_graphql_headers' do
+ where(:csrf_token, :feature_category, :expected) do
+ 'abc' | 'web_ide' | { 'X-CSRF-Token' => 'abc', 'x-gitlab-feature-category' => 'web_ide' }
+ '' | '' | { 'X-CSRF-Token' => '', 'x-gitlab-feature-category' => '' }
+ 'abc' | nil | { 'X-CSRF-Token' => 'abc', 'x-gitlab-feature-category' => '' }
+ 'something' | ' ' | { 'X-CSRF-Token' => 'something', 'x-gitlab-feature-category' => '' }
+ end
+
+ with_them do
+ before do
+ allow(helper).to receive(:form_authenticity_token).and_return(csrf_token)
+ ::Gitlab::ApplicationContext.push(feature_category: feature_category)
+ end
+
+ it 'returns hash of headers for GraphQL requests' do
+ expect(helper.page_startup_graphql_headers).to eq(expected)
+ end
+ end
+ end
end
diff --git a/spec/helpers/tab_helper_spec.rb b/spec/helpers/tab_helper_spec.rb
index bd8a8fa174a..346bfc7850c 100644
--- a/spec/helpers/tab_helper_spec.rb
+++ b/spec/helpers/tab_helper_spec.rb
@@ -5,6 +5,60 @@ require 'spec_helper'
RSpec.describe TabHelper do
include ApplicationHelper
+ describe 'gl_tabs_nav' do
+ it 'creates a tabs navigation' do
+ expect(gl_tabs_nav).to match(%r{<ul class=".*" role="tablist"><\/ul>})
+ end
+
+ it 'captures block output' do
+ expect(gl_tabs_nav { "block content" }).to match(/block content/)
+ end
+
+ it 'adds styles classes' do
+ expect(gl_tabs_nav).to match(/class="nav gl-tabs-nav"/)
+ end
+
+ it 'adds custom class' do
+ expect(gl_tabs_nav(class: 'my-class' )).to match(/class=".*my-class.*"/)
+ end
+ end
+
+ describe 'gl_tab_link_to' do
+ before do
+ allow(self).to receive(:current_page?).and_return(false)
+ end
+
+ it 'creates a tab' do
+ expect(gl_tab_link_to('Link', '/url')).to eq('<li class="nav-item" role="presentation"><a class="nav-link gl-tab-nav-item" href="/url">Link</a></li>')
+ end
+
+ it 'creates a tab with block output' do
+ expect(gl_tab_link_to('/url') { 'block content' }).to match(/block content/)
+ end
+
+ it 'creates a tab with custom classes' do
+ expect(gl_tab_link_to('Link', '/url', { class: 'my-class' })).to match(/<a class=".*my-class.*"/)
+ end
+
+ it 'creates an active tab with item_active = true' do
+ expect(gl_tab_link_to('Link', '/url', { item_active: true })).to match(/<a class=".*active gl-tab-nav-item-active gl-tab-nav-item-active-indigo.*"/)
+ end
+
+ context 'when on the active page' do
+ before do
+ allow(self).to receive(:current_page?).and_return(true)
+ end
+
+ it 'creates an active tab' do
+ expect(gl_tab_link_to('Link', '/url')).to match(/<a class=".*active gl-tab-nav-item-active gl-tab-nav-item-active-indigo.*"/)
+ end
+
+ it 'creates an inactive tab with item_active = false' do
+ expect(gl_tab_link_to('Link', '/url', { item_active: false })).not_to match(/<a class=".*active.*"/)
+ end
+ end
+ end
+
describe 'nav_link' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/helpers/time_zone_helper_spec.rb b/spec/helpers/time_zone_helper_spec.rb
index e6cb20b5800..43ad130c4b5 100644
--- a/spec/helpers/time_zone_helper_spec.rb
+++ b/spec/helpers/time_zone_helper_spec.rb
@@ -76,6 +76,18 @@ RSpec.describe TimeZoneHelper, :aggregate_failures do
travel_to Time.find_zone(timezone).local(2021, 7, 20, 15, 30, 45)
end
+ context 'when timezone is `nil`' do
+ it 'returns `nil`' do
+ expect(helper.local_time(nil)).to eq(nil)
+ end
+ end
+
+ context 'when timezone is blank' do
+ it 'returns `nil`' do
+ expect(helper.local_time('')).to eq(nil)
+ end
+ end
+
context 'when a valid timezone is passed' do
it 'returns local time' do
expect(helper.local_time(timezone)).to eq('3:30 PM')
diff --git a/spec/helpers/user_callouts_helper_spec.rb b/spec/helpers/user_callouts_helper_spec.rb
index 794ff5ee945..f738ba855b8 100644
--- a/spec/helpers/user_callouts_helper_spec.rb
+++ b/spec/helpers/user_callouts_helper_spec.rb
@@ -293,4 +293,37 @@ RSpec.describe UserCalloutsHelper do
it { is_expected.to eq(false) }
end
end
+
+ describe '.show_security_newsletter_user_callout?' do
+ let_it_be(:admin) { create(:user, :admin) }
+
+ subject { helper.show_security_newsletter_user_callout? }
+
+ context 'when `current_user` is not an admin' do
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:user_dismissed?).with(described_class::SECURITY_NEWSLETTER_CALLOUT) { false }
+ end
+
+ it { is_expected.to be false }
+ end
+
+ context 'when user has dismissed callout' do
+ before do
+ allow(helper).to receive(:current_user).and_return(admin)
+ allow(helper).to receive(:user_dismissed?).with(described_class::SECURITY_NEWSLETTER_CALLOUT) { true }
+ end
+
+ it { is_expected.to be false }
+ end
+
+ context 'when `current_user` is an admin and user has not dismissed callout' do
+ before do
+ allow(helper).to receive(:current_user).and_return(admin)
+ allow(helper).to receive(:user_dismissed?).with(described_class::SECURITY_NEWSLETTER_CALLOUT) { false }
+ end
+
+ it { is_expected.to be true }
+ end
+ end
end
diff --git a/spec/initializers/carrierwave_patch_spec.rb b/spec/initializers/carrierwave_patch_spec.rb
index cbdad4aa9ac..e219db2299d 100644
--- a/spec/initializers/carrierwave_patch_spec.rb
+++ b/spec/initializers/carrierwave_patch_spec.rb
@@ -81,19 +81,32 @@ RSpec.describe 'CarrierWave::Storage::Fog::File' do
end
describe '#authenticated_url' do
+ let(:expire_at) { 24.hours.from_now }
+ let(:options) { { expire_at: expire_at } }
+
it 'has an authenticated URL' do
- expect(subject.authenticated_url).to eq("https://sa.blob.core.windows.net/test_container/test_blob?token")
+ expect(subject.authenticated_url(options)).to eq("https://sa.blob.core.windows.net/test_container/test_blob?token")
end
context 'with custom expire_at' do
it 'properly sets expires param' do
- expire_at = 24.hours.from_now
+ expect_next_instance_of(Fog::Storage::AzureRM::File) do |file|
+ expect(file).to receive(:url).with(expire_at, options).and_call_original
+ end
+
+ expect(subject.authenticated_url(options)).to eq("https://sa.blob.core.windows.net/test_container/test_blob?token")
+ end
+ end
+
+ context 'with content_disposition option' do
+ let(:options) { { expire_at: expire_at, content_disposition: 'attachment' } }
+ it 'passes options' do
expect_next_instance_of(Fog::Storage::AzureRM::File) do |file|
- expect(file).to receive(:url).with(expire_at).and_call_original
+ expect(file).to receive(:url).with(expire_at, options).and_call_original
end
- expect(subject.authenticated_url(expire_at: expire_at)).to eq("https://sa.blob.core.windows.net/test_container/test_blob?token")
+ expect(subject.authenticated_url(options)).to eq("https://sa.blob.core.windows.net/test_container/test_blob?token")
end
end
end
diff --git a/spec/initializers/database_config_spec.rb b/spec/initializers/database_config_spec.rb
index 5ddfbd64c23..23f7fd06254 100644
--- a/spec/initializers/database_config_spec.rb
+++ b/spec/initializers/database_config_spec.rb
@@ -2,19 +2,11 @@
require 'spec_helper'
-RSpec.describe 'Database config initializer' do
+RSpec.describe 'Database config initializer', :reestablished_active_record_base do
subject do
load Rails.root.join('config/initializers/database_config.rb')
end
- around do |example|
- original_config = ActiveRecord::Base.connection_db_config
-
- example.run
-
- ActiveRecord::Base.establish_connection(original_config)
- end
-
before do
allow(Gitlab::Runtime).to receive(:max_threads).and_return(max_threads)
end
diff --git a/spec/initializers/lograge_spec.rb b/spec/initializers/lograge_spec.rb
index 4d2aa6e74de..9e58fa289ac 100644
--- a/spec/initializers/lograge_spec.rb
+++ b/spec/initializers/lograge_spec.rb
@@ -230,39 +230,21 @@ RSpec.describe 'lograge', type: :request do
end
end
- context 'when load balancing is enabled' do
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
- end
-
- context 'with db payload' do
- context 'when RequestStore is enabled', :request_store do
- it 'includes db counters for load balancing' do
- subscriber.process_action(event)
-
- expect(log_data).to include(*db_load_balancing_logging_keys)
- end
- end
-
- context 'when RequestStore is disabled' do
- it 'does not include db counters for load balancing' do
- subscriber.process_action(event)
+ context 'with db payload' do
+ context 'when RequestStore is enabled', :request_store do
+ it 'includes db counters for load balancing' do
+ subscriber.process_action(event)
- expect(log_data).not_to include(*db_load_balancing_logging_keys)
- end
+ expect(log_data).to include(*db_load_balancing_logging_keys)
end
end
- end
- context 'when load balancing is disabled' do
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(false)
- end
+ context 'when RequestStore is disabled' do
+ it 'does not include db counters for load balancing' do
+ subscriber.process_action(event)
- it 'does not include db counters for load balancing' do
- subscriber.process_action(event)
-
- expect(log_data).not_to include(*db_load_balancing_logging_keys)
+ expect(log_data).not_to include(*db_load_balancing_logging_keys)
+ end
end
end
end
diff --git a/spec/initializers/zz_metrics_spec.rb b/spec/initializers/zz_metrics_spec.rb
deleted file mode 100644
index ad93c30ee22..00000000000
--- a/spec/initializers/zz_metrics_spec.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'instrument_classes' do
- let(:config) { double(:config) }
-
- before do
- allow(config).to receive(:instrument_method)
- allow(config).to receive(:instrument_methods)
- allow(config).to receive(:instrument_instance_method)
- allow(config).to receive(:instrument_instance_methods)
- allow(Gitlab::Application).to receive(:configure)
- end
-
- it 'can autoload and instrument all files' do
- require_relative '../../config/initializers/zz_metrics'
- expect { instrument_classes(config) }.not_to raise_error
- end
-end
diff --git a/spec/lib/api/base_spec.rb b/spec/lib/api/base_spec.rb
new file mode 100644
index 00000000000..8513b800273
--- /dev/null
+++ b/spec/lib/api/base_spec.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# rubocop:disable Rails/HttpPositionalArguments
+RSpec.describe ::API::Base do
+ let(:app_hello) do
+ route = double(:route, request_method: 'GET', path: '/:version/test/hello')
+ double(:endpoint, route: route, options: { for: api_handler, path: ["hello"] }, namespace: '/test')
+ end
+
+ let(:app_hi) do
+ route = double(:route, request_method: 'GET', path: '/:version//test/hi')
+ double(:endpoint, route: route, options: { for: api_handler, path: ["hi"] }, namespace: '/test')
+ end
+
+ describe 'declare feature categories at handler level for all routes' do
+ let(:api_handler) do
+ Class.new(described_class) do
+ feature_category :foo
+ urgency :medium
+
+ namespace '/test' do
+ get 'hello' do
+ end
+ post 'hi' do
+ end
+ end
+ end
+ end
+
+ it 'sets feature category for a particular route', :aggregate_failures do
+ expect(api_handler.feature_category_for_app(app_hello)).to eq(:foo)
+ expect(api_handler.feature_category_for_app(app_hi)).to eq(:foo)
+ end
+
+ it 'sets request urgency for a particular route', :aggregate_failures do
+ expect(api_handler.urgency_for_app(app_hello)).to be_request_urgency(:medium)
+ expect(api_handler.urgency_for_app(app_hi)).to be_request_urgency(:medium)
+ end
+ end
+
+ describe 'declare feature categories at route level' do
+ let(:api_handler) do
+ Class.new(described_class) do
+ namespace '/test' do
+ get 'hello', feature_category: :foo, urgency: :low do
+ end
+ post 'hi', feature_category: :bar, urgency: :medium do
+ end
+ end
+ end
+ end
+
+ it 'sets feature category for a particular route', :aggregate_failures do
+ expect(api_handler.feature_category_for_app(app_hello)).to eq(:foo)
+ expect(api_handler.feature_category_for_app(app_hi)).to eq(:bar)
+ end
+
+ it 'sets request urgency for a particular route', :aggregate_failures do
+ expect(api_handler.urgency_for_app(app_hello)).to be_request_urgency(:low)
+ expect(api_handler.urgency_for_app(app_hi)).to be_request_urgency(:medium)
+ end
+ end
+
+ describe 'declare feature categories at both handler level and route level' do
+ let(:api_handler) do
+ Class.new(described_class) do
+ feature_category :foo, ['/test/hello']
+ urgency :low, ['/test/hello']
+
+ namespace '/test' do
+ get 'hello' do
+ end
+ post 'hi', feature_category: :bar, urgency: :medium do
+ end
+ end
+ end
+ end
+
+ it 'sets feature category for a particular route', :aggregate_failures do
+ expect(api_handler.feature_category_for_app(app_hello)).to eq(:foo)
+ expect(api_handler.feature_category_for_app(app_hi)).to eq(:bar)
+ end
+
+ it 'sets request urgency for a particular route', :aggregate_failures do
+ expect(api_handler.urgency_for_app(app_hello)).to be_request_urgency(:low)
+ expect(api_handler.urgency_for_app(app_hi)).to be_request_urgency(:medium)
+ end
+ end
+end
+# rubocop:enable Rails/HttpPositionalArguments
diff --git a/spec/lib/api/ci/helpers/runner_spec.rb b/spec/lib/api/ci/helpers/runner_spec.rb
index 99f2db544a5..cc871d66d40 100644
--- a/spec/lib/api/ci/helpers/runner_spec.rb
+++ b/spec/lib/api/ci/helpers/runner_spec.rb
@@ -15,8 +15,8 @@ RSpec.describe API::Ci::Helpers::Runner do
it 'handles sticking of a build when a build ID is specified' do
allow(helper).to receive(:params).and_return(id: build.id)
- expect(Gitlab::Database::LoadBalancing::RackMiddleware)
- .to receive(:stick_or_unstick)
+ expect(ApplicationRecord.sticking)
+ .to receive(:stick_or_unstick_request)
.with({}, :build, build.id)
helper.current_job
@@ -25,8 +25,8 @@ RSpec.describe API::Ci::Helpers::Runner do
it 'does not handle sticking if no build ID was specified' do
allow(helper).to receive(:params).and_return({})
- expect(Gitlab::Database::LoadBalancing::RackMiddleware)
- .not_to receive(:stick_or_unstick)
+ expect(ApplicationRecord.sticking)
+ .not_to receive(:stick_or_unstick_request)
helper.current_job
end
@@ -44,8 +44,8 @@ RSpec.describe API::Ci::Helpers::Runner do
it 'handles sticking of a runner if a token is specified' do
allow(helper).to receive(:params).and_return(token: runner.token)
- expect(Gitlab::Database::LoadBalancing::RackMiddleware)
- .to receive(:stick_or_unstick)
+ expect(ApplicationRecord.sticking)
+ .to receive(:stick_or_unstick_request)
.with({}, :runner, runner.token)
helper.current_runner
@@ -54,8 +54,8 @@ RSpec.describe API::Ci::Helpers::Runner do
it 'does not handle sticking if no token was specified' do
allow(helper).to receive(:params).and_return({})
- expect(Gitlab::Database::LoadBalancing::RackMiddleware)
- .not_to receive(:stick_or_unstick)
+ expect(ApplicationRecord.sticking)
+ .not_to receive(:stick_or_unstick_request)
helper.current_runner
end
diff --git a/spec/lib/api/entities/clusters/agent_authorization_spec.rb b/spec/lib/api/entities/clusters/agent_authorization_spec.rb
index 101a8af4ac4..3a1deb43bf8 100644
--- a/spec/lib/api/entities/clusters/agent_authorization_spec.rb
+++ b/spec/lib/api/entities/clusters/agent_authorization_spec.rb
@@ -3,15 +3,34 @@
require 'spec_helper'
RSpec.describe API::Entities::Clusters::AgentAuthorization do
- let_it_be(:authorization) { create(:agent_group_authorization) }
-
subject { described_class.new(authorization).as_json }
- it 'includes basic fields' do
- expect(subject).to include(
- id: authorization.agent_id,
- config_project: a_hash_including(id: authorization.agent.project_id),
- configuration: authorization.config
- )
+ shared_examples 'generic authorization' do
+ it 'includes shared fields' do
+ expect(subject).to include(
+ id: authorization.agent_id,
+ config_project: a_hash_including(id: authorization.agent.project_id),
+ configuration: authorization.config
+ )
+ end
+ end
+
+ context 'project authorization' do
+ let(:authorization) { create(:agent_project_authorization) }
+
+ include_examples 'generic authorization'
+ end
+
+ context 'group authorization' do
+ let(:authorization) { create(:agent_group_authorization) }
+
+ include_examples 'generic authorization'
+ end
+
+ context 'implicit authorization' do
+ let(:agent) { create(:cluster_agent) }
+ let(:authorization) { Clusters::Agents::ImplicitAuthorization.new(agent: agent) }
+
+ include_examples 'generic authorization'
end
end
diff --git a/spec/lib/api/entities/user_spec.rb b/spec/lib/api/entities/user_spec.rb
index 860f007f284..9c9a157d68a 100644
--- a/spec/lib/api/entities/user_spec.rb
+++ b/spec/lib/api/entities/user_spec.rb
@@ -3,10 +3,13 @@
require 'spec_helper'
RSpec.describe API::Entities::User do
- let(:user) { create(:user) }
+ let_it_be(:timezone) { 'America/Los_Angeles' }
+
+ let(:user) { create(:user, timezone: timezone) }
let(:current_user) { create(:user) }
+ let(:entity) { described_class.new(user, current_user: current_user) }
- subject { described_class.new(user, current_user: current_user).as_json }
+ subject { entity.as_json }
it 'exposes correct attributes' do
expect(subject).to include(:bio, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization, :job_title, :work_information, :pronouns)
@@ -35,4 +38,10 @@ RSpec.describe API::Entities::User do
expect(subject[:bot]).to eq(true)
end
end
+
+ it 'exposes local_time' do
+ local_time = '2:30 PM'
+ expect(entity).to receive(:local_time).with(timezone).and_return(local_time)
+ expect(subject[:local_time]).to eq(local_time)
+ end
end
diff --git a/spec/lib/api/every_api_endpoint_spec.rb b/spec/lib/api/every_api_endpoint_spec.rb
index ebf75e733d0..5fe14823a29 100644
--- a/spec/lib/api/every_api_endpoint_spec.rb
+++ b/spec/lib/api/every_api_endpoint_spec.rb
@@ -5,11 +5,11 @@ require 'spec_helper'
RSpec.describe 'Every API endpoint' do
context 'feature categories' do
let_it_be(:feature_categories) do
- YAML.load_file(Rails.root.join('config', 'feature_categories.yml')).map(&:to_sym).to_set
+ Gitlab::FeatureCategories.default.categories.map(&:to_sym).to_set
end
let_it_be(:api_endpoints) do
- API::API.routes.map do |route|
+ Gitlab::RequestEndpoints.all_api_endpoints.map do |route|
[route.app.options[:for], API::Base.path_for_app(route.app)]
end
end
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index 587fe60860a..37e040a422b 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -32,15 +32,11 @@ RSpec.describe API::Helpers do
helper
end
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
- end
-
it 'handles sticking when a user could be found' do
allow_any_instance_of(API::Helpers).to receive(:initial_current_user).and_return(user)
- expect(Gitlab::Database::LoadBalancing::RackMiddleware)
- .to receive(:stick_or_unstick).with(any_args, :user, 42)
+ expect(ApplicationRecord.sticking)
+ .to receive(:stick_or_unstick_request).with(any_args, :user, 42)
get 'user'
@@ -50,8 +46,8 @@ RSpec.describe API::Helpers do
it 'does not handle sticking if no user could be found' do
allow_any_instance_of(API::Helpers).to receive(:initial_current_user).and_return(nil)
- expect(Gitlab::Database::LoadBalancing::RackMiddleware)
- .not_to receive(:stick_or_unstick)
+ expect(ApplicationRecord.sticking)
+ .not_to receive(:stick_or_unstick_request)
get 'user'
diff --git a/spec/lib/api/validations/validators/project_portable_spec.rb b/spec/lib/api/validations/validators/project_portable_spec.rb
new file mode 100644
index 00000000000..8c1a49d5214
--- /dev/null
+++ b/spec/lib/api/validations/validators/project_portable_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Validations::Validators::ProjectPortable do
+ include ApiValidatorsHelpers
+
+ let(:portable) { 'labels' }
+ let(:not_portable) { 'project_members' }
+
+ subject do
+ described_class.new(['test'], {}, false, scope.new)
+ end
+
+ context 'valid portable' do
+ it 'does not raise a validation error' do
+ expect_no_validation_error('test' => portable)
+ end
+ end
+
+ context 'empty params' do
+ it 'raises a validation error' do
+ expect_validation_error('test' => nil)
+ expect_validation_error('test' => '')
+ end
+ end
+
+ context 'not portable' do
+ it 'raises a validation error' do
+      expect_validation_error('test' => not_portable) # project_members is not a portable relation
+ end
+ end
+end
diff --git a/spec/lib/atlassian/jira_connect/asymmetric_jwt_spec.rb b/spec/lib/atlassian/jira_connect/asymmetric_jwt_spec.rb
new file mode 100644
index 00000000000..c57d8ece86b
--- /dev/null
+++ b/spec/lib/atlassian/jira_connect/asymmetric_jwt_spec.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Atlassian::JiraConnect::AsymmetricJwt do
+ describe '#valid?' do
+ subject(:asymmetric_jwt) { described_class.new(jwt, verification_claims) }
+
+ let(:verification_claims) { jwt_claims }
+ let(:jwt_claims) { { aud: aud, iss: client_key, qsh: qsh } }
+ let(:aud) { 'https://test.host/-/jira_connect' }
+ let(:client_key) { '1234' }
+ let(:qsh) { Atlassian::Jwt.create_query_string_hash('https://gitlab.test/events/installed', 'POST', 'https://gitlab.test') }
+ let(:public_key_id) { '123e4567-e89b-12d3-a456-426614174000' }
+ let(:jwt_headers) { { kid: public_key_id } }
+ let(:private_key) { OpenSSL::PKey::RSA.generate 2048 }
+ let(:jwt) { JWT.encode(jwt_claims, private_key, 'RS256', jwt_headers) }
+ let(:public_key) { private_key.public_key }
+
+ before do
+ stub_request(:get, "https://connect-install-keys.atlassian.com/#{public_key_id}").to_return(body: public_key.to_s, status: 200)
+ end
+
+ it 'returns true when verified with public key from CDN' do
+ expect(JWT).to receive(:decode).twice.and_call_original
+
+ expect(asymmetric_jwt).to be_valid
+
+ expect(WebMock).to have_requested(:get, "https://connect-install-keys.atlassian.com/#{public_key_id}")
+ end
+
+ context 'JWT does not contain a key ID' do
+ let(:public_key_id) { nil }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'JWT contains a key ID that is not a valid UUID4' do
+ let(:public_key_id) { '123' }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'public key can not be retrieved' do
+ before do
+ stub_request(:get, "https://connect-install-keys.atlassian.com/#{public_key_id}").to_return(body: '', status: 404)
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+
+    context 'retrieving the public key raises an error' do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).and_raise(SocketError)
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'token decoding raises an error' do
+ before do
+ allow(JWT).to receive(:decode).and_call_original
+ allow(JWT).to receive(:decode).with(
+ jwt, anything, true, aud: anything, verify_aud: true, iss: client_key, verify_iss: true, algorithm: 'RS256'
+ ).and_raise(JWT::DecodeError)
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when iss could not be verified' do
+ let(:verification_claims) { { aud: jwt_claims[:aud], iss: 'some other iss', qsh: jwt_claims[:qsh] } }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when qsh could not be verified' do
+ let(:verification_claims) { { aud: jwt_claims[:aud], iss: client_key, qsh: 'some other qsh' } }
+
+ it { is_expected.not_to be_valid }
+ end
+ end
+
+ describe '#iss_claim' do
+ subject { asymmetric_jwt.iss_claim }
+
+ let(:asymmetric_jwt) { described_class.new('123', anything) }
+
+ it { is_expected.to eq(nil) }
+
+ context 'when jwt is verified' do
+ before do
+ asymmetric_jwt.instance_variable_set(:@claims, { 'iss' => 'client_key' })
+ end
+
+ it { is_expected.to eq('client_key') }
+ end
+ end
+end
diff --git a/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
index 82bcbdc4561..8ccc3253a46 100644
--- a/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
+++ b/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
@@ -46,12 +46,22 @@ RSpec.describe Atlassian::JiraConnect::Serializers::DeploymentEntity do
using RSpec::Parameterized::TableSyntax
where(:env_name, :env_type) do
+ 'PRODUCTION' | 'production'
'prod' | 'production'
+ 'prod-east-2' | 'production'
+ 'us-prod-east' | 'production'
+ 'fe-production' | 'production'
'test' | 'testing'
+ 'qa-env-2' | 'testing'
'staging' | 'staging'
+ 'pre-prod' | 'staging'
+ 'blue-kit-stage' | 'staging'
+      'app-staging' | 'staging'
'dev' | 'development'
'review/app' | 'development'
'something-else' | 'unmapped'
+ 'store-produce' | 'unmapped'
+ 'unproductive' | 'unmapped'
end
with_them do
diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb
index 7797bd12f0e..2ccde517533 100644
--- a/spec/lib/backup/gitaly_backup_spec.rb
+++ b/spec/lib/backup/gitaly_backup_spec.rb
@@ -5,12 +5,20 @@ require 'spec_helper'
RSpec.describe Backup::GitalyBackup do
let(:parallel) { nil }
let(:parallel_storage) { nil }
+
let(:progress) do
Tempfile.new('progress').tap do |progress|
progress.unlink
end
end
+ let(:expected_env) do
+ {
+ 'SSL_CERT_FILE' => OpenSSL::X509::DEFAULT_CERT_FILE,
+ 'SSL_CERT_DIR' => OpenSSL::X509::DEFAULT_CERT_DIR
+ }.merge(ENV)
+ end
+
after do
progress.close
end
@@ -32,7 +40,7 @@ RSpec.describe Backup::GitalyBackup do
project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.owner)
- expect(Open3).to receive(:popen2).with(ENV, anything, 'create', '-path', anything).and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything).and_call_original
subject.start(:create)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
@@ -53,7 +61,7 @@ RSpec.describe Backup::GitalyBackup do
let(:parallel) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(ENV, anything, 'create', '-path', anything, '-parallel', '3').and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel', '3').and_call_original
subject.start(:create)
subject.wait
@@ -64,7 +72,7 @@ RSpec.describe Backup::GitalyBackup do
let(:parallel_storage) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(ENV, anything, 'create', '-path', anything, '-parallel-storage', '3').and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel-storage', '3').and_call_original
subject.start(:create)
subject.wait
@@ -90,6 +98,26 @@ RSpec.describe Backup::GitalyBackup do
it_behaves_like 'creates a repository backup'
end
+
+ context 'custom SSL envs set' do
+ let(:ssl_env) do
+ {
+ 'SSL_CERT_FILE' => '/some/cert/file',
+ 'SSL_CERT_DIR' => '/some/cert'
+ }
+ end
+
+ before do
+ stub_const('ENV', ssl_env)
+ end
+
+ it 'passes through SSL envs' do
+ expect(Open3).to receive(:popen2).with(ssl_env, anything, 'create', '-path', anything).and_call_original
+
+ subject.start(:create)
+ subject.wait
+ end
+ end
end
context 'restore' do
@@ -109,7 +137,7 @@ RSpec.describe Backup::GitalyBackup do
copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle')
copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle')
- expect(Open3).to receive(:popen2).with(ENV, anything, 'restore', '-path', anything).and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything).and_call_original
subject.start(:restore)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
@@ -132,7 +160,7 @@ RSpec.describe Backup::GitalyBackup do
let(:parallel) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(ENV, anything, 'restore', '-path', anything, '-parallel', '3').and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel', '3').and_call_original
subject.start(:restore)
subject.wait
@@ -143,7 +171,7 @@ RSpec.describe Backup::GitalyBackup do
let(:parallel_storage) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(ENV, anything, 'restore', '-path', anything, '-parallel-storage', '3').and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel-storage', '3').and_call_original
subject.start(:restore)
subject.wait
diff --git a/spec/lib/banzai/cross_project_reference_spec.rb b/spec/lib/banzai/cross_project_reference_spec.rb
index 60ff15a88e0..e703bbc4927 100644
--- a/spec/lib/banzai/cross_project_reference_spec.rb
+++ b/spec/lib/banzai/cross_project_reference_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Banzai::CrossProjectReference do
let(:including_class) { Class.new.include(described_class).new }
- let(:reference_cache) { Banzai::Filter::References::ReferenceCache.new(including_class, {})}
+ let(:reference_cache) { Banzai::Filter::References::ReferenceCache.new(including_class, {}, {})}
before do
allow(including_class).to receive(:context).and_return({})
diff --git a/spec/lib/banzai/filter/front_matter_filter_spec.rb b/spec/lib/banzai/filter/front_matter_filter_spec.rb
index 3f966c94dd3..cef6a2ddcce 100644
--- a/spec/lib/banzai/filter/front_matter_filter_spec.rb
+++ b/spec/lib/banzai/filter/front_matter_filter_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
aggregate_failures do
expect(output).not_to include '---'
- expect(output).to include "```yaml\nfoo: :foo_symbol\n"
+ expect(output).to include "```yaml:frontmatter\nfoo: :foo_symbol\n"
end
end
@@ -59,7 +59,7 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
aggregate_failures do
expect(output).not_to include '+++'
- expect(output).to include "```toml\nfoo = :foo_symbol\n"
+ expect(output).to include "```toml:frontmatter\nfoo = :foo_symbol\n"
end
end
@@ -81,7 +81,7 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
aggregate_failures do
expect(output).not_to include ';;;'
- expect(output).to include "```json\n{\n \"foo\": \":foo_symbol\",\n"
+ expect(output).to include "```json:frontmatter\n{\n \"foo\": \":foo_symbol\",\n"
end
end
@@ -101,7 +101,7 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
aggregate_failures do
expect(output).not_to include '---arbitrary'
- expect(output).to include "```arbitrary\nfoo = :foo_symbol\n"
+ expect(output).to include "```arbitrary:frontmatter\nfoo = :foo_symbol\n"
end
end
@@ -130,7 +130,7 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
aggregate_failures do
expect(output).to eq <<~MD
- ```yaml
+ ```yaml:frontmatter
foo: :foo_symbol
bar: :bar_symbol
```
diff --git a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
index cdf6110dd6c..c21a9339ebb 100644
--- a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
@@ -437,6 +437,19 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter do
expect(reference_filter(act, context).to_html).to eq exp
end
end
+
+ context 'when referencing both project and group milestones' do
+ let(:milestone) { create(:milestone, project: project) }
+ let(:group_milestone) { create(:milestone, title: 'group_milestone', group: group) }
+
+ it 'links to valid references' do
+ links = reference_filter("See #{milestone.to_reference(full: true)} and #{group_milestone.to_reference}", context).css('a')
+
+ expect(links.length).to eq(2)
+ expect(links[0].attr('href')).to eq(urls.milestone_url(milestone))
+ expect(links[1].attr('href')).to eq(urls.milestone_url(group_milestone))
+ end
+ end
end
context 'when milestone is open' do
diff --git a/spec/lib/banzai/filter/references/reference_cache_spec.rb b/spec/lib/banzai/filter/references/reference_cache_spec.rb
index c9404c381d3..dcd153da16a 100644
--- a/spec/lib/banzai/filter/references/reference_cache_spec.rb
+++ b/spec/lib/banzai/filter/references/reference_cache_spec.rb
@@ -12,15 +12,48 @@ RSpec.describe Banzai::Filter::References::ReferenceCache do
let(:filter_class) { Banzai::Filter::References::IssueReferenceFilter }
let(:filter) { filter_class.new(doc, project: project) }
- let(:cache) { described_class.new(filter, { project: project }) }
+ let(:cache) { described_class.new(filter, { project: project }, result) }
+ let(:result) { {} }
describe '#load_references_per_parent' do
+ subject { cache.load_references_per_parent(filter.nodes) }
+
it 'loads references grouped per parent paths' do
- cache.load_references_per_parent(filter.nodes)
+ expect(doc).to receive(:to_html).and_call_original
+
+ subject
expect(cache.references_per_parent).to eq({ project.full_path => [issue1.iid, issue2.iid].to_set,
project2.full_path => [issue3.iid].to_set })
end
+
+ context 'when rendered_html is memoized' do
+ let(:result) { { rendered_html: 'html' } }
+
+ it 'reuses memoized rendered HTML when available' do
+ expect(doc).not_to receive(:to_html)
+
+ subject
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(reference_cache_memoization: false)
+ end
+
+ it 'ignores memoized rendered HTML' do
+ expect(doc).to receive(:to_html).and_call_original
+
+ subject
+ end
+ end
+ end
+
+ context 'when result is not available' do
+ let(:result) { nil }
+
+ it { expect { subject }.not_to raise_error }
+ end
end
describe '#load_parent_per_reference' do
@@ -47,7 +80,7 @@ RSpec.describe Banzai::Filter::References::ReferenceCache do
it 'does not have an N+1 query problem with cross projects' do
doc_single = Nokogiri::HTML.fragment("#1")
filter_single = filter_class.new(doc_single, project: project)
- cache_single = described_class.new(filter_single, { project: project })
+ cache_single = described_class.new(filter_single, { project: project }, {})
control_count = ActiveRecord::QueryRecorder.new do
cache_single.load_references_per_parent(filter_single.nodes)
diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
index 16e30604c99..7e45ecdd135 100644
--- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
+++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
@@ -98,6 +98,14 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
end
end
+ context "when sourcepos metadata is available" do
+ it "includes it in the highlighted code block" do
+ result = filter('<pre data-sourcepos="1:1-3:3"><code lang="plaintext">This is a test</code></pre>')
+
+ expect(result.to_html).to eq('<pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>')
+ end
+ end
+
context "when Rouge lexing fails" do
before do
allow_next_instance_of(Rouge::Lexers::Ruby) do |instance|
diff --git a/spec/lib/banzai/pipeline/full_pipeline_spec.rb b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
index 72661003361..7a335fad3f8 100644
--- a/spec/lib/banzai/pipeline/full_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
@@ -34,15 +34,16 @@ RSpec.describe Banzai::Pipeline::FullPipeline do
let(:identifier) { html[/.*fnref1-(\d+).*/, 1] }
let(:footnote_markdown) do
<<~EOF
- first[^1] and second[^second]
+ first[^1] and second[^second] and twenty[^twenty]
[^1]: one
[^second]: two
+ [^twenty]: twenty
EOF
end
let(:filtered_footnote) do
<<~EOF
- <p dir="auto">first<sup class="footnote-ref"><a href="#fn1-#{identifier}" id="fnref1-#{identifier}">1</a></sup> and second<sup class="footnote-ref"><a href="#fn2-#{identifier}" id="fnref2-#{identifier}">2</a></sup></p>
+ <p dir="auto">first<sup class="footnote-ref"><a href="#fn1-#{identifier}" id="fnref1-#{identifier}">1</a></sup> and second<sup class="footnote-ref"><a href="#fn2-#{identifier}" id="fnref2-#{identifier}">2</a></sup> and twenty<sup class="footnote-ref"><a href="#fn3-#{identifier}" id="fnref3-#{identifier}">3</a></sup></p>
<section class="footnotes"><ol>
<li id="fn1-#{identifier}">
@@ -51,6 +52,9 @@ RSpec.describe Banzai::Pipeline::FullPipeline do
<li id="fn2-#{identifier}">
<p>two <a href="#fnref2-#{identifier}" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
</li>
+ <li id="fn3-#{identifier}">
+ <p>twenty <a href="#fnref3-#{identifier}" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
+ </li>
</ol></section>
EOF
end
diff --git a/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb b/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb
index c628d8d5b41..5021ef3a79a 100644
--- a/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Banzai::Pipeline::PreProcessPipeline do
aggregate_failures do
expect(result[:output]).not_to include "\xEF\xBB\xBF"
expect(result[:output]).not_to include '---'
- expect(result[:output]).to include "```yaml\nfoo: :foo_symbol\n"
+ expect(result[:output]).to include "```yaml:frontmatter\nfoo: :foo_symbol\n"
expect(result[:output]).to include "> blockquote\n"
end
end
diff --git a/spec/lib/bulk_imports/clients/graphql_spec.rb b/spec/lib/bulk_imports/clients/graphql_spec.rb
index 2f212458c4a..a5b5e96e594 100644
--- a/spec/lib/bulk_imports/clients/graphql_spec.rb
+++ b/spec/lib/bulk_imports/clients/graphql_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe BulkImports::Clients::Graphql do
let(:version) { '13.0.0' }
it 'raises an error' do
- expect { subject.execute('test') }.to raise_error(::BulkImports::Error, "Unsupported GitLab Version. Minimum Supported Gitlab Version #{BulkImport::MINIMUM_GITLAB_MAJOR_VERSION}.")
+ expect { subject.execute('test') }.to raise_error(::BulkImports::Error, "Unsupported GitLab Version. Minimum Supported Gitlab Version #{BulkImport::MIN_MAJOR_VERSION}.")
end
end
end
diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb
index c36cb80851a..623f9aa453a 100644
--- a/spec/lib/bulk_imports/clients/http_spec.rb
+++ b/spec/lib/bulk_imports/clients/http_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe BulkImports::Clients::HTTP do
let(:url) { 'http://gitlab.example' }
let(:token) { 'token' }
let(:resource) { 'resource' }
- let(:version) { "#{BulkImport::MINIMUM_GITLAB_MAJOR_VERSION}.0.0" }
+ let(:version) { "#{BulkImport::MIN_MAJOR_VERSION}.0.0" }
let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }
let(:version_response) { double(code: 200, success?: true, parsed_response: { 'version' => version }) }
@@ -32,7 +32,7 @@ RSpec.describe BulkImports::Clients::HTTP do
it 'raises BulkImports::Error' do
allow(Gitlab::HTTP).to receive(method).and_raise(Errno::ECONNREFUSED)
- expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::Error)
+ expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError)
end
end
@@ -42,7 +42,7 @@ RSpec.describe BulkImports::Clients::HTTP do
allow(Gitlab::HTTP).to receive(method).and_return(response_double)
- expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::Error)
+ expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError)
end
end
end
@@ -176,11 +176,37 @@ RSpec.describe BulkImports::Clients::HTTP do
end
end
+ describe '#instance_version' do
+ it 'returns version as an instance of Gitlab::VersionInfo' do
+ expect(subject.instance_version).to eq(Gitlab::VersionInfo.parse(version))
+ end
+ end
+
+ describe '#compatible_for_project_migration?' do
+    context 'when instance version is lower than the expected minimum' do
+ it 'returns false' do
+ expect(subject.compatible_for_project_migration?).to be false
+ end
+ end
+
+ context 'when instance version is at least the expected minimum' do
+ let(:version) { "14.4.4" }
+
+ it 'returns true' do
+ expect(subject.compatible_for_project_migration?).to be true
+ end
+ end
+ end
+
context 'when source instance is incompatible' do
let(:version) { '13.0.0' }
it 'raises an error' do
- expect { subject.get(resource) }.to raise_error(::BulkImports::Error, "Unsupported GitLab Version. Minimum Supported Gitlab Version #{BulkImport::MINIMUM_GITLAB_MAJOR_VERSION}.")
+ expect { subject.get(resource) }
+ .to raise_error(
+ ::BulkImports::Error,
+ "Unsupported GitLab Version. Minimum Supported Gitlab Version #{BulkImport::MIN_MAJOR_VERSION}."
+ )
end
end
diff --git a/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb
new file mode 100644
index 00000000000..241bd694a2c
--- /dev/null
+++ b/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Pipelines::BoardsPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+
+ let(:board_data) do
+ {
+ "name" => "Test Board",
+ "lists" => [
+ {
+ "list_type" => "backlog",
+ "position" => 0
+ },
+ {
+ "list_type" => "closed",
+ "position" => 1
+ },
+ {
+ "list_type" => "label",
+ "position" => 2,
+ "label" => {
+ "title" => "test",
+ "type" => "GroupLabel",
+ "group_id" => group.id
+ }
+ }
+ ]
+ }
+ end
+
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject { described_class.new(context) }
+
+ before do
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: board_data))
+ end
+ group.add_owner(user)
+ end
+
+ context 'when issue board belongs to a project' do
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ source_type: :project_entity,
+ project: project,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Group',
+ destination_namespace: group.full_path
+ )
+ end
+
+ describe '#run' do
+ it 'imports issue boards into destination project' do
+ expect { subject.run }.to change(::Board, :count).by(1)
+ board = project.boards.find_by(name: board_data["name"])
+ expect(board).to be
+ expect(board.project.id).to eq(project.id)
+ expect(board.lists.count).to eq(3)
+ expect(board.lists.map(&:list_type).sort).to match_array(%w(backlog closed label))
+ expect(board.lists.find_by(list_type: "label").label.title).to eq("test")
+ end
+ end
+ end
+
+ context 'when issue board belongs to a group' do
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ group: group,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Group',
+ destination_namespace: group.full_path
+ )
+ end
+
+ describe '#run' do
+ it 'imports issue boards into destination group' do
+ expect { subject.run }.to change(::Board, :count).by(1)
+ board = group.boards.find_by(name: board_data["name"])
+ expect(board).to be
+ expect(board.group.id).to eq(group.id)
+ expect(board.lists.count).to eq(3)
+ expect(board.lists.map(&:list_type).sort).to match_array(%w(backlog closed label))
+ expect(board.lists.find_by(list_type: "label").label.title).to eq("test")
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb
index 6344dae0fb7..9e3a6d5b8df 100644
--- a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
+RSpec.describe BulkImports::Common::Pipelines::LabelsPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
diff --git a/spec/lib/bulk_imports/groups/pipelines/boards_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/boards_pipeline_spec.rb
deleted file mode 100644
index 8b2f03ca15f..00000000000
--- a/spec/lib/bulk_imports/groups/pipelines/boards_pipeline_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Pipelines::BoardsPipeline do
- let_it_be(:user) { create(:user) }
- let_it_be(:group) { create(:group) }
- let_it_be(:bulk_import) { create(:bulk_import, user: user) }
- let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/boards.ndjson.gz' }
- let_it_be(:entity) do
- create(
- :bulk_import_entity,
- group: group,
- bulk_import: bulk_import,
- source_full_path: 'source/full/path',
- destination_name: 'My Destination Group',
- destination_namespace: group.full_path
- )
- end
-
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
-
- let(:tmpdir) { Dir.mktmpdir }
-
- before do
- FileUtils.copy_file(filepath, File.join(tmpdir, 'boards.ndjson.gz'))
- group.add_owner(user)
- end
-
- subject { described_class.new(context) }
-
- describe '#run' do
- it 'imports group boards into destination group and removes tmpdir' do
- allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
- allow_next_instance_of(BulkImports::FileDownloadService) do |service|
- allow(service).to receive(:execute)
- end
-
- expect { subject.run }.to change(Board, :count).by(1)
-
- lists = group.boards.find_by(name: 'first board').lists
-
- expect(lists.count).to eq(3)
- expect(lists.first.label.title).to eq('TSL')
- expect(lists.second.label.title).to eq('Sosync')
- end
- end
-end
diff --git a/spec/lib/bulk_imports/groups/stage_spec.rb b/spec/lib/bulk_imports/groups/stage_spec.rb
index 81c0ffc14d4..b322b7b0edf 100644
--- a/spec/lib/bulk_imports/groups/stage_spec.rb
+++ b/spec/lib/bulk_imports/groups/stage_spec.rb
@@ -3,36 +3,42 @@
require 'spec_helper'
RSpec.describe BulkImports::Groups::Stage do
+ let(:bulk_import) { build(:bulk_import) }
+
let(:pipelines) do
[
[0, BulkImports::Groups::Pipelines::GroupPipeline],
[1, BulkImports::Groups::Pipelines::GroupAvatarPipeline],
[1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
[1, BulkImports::Groups::Pipelines::MembersPipeline],
- [1, BulkImports::Groups::Pipelines::LabelsPipeline],
+ [1, BulkImports::Common::Pipelines::LabelsPipeline],
[1, BulkImports::Groups::Pipelines::MilestonesPipeline],
[1, BulkImports::Groups::Pipelines::BadgesPipeline],
- [2, BulkImports::Groups::Pipelines::BoardsPipeline]
+ [2, BulkImports::Common::Pipelines::BoardsPipeline]
]
end
+ it 'raises error when initialized without a BulkImport' do
+ expect { described_class.new({}) }.to raise_error(ArgumentError, 'Expected an argument of type ::BulkImport')
+ end
+
describe '.pipelines' do
it 'list all the pipelines with their stage number, ordered by stage' do
- expect(described_class.pipelines & pipelines).to eq(pipelines)
- expect(described_class.pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
+ expect(described_class.new(bulk_import).pipelines & pipelines).to eq(pipelines)
+ expect(described_class.new(bulk_import).pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
end
it 'includes project entities pipeline' do
stub_feature_flags(bulk_import_projects: true)
- expect(described_class.pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline])
+ expect(described_class.new(bulk_import).pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline])
end
context 'when bulk_import_projects feature flag is disabled' do
it 'does not include project entities pipeline' do
stub_feature_flags(bulk_import_projects: false)
- expect(described_class.pipelines.flatten).not_to include(BulkImports::Groups::Pipelines::ProjectEntitiesPipeline)
+ expect(described_class.new(bulk_import).pipelines.flatten).not_to include(BulkImports::Groups::Pipelines::ProjectEntitiesPipeline)
end
end
end
diff --git a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
index 57a258b0d9f..7d156c2c3df 100644
--- a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
@@ -186,4 +186,20 @@ RSpec.describe BulkImports::NdjsonPipeline do
end
end
end
+
+ describe '#relation_factory' do
+ context 'when portable is group' do
+ it 'returns group relation factory' do
+ expect(subject.relation_factory).to eq(Gitlab::ImportExport::Group::RelationFactory)
+ end
+ end
+
+ context 'when portable is project' do
+ subject { NdjsonPipelineClass.new(project, user) }
+
+ it 'returns project relation factory' do
+ expect(subject.relation_factory).to eq(Gitlab::ImportExport::Project::RelationFactory)
+ end
+ end
+ end
end
diff --git a/spec/lib/bulk_imports/network_error_spec.rb b/spec/lib/bulk_imports/network_error_spec.rb
new file mode 100644
index 00000000000..11f555fee09
--- /dev/null
+++ b/spec/lib/bulk_imports/network_error_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::NetworkError, :clean_gitlab_redis_cache do
+ let(:tracker) { double(id: 1, stage: 2, entity: double(id: 3)) }
+
+ describe '.new' do
+ it 'requires either a message or an HTTP response' do
+ expect { described_class.new }
+ .to raise_error(ArgumentError, 'message or response required')
+ end
+ end
+
+ describe '#retriable?' do
+ it 'returns true for MAX_RETRIABLE_COUNT times when cause is one of RETRIABLE_EXCEPTIONS' do
+ raise described_class::RETRIABLE_EXCEPTIONS.sample
+ rescue StandardError => cause
+ begin
+ raise described_class, cause
+ rescue StandardError => exception
+ described_class::MAX_RETRIABLE_COUNT.times do
+ expect(exception.retriable?(tracker)).to eq(true)
+ end
+
+ expect(exception.retriable?(tracker)).to eq(false)
+ end
+ end
+
+ it 'returns true for MAX_RETRIABLE_COUNT times when response is one of RETRIABLE_CODES' do
+ exception = described_class.new(response: double(code: 429))
+
+ described_class::MAX_RETRIABLE_COUNT.times do
+ expect(exception.retriable?(tracker)).to eq(true)
+ end
+
+ expect(exception.retriable?(tracker)).to eq(false)
+ end
+
+ it 'returns false for other exceptions' do
+ raise StandardError
+ rescue StandardError => cause
+ begin
+ raise described_class, cause
+ rescue StandardError => exception
+ expect(exception.retriable?(tracker)).to eq(false)
+ end
+ end
+ end
+
+ describe '#retry_delay' do
+ it 'returns the default value when there is not a rate limit error' do
+ exception = described_class.new('foo')
+
+ expect(exception.retry_delay).to eq(described_class::DEFAULT_RETRY_DELAY_SECONDS.seconds)
+ end
+
+ context 'when the exception is a rate limit error' do
+ it 'returns the "Retry-After"' do
+ exception = described_class.new(response: double(code: 429, headers: { 'Retry-After' => 20 }))
+
+ expect(exception.retry_delay).to eq(20.seconds)
+ end
+
+ it 'returns the default value when there is no "Retry-After" header' do
+ exception = described_class.new(response: double(code: 429, headers: {}))
+
+ expect(exception.retry_delay).to eq(described_class::DEFAULT_RETRY_DELAY_SECONDS.seconds)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/graphql/get_repository_query_spec.rb b/spec/lib/bulk_imports/projects/graphql/get_repository_query_spec.rb
new file mode 100644
index 00000000000..4dba81dc0d2
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/graphql/get_repository_query_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Graphql::GetRepositoryQuery do
+ describe 'query repository based on full_path' do
+ let(:entity) { double(source_full_path: 'test', bulk_import: nil) }
+ let(:tracker) { double(entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ it 'returns project repository url' do
+ expect(described_class.to_s).to include('httpUrlToRepo')
+ end
+
+ it 'queries project based on source_full_path' do
+ expected = { full_path: entity.source_full_path }
+
+ expect(described_class.variables(context)).to eq(expected)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb
new file mode 100644
index 00000000000..97fcddefd42
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb
@@ -0,0 +1,168 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::IssuesPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:issue_attributes) { {} }
+ let(:issue) do
+ {
+ 'iid' => 7,
+ 'title' => 'Imported Issue',
+ 'description' => 'Description',
+ 'state' => 'opened',
+ 'updated_at' => '2016-06-14T15:02:47.967Z',
+ 'author_id' => 22
+ }.merge(issue_attributes)
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ before do
+ group.add_owner(user)
+ issue_with_index = [issue, 0]
+
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [issue_with_index]))
+ end
+
+ pipeline.run
+ end
+
+ it 'imports issue into destination project' do
+ expect(project.issues.count).to eq(1)
+
+ imported_issue = project.issues.last
+
+ aggregate_failures do
+ expect(imported_issue.iid).to eq(7)
+ expect(imported_issue.title).to eq(issue['title'])
+ expect(imported_issue.description).to eq(issue['description'])
+ expect(imported_issue.author).to eq(user)
+ expect(imported_issue.state).to eq('opened')
+ expect(imported_issue.updated_at.to_s).to eq('2016-06-14 15:02:47 UTC')
+ end
+ end
+
+ context 'zoom meetings' do
+ let(:issue_attributes) { { 'zoom_meetings' => [{ 'url' => 'https://zoom.us/j/123456789' }] } }
+
+ it 'restores zoom meetings' do
+ expect(project.issues.last.zoom_meetings.first.url).to eq('https://zoom.us/j/123456789')
+ end
+ end
+
+ context 'sentry issue' do
+ let(:issue_attributes) { { 'sentry_issue' => { 'sentry_issue_identifier' => '1234567891' } } }
+
+ it 'restores sentry issue information' do
+ expect(project.issues.last.sentry_issue.sentry_issue_identifier).to eq(1234567891)
+ end
+ end
+
+ context 'award emoji' do
+ let(:issue_attributes) { { 'award_emoji' => [{ 'name' => 'musical_keyboard', 'user_id' => 22 }] } }
+
+ it 'has award emoji on an issue' do
+ award_emoji = project.issues.last.award_emoji.first
+
+ expect(award_emoji.name).to eq('musical_keyboard')
+ expect(award_emoji.user).to eq(user)
+ end
+ end
+ context 'issue state' do
+ let(:issue_attributes) { { 'state' => 'closed' } }
+
+ it 'restores issue state' do
+ expect(project.issues.last.state).to eq('closed')
+ end
+ end
+
+ context 'labels' do
+ let(:issue_attributes) do
+ {
+ 'label_links' => [
+ { 'label' => { 'title' => 'imported label 1', 'type' => 'ProjectLabel' } },
+ { 'label' => { 'title' => 'imported label 2', 'type' => 'ProjectLabel' } }
+ ]
+ }
+ end
+
+ it 'restores issue labels' do
+ expect(project.issues.last.labels.pluck(:title)).to contain_exactly('imported label 1', 'imported label 2')
+ end
+ end
+
+ context 'milestone' do
+ let(:issue_attributes) { { 'milestone' => { 'title' => 'imported milestone' } } }
+
+ it 'restores issue milestone' do
+ expect(project.issues.last.milestone.title).to eq('imported milestone')
+ end
+ end
+
+ context 'timelogs' do
+ let(:issue_attributes) { { 'timelogs' => [{ 'time_spent' => 72000, 'spent_at' => '2019-12-27T00:00:00.000Z', 'user_id' => 22 }] } }
+
+ it 'restores issue timelogs' do
+ timelog = project.issues.last.timelogs.first
+
+ aggregate_failures do
+ expect(timelog.time_spent).to eq(72000)
+ expect(timelog.spent_at).to eq("2019-12-27T00:00:00.000Z")
+ end
+ end
+ end
+
+ context 'notes' do
+ let(:issue_attributes) do
+ {
+ 'notes' => [
+ {
+ 'note' => 'Issue note',
+ 'author_id' => 22,
+ 'author' => {
+ 'name' => 'User 22'
+ },
+ 'updated_at' => '2016-06-14T15:02:47.770Z',
+ 'award_emoji' => [
+ {
+ 'name' => 'clapper',
+ 'user_id' => 22
+ }
+ ]
+ }
+ ]
+ }
+ end
+
+ it 'restores issue notes and their award emoji' do
+ note = project.issues.last.notes.first
+
+ aggregate_failures do
+ expect(note.note).to eq("Issue note\n\n *By User 22 on 2016-06-14T15:02:47 (imported from GitLab)*")
+ expect(note.award_emoji.first.name).to eq('clapper')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb
new file mode 100644
index 00000000000..af39ec7a11c
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::RepositoryPipeline do
+ describe '#run' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:parent) { create(:project) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
+
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Repository',
+ destination_namespace: parent.full_path,
+ project: parent
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ context 'successfully imports repository' do
+ let(:project_data) do
+ {
+ 'httpUrlToRepo' => 'http://test.git'
+ }
+ end
+
+ subject { described_class.new(context) }
+
+ it 'imports new repository into destination project' do
+ allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_data))
+ end
+
+ expect_next_instance_of(Gitlab::GitalyClient::RepositoryService) do |repository_service|
+ url = project_data['httpUrlToRepo'].sub("://", "://oauth2:#{bulk_import_configuration.access_token}@")
+ expect(repository_service).to receive(:import_repository).with(url).and_return 0
+ end
+
+ subject.run
+ end
+ end
+
+ context 'blocked local networks' do
+ let(:project_data) do
+ {
+ 'httpUrlToRepo' => 'http://localhost/foo.git'
+ }
+ end
+
+ before do
+ allow(Gitlab.config.gitlab).to receive(:host).and_return('notlocalhost.gitlab.com')
+ allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(false)
+ allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_data))
+ end
+ end
+
+ subject { described_class.new(context) }
+
+ it 'marks the entity as failed when the repository URL points to a blocked local network' do
+ subject.run
+ expect(context.entity.failed?).to be_truthy
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb
index 428812a34ef..c606cf7c556 100644
--- a/spec/lib/bulk_imports/projects/stage_spec.rb
+++ b/spec/lib/bulk_imports/projects/stage_spec.rb
@@ -6,13 +6,23 @@ RSpec.describe BulkImports::Projects::Stage do
let(:pipelines) do
[
[0, BulkImports::Projects::Pipelines::ProjectPipeline],
- [1, BulkImports::Common::Pipelines::EntityFinisher]
+ [1, BulkImports::Projects::Pipelines::RepositoryPipeline],
+ [2, BulkImports::Common::Pipelines::LabelsPipeline],
+ [3, BulkImports::Projects::Pipelines::IssuesPipeline],
+ [4, BulkImports::Common::Pipelines::BoardsPipeline],
+ [5, BulkImports::Common::Pipelines::EntityFinisher]
]
end
- describe '.pipelines' do
+ subject do
+ bulk_import = build(:bulk_import)
+
+ described_class.new(bulk_import)
+ end
+
+ describe '#pipelines' do
 it 'lists all the pipelines with their stage number, ordered by stage' do
- expect(described_class.pipelines).to eq(pipelines)
+ expect(subject.pipelines).to eq(pipelines)
end
end
end
diff --git a/spec/lib/container_registry/client_spec.rb b/spec/lib/container_registry/client_spec.rb
index 9d6f4db537d..47a8fcf5dd0 100644
--- a/spec/lib/container_registry/client_spec.rb
+++ b/spec/lib/container_registry/client_spec.rb
@@ -111,6 +111,49 @@ RSpec.describe ContainerRegistry::Client do
it_behaves_like 'handling timeouts'
end
+ shared_examples 'handling repository info' do
+ context 'when the check is successful' do
+ context 'when using the GitLab container registry' do
+ before do
+ stub_registry_info(headers: {
+ 'GitLab-Container-Registry-Version' => '2.9.1-gitlab',
+ 'GitLab-Container-Registry-Features' => 'a,b,c'
+ })
+ end
+
+ it 'identifies the vendor as "gitlab"' do
+ expect(subject).to include(vendor: 'gitlab')
+ end
+
+ it 'identifies version and features' do
+ expect(subject).to include(version: '2.9.1-gitlab', features: %w[a b c])
+ end
+ end
+
+ context 'when using a third-party container registry' do
+ before do
+ stub_registry_info
+ end
+
+ it 'identifies the vendor as "other"' do
+ expect(subject).to include(vendor: 'other')
+ end
+
+ it 'does not identify version or features' do
+ expect(subject).to include(version: nil, features: [])
+ end
+ end
+ end
+
+ context 'when the check is not successful' do
+ it 'does not identify vendor, version or features' do
+ stub_registry_info(status: 500)
+
+ expect(subject).to eq({})
+ end
+ end
+ end
+
describe '#repository_manifest' do
subject { client.repository_manifest('group/test', 'mytag') }
@@ -316,46 +359,7 @@ RSpec.describe ContainerRegistry::Client do
describe '#registry_info' do
subject { client.registry_info }
- context 'when the check is successful' do
- context 'when using the GitLab container registry' do
- before do
- stub_registry_info(headers: {
- 'GitLab-Container-Registry-Version' => '2.9.1-gitlab',
- 'GitLab-Container-Registry-Features' => 'a,b,c'
- })
- end
-
- it 'identifies the vendor as "gitlab"' do
- expect(subject).to include(vendor: 'gitlab')
- end
-
- it 'identifies version and features' do
- expect(subject).to include(version: '2.9.1-gitlab', features: %w[a b c])
- end
- end
-
- context 'when using a third-party container registry' do
- before do
- stub_registry_info
- end
-
- it 'identifies the vendor as "other"' do
- expect(subject).to include(vendor: 'other')
- end
-
- it 'does not identify version or features' do
- expect(subject).to include(version: nil, features: [])
- end
- end
- end
-
- context 'when the check is not successful' do
- it 'does not identify vendor, version or features' do
- stub_registry_info(status: 500)
-
- expect(subject).to eq({})
- end
- end
+ it_behaves_like 'handling repository info'
end
describe '.supports_tag_delete?' do
@@ -418,6 +422,16 @@ RSpec.describe ContainerRegistry::Client do
end
end
+ describe '.registry_info' do
+ subject { described_class.registry_info }
+
+ before do
+ stub_container_registry_config(enabled: true, api_url: registry_api_url, key: 'spec/fixtures/x509_certificate_pk.key')
+ end
+
+ it_behaves_like 'handling repository info'
+ end
+
def stub_upload(path, content, digest, status = 200)
stub_request(:post, "#{registry_api_url}/v2/#{path}/blobs/uploads/")
.with(headers: headers_with_accept_types)
diff --git a/spec/lib/container_registry/tag_spec.rb b/spec/lib/container_registry/tag_spec.rb
index d696b61ac9d..d6e6b254dd9 100644
--- a/spec/lib/container_registry/tag_spec.rb
+++ b/spec/lib/container_registry/tag_spec.rb
@@ -60,6 +60,20 @@ RSpec.describe ContainerRegistry::Tag do
end
context 'manifest processing' do
+ shared_examples 'using the value manually set on created_at' do
+ let(:value) { 5.seconds.ago }
+
+ before do
+ tag.created_at = value
+ end
+
+ it 'does not use the config' do
+ expect(tag).not_to receive(:config)
+
+ expect(subject).to eq(value)
+ end
+ end
+
context 'schema v1' do
before do
stub_request(:get, 'http://registry.gitlab/v2/group/test/manifests/tag')
@@ -93,6 +107,8 @@ RSpec.describe ContainerRegistry::Tag do
subject { tag.created_at }
it { is_expected.to be_nil }
+
+ it_behaves_like 'using the value manually set on created_at'
end
end
end
@@ -117,6 +133,8 @@ RSpec.describe ContainerRegistry::Tag do
subject { tag.created_at }
it { is_expected.to be_nil }
+
+ it_behaves_like 'using the value manually set on created_at'
end
end
@@ -154,6 +172,8 @@ RSpec.describe ContainerRegistry::Tag do
subject { tag.created_at }
it { is_expected.not_to be_nil }
+
+ it_behaves_like 'using the value manually set on created_at'
end
end
diff --git a/spec/lib/error_tracking/sentry_client/issue_spec.rb b/spec/lib/error_tracking/sentry_client/issue_spec.rb
index e54296c58e0..82db0f70f2e 100644
--- a/spec/lib/error_tracking/sentry_client/issue_spec.rb
+++ b/spec/lib/error_tracking/sentry_client/issue_spec.rb
@@ -257,6 +257,10 @@ RSpec.describe ErrorTracking::SentryClient::Issue do
expect(subject.gitlab_issue).to eq('https://gitlab.com/gitlab-org/gitlab/issues/1')
end
+ it 'has an integrated attribute set to false' do
+ expect(subject.integrated).to be_falsey
+ end
+
context 'when issue annotations exist' do
before do
issue_sample_response['annotations'] = [
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start_spec.rb
index b6f9c8106c9..2e96fd09602 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start_spec.rb
@@ -19,4 +19,16 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::CodeStageStart do
expect(records).to eq([merge_request])
expect(records).not_to include(other_merge_request)
end
+
+ it_behaves_like 'LEFT JOIN-able value stream analytics event' do
+ let_it_be(:record_with_data) do
+ mr_closing_issue = FactoryBot.create(:merge_requests_closing_issues)
+ issue = mr_closing_issue.issue
+ issue.metrics.update!(first_mentioned_in_commit_at: Time.current)
+
+ mr_closing_issue.merge_request
+ end
+
+ let_it_be(:record_without_data) { create(:merge_request) }
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_created_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_created_spec.rb
index 224a18653ed..3f50dd38322 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_created_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_created_spec.rb
@@ -4,4 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::IssueCreated do
it_behaves_like 'value stream analytics event'
+
+ it_behaves_like 'LEFT JOIN-able value stream analytics event' do
+ let_it_be(:record_with_data) { create(:issue) }
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_deployed_to_production_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_deployed_to_production_spec.rb
index 93e588675d3..e807565ecb5 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_deployed_to_production_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_deployed_to_production_spec.rb
@@ -4,4 +4,16 @@ require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::IssueDeployedToProduction do
it_behaves_like 'value stream analytics event'
+
+ it_behaves_like 'LEFT JOIN-able value stream analytics event' do
+ let_it_be(:record_with_data) do
+ mr_closing_issue = FactoryBot.create(:merge_requests_closing_issues)
+ mr = mr_closing_issue.merge_request
+ mr.metrics.update!(first_deployed_to_production_at: Time.current)
+
+ mr_closing_issue.issue
+ end
+
+ let_it_be(:record_without_data) { create(:issue) }
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_first_mentioned_in_commit_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_first_mentioned_in_commit_spec.rb
index bc0e388cf53..9bb023f9fdc 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_first_mentioned_in_commit_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_first_mentioned_in_commit_spec.rb
@@ -4,4 +4,9 @@ require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::IssueFirstMentionedInCommit do
it_behaves_like 'value stream analytics event'
+
+ it_behaves_like 'LEFT JOIN-able value stream analytics event' do
+ let_it_be(:record_with_data) { create(:issue).tap { |i| i.metrics.update!(first_mentioned_in_commit_at: Time.current) } }
+ let_it_be(:record_without_data) { create(:issue) }
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end_spec.rb
index ddc5f015a8c..7b46a86cbe2 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end_spec.rb
@@ -4,4 +4,9 @@ require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::IssueStageEnd do
it_behaves_like 'value stream analytics event'
+
+ it_behaves_like 'LEFT JOIN-able value stream analytics event' do
+ let_it_be(:record_with_data) { create(:issue).tap { |i| i.metrics.update!(first_added_to_board_at: Time.current) } }
+ let_it_be(:record_without_data) { create(:issue) }
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_created_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_created_spec.rb
index 281cc31c9e0..1139f9099cb 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_created_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_created_spec.rb
@@ -4,4 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::MergeRequestCreated do
it_behaves_like 'value stream analytics event'
+
+ it_behaves_like 'LEFT JOIN-able value stream analytics event' do
+ let_it_be(:record_with_data) { create(:merge_request) }
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_first_deployed_to_production_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_first_deployed_to_production_spec.rb
index e1dd2e56e2b..a62facb6974 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_first_deployed_to_production_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_first_deployed_to_production_spec.rb
@@ -4,4 +4,9 @@ require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::MergeRequestFirstDeployedToProduction do
it_behaves_like 'value stream analytics event'
+
+ it_behaves_like 'LEFT JOIN-able value stream analytics event' do
+ let_it_be(:record_with_data) { create(:merge_request).tap { |mr| mr.metrics.update!(first_deployed_to_production_at: Time.current) } }
+ let_it_be(:record_without_data) { create(:merge_request) }
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_finished_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_finished_spec.rb
index 51324966f26..c5cfe43895e 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_finished_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_finished_spec.rb
@@ -4,4 +4,9 @@ require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::MergeRequestLastBuildFinished do
it_behaves_like 'value stream analytics event'
+
+ it_behaves_like 'LEFT JOIN-able value stream analytics event' do
+ let_it_be(:record_with_data) { create(:merge_request).tap { |mr| mr.metrics.update!(latest_build_finished_at: Time.current) } }
+ let_it_be(:record_without_data) { create(:merge_request) }
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_started_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_started_spec.rb
index 10dcaf23b81..6f8a82a9ae5 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_started_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_started_spec.rb
@@ -4,4 +4,9 @@ require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::MergeRequestLastBuildStarted do
it_behaves_like 'value stream analytics event'
+
+ it_behaves_like 'LEFT JOIN-able value stream analytics event' do
+ let_it_be(:record_with_data) { create(:merge_request).tap { |mr| mr.metrics.update!(latest_build_started_at: Time.current) } }
+ let_it_be(:record_without_data) { create(:merge_request) }
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_merged_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_merged_spec.rb
index 6e20eb73ed9..0060ed0fd48 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_merged_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_merged_spec.rb
@@ -4,4 +4,9 @@ require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::MergeRequestMerged do
it_behaves_like 'value stream analytics event'
+
+ it_behaves_like 'LEFT JOIN-able value stream analytics event' do
+ let_it_be(:record_with_data) { create(:merge_request).tap { |mr| mr.metrics.update!(merged_at: Time.current) } }
+ let_it_be(:record_without_data) { create(:merge_request) }
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start_spec.rb
index b8c68003127..379d59e4c5e 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start_spec.rb
@@ -21,4 +21,9 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::PlanStageStart do
expect(records).to match_array([issue1, issue2])
expect(records).not_to include(issue_without_metrics)
end
+
+ it_behaves_like 'LEFT JOIN-able value stream analytics event' do
+ let_it_be(:record_with_data) { create(:issue).tap { |i| i.metrics.update!(first_added_to_board_at: Time.current) } }
+ let_it_be(:record_without_data) { create(:issue) }
+ end
end
diff --git a/spec/lib/gitlab/application_rate_limiter_spec.rb b/spec/lib/gitlab/application_rate_limiter_spec.rb
index 2525b1ce41e..0fb99688d27 100644
--- a/spec/lib/gitlab/application_rate_limiter_spec.rb
+++ b/spec/lib/gitlab/application_rate_limiter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::ApplicationRateLimiter, :clean_gitlab_redis_cache do
+RSpec.describe Gitlab::ApplicationRateLimiter do
let(:redis) { double('redis') }
let(:user) { create(:user) }
let(:project) { create(:project) }
@@ -20,7 +20,7 @@ RSpec.describe Gitlab::ApplicationRateLimiter, :clean_gitlab_redis_cache do
subject { described_class }
before do
- allow(Gitlab::Redis::Cache).to receive(:with).and_yield(redis)
+ allow(Gitlab::Redis::RateLimiting).to receive(:with).and_yield(redis)
allow(described_class).to receive(:rate_limits).and_return(rate_limits)
end
@@ -106,9 +106,9 @@ RSpec.describe Gitlab::ApplicationRateLimiter, :clean_gitlab_redis_cache do
let(:attributes) do
base_attributes.merge({
- user_id: current_user.id,
- username: current_user.username
- })
+ user_id: current_user.id,
+ username: current_user.username
+ })
end
it 'logs information to auth.log' do
diff --git a/spec/lib/gitlab/auth/request_authenticator_spec.rb b/spec/lib/gitlab/auth/request_authenticator_spec.rb
index 2543eb3a5e9..6f3d6187076 100644
--- a/spec/lib/gitlab/auth/request_authenticator_spec.rb
+++ b/spec/lib/gitlab/auth/request_authenticator_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Auth::RequestAuthenticator do
+ include DependencyProxyHelpers
+
let(:env) do
{
'rack.input' => '',
@@ -15,8 +17,8 @@ RSpec.describe Gitlab::Auth::RequestAuthenticator do
subject { described_class.new(request) }
describe '#user' do
- let!(:sessionless_user) { build(:user) }
- let!(:session_user) { build(:user) }
+ let_it_be(:sessionless_user) { build(:user) }
+ let_it_be(:session_user) { build(:user) }
it 'returns sessionless user first' do
allow_any_instance_of(described_class).to receive(:find_sessionless_user).and_return(sessionless_user)
@@ -41,15 +43,25 @@ RSpec.describe Gitlab::Auth::RequestAuthenticator do
end
describe '#find_sessionless_user' do
- let!(:access_token_user) { build(:user) }
- let!(:feed_token_user) { build(:user) }
- let!(:static_object_token_user) { build(:user) }
- let!(:job_token_user) { build(:user) }
- let!(:lfs_token_user) { build(:user) }
- let!(:basic_auth_access_token_user) { build(:user) }
- let!(:basic_auth_password_user) { build(:user) }
-
- it 'returns access_token user first' do
+ let_it_be(:dependency_proxy_user) { build(:user) }
+ let_it_be(:access_token_user) { build(:user) }
+ let_it_be(:feed_token_user) { build(:user) }
+ let_it_be(:static_object_token_user) { build(:user) }
+ let_it_be(:job_token_user) { build(:user) }
+ let_it_be(:lfs_token_user) { build(:user) }
+ let_it_be(:basic_auth_access_token_user) { build(:user) }
+ let_it_be(:basic_auth_password_user) { build(:user) }
+
+ it 'returns dependency_proxy user first' do
+ allow_any_instance_of(described_class).to receive(:find_user_from_dependency_proxy_token)
+ .and_return(dependency_proxy_user)
+
+ allow_any_instance_of(described_class).to receive(:find_user_from_web_access_token).and_return(access_token_user)
+
+ expect(subject.find_sessionless_user(:api)).to eq dependency_proxy_user
+ end
+
+ it 'returns access_token user if no dependency_proxy user found' do
allow_any_instance_of(described_class).to receive(:find_user_from_web_access_token)
.with(anything, scopes: [:api, :read_api])
.and_return(access_token_user)
@@ -154,6 +166,75 @@ RSpec.describe Gitlab::Auth::RequestAuthenticator do
expect(subject.find_sessionless_user(:api)).to be_blank
end
+
+ context 'dependency proxy' do
+ let_it_be(:dependency_proxy_user) { create(:user) }
+
+ let(:token) { build_jwt(dependency_proxy_user).encoded }
+ let(:authenticator) { described_class.new(request) }
+
+ subject { authenticator.find_sessionless_user(:api) }
+
+ before do
+ env['SCRIPT_NAME'] = accessed_path
+ env['HTTP_AUTHORIZATION'] = "Bearer #{token}"
+ end
+
+ shared_examples 'identifying dependency proxy urls properly with' do |user_type|
+ context 'with pulling a manifest' do
+ let(:accessed_path) { '/v2/group1/dependency_proxy/containers/alpine/manifests/latest' }
+
+ it { is_expected.to eq(dependency_proxy_user) } if user_type == :user
+ it { is_expected.to eq(nil) } if user_type == :no_user
+ end
+
+ context 'with pulling a blob' do
+ let(:accessed_path) { '/v2/group1/dependency_proxy/containers/alpine/blobs/sha256:a0d0a0d46f8b52473982a3c466318f479767577551a53ffc9074c9fa7035982e' }
+
+ it { is_expected.to eq(dependency_proxy_user) } if user_type == :user
+ it { is_expected.to eq(nil) } if user_type == :no_user
+ end
+
+ context 'with any other path' do
+ let(:accessed_path) { '/foo/bar' }
+
+ it { is_expected.to eq(nil) }
+ end
+ end
+
+ context 'with a user' do
+ it_behaves_like 'identifying dependency proxy urls properly with', :user
+
+ context 'with an invalid id' do
+ let(:token) { build_jwt { |jwt| jwt['user_id'] = 'this_is_not_a_user' } }
+
+ it_behaves_like 'identifying dependency proxy urls properly with', :no_user
+ end
+ end
+
+ context 'with a deploy token' do
+ let_it_be(:dependency_proxy_user) { create(:deploy_token) }
+
+ it_behaves_like 'identifying dependency proxy urls properly with', :no_user
+ end
+
+ context 'with no jwt token' do
+ let(:token) { nil }
+
+ it_behaves_like 'identifying dependency proxy urls properly with', :no_user
+ end
+
+ context 'with an expired jwt token' do
+ let(:token) { build_jwt(dependency_proxy_user).encoded }
+ let(:accessed_path) { '/v2/group1/dependency_proxy/containers/alpine/manifests/latest' }
+
+ it 'returns nil' do
+ travel_to(Time.zone.now + Auth::DependencyProxyAuthenticationService.token_expire_at + 1.minute) do
+ expect(subject).to eq(nil)
+ end
+ end
+ end
+ end
end
describe '#find_personal_access_token_from_http_basic_auth' do
@@ -201,8 +282,8 @@ RSpec.describe Gitlab::Auth::RequestAuthenticator do
end
describe '#find_user_from_job_token' do
- let!(:user) { build(:user) }
- let!(:job) { build(:ci_build, user: user, status: :running) }
+ let_it_be(:user) { build(:user) }
+ let_it_be(:job) { build(:ci_build, user: user, status: :running) }
before do
env[Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER] = 'token'
@@ -239,7 +320,7 @@ RSpec.describe Gitlab::Auth::RequestAuthenticator do
end
describe '#runner' do
- let!(:runner) { build(:ci_runner) }
+ let_it_be(:runner) { build(:ci_runner) }
it 'returns the runner using #find_runner_from_token' do
expect_any_instance_of(described_class)
diff --git a/spec/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at_spec.rb b/spec/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at_spec.rb
new file mode 100644
index 00000000000..d2bfa86f0d1
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at_spec.rb
@@ -0,0 +1,140 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt, :migration, schema: 20211004110500 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:issues) { table(:issues) }
+ let(:issue_metrics) { table(:issue_metrics) }
+ let(:merge_requests_closing_issues) { table(:merge_requests_closing_issues) }
+ let(:diffs) { table(:merge_request_diffs) }
+ let(:ten_days_ago) { 10.days.ago }
+ let(:commits) do
+ table(:merge_request_diff_commits).tap do |t|
+ t.extend(SuppressCompositePrimaryKeyWarning)
+ end
+ end
+
+ let(:namespace) { namespaces.create!(name: 'ns', path: 'ns') }
+ let(:project) { projects.create!(namespace_id: namespace.id) }
+
+ let!(:issue1) do
+ issues.create!(
+ title: 'issue',
+ description: 'description',
+ project_id: project.id
+ )
+ end
+
+ let!(:issue2) do
+ issues.create!(
+ title: 'issue',
+ description: 'description',
+ project_id: project.id
+ )
+ end
+
+ let!(:merge_request1) do
+ merge_requests.create!(
+ source_branch: 'a',
+ target_branch: 'master',
+ target_project_id: project.id
+ )
+ end
+
+ let!(:merge_request2) do
+ merge_requests.create!(
+ source_branch: 'b',
+ target_branch: 'master',
+ target_project_id: project.id
+ )
+ end
+
+ let!(:merge_request_closing_issue1) do
+ merge_requests_closing_issues.create!(issue_id: issue1.id, merge_request_id: merge_request1.id)
+ end
+
+ let!(:merge_request_closing_issue2) do
+ merge_requests_closing_issues.create!(issue_id: issue2.id, merge_request_id: merge_request2.id)
+ end
+
+ let!(:diff1) { diffs.create!(merge_request_id: merge_request1.id) }
+ let!(:diff2) { diffs.create!(merge_request_id: merge_request1.id) }
+
+ let!(:other_diff) { diffs.create!(merge_request_id: merge_request2.id) }
+
+ let!(:commit1) do
+ commits.create!(
+ merge_request_diff_id: diff2.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('aaa'),
+ authored_date: 5.days.ago
+ )
+ end
+
+ let!(:commit2) do
+ commits.create!(
+ merge_request_diff_id: diff2.id,
+ relative_order: 1,
+ sha: Gitlab::Database::ShaAttribute.serialize('aaa'),
+ authored_date: 10.days.ago
+ )
+ end
+
+ let!(:commit3) do
+ commits.create!(
+ merge_request_diff_id: other_diff.id,
+ relative_order: 1,
+ sha: Gitlab::Database::ShaAttribute.serialize('aaa'),
+ authored_date: 5.days.ago
+ )
+ end
+
+ def run_migration
+ described_class
+ .new
+ .perform(issue_metrics.minimum(:issue_id), issue_metrics.maximum(:issue_id))
+ end
+
+ it "marks successful slices as completed" do
+ min_issue_id = issue_metrics.minimum(:issue_id)
+ max_issue_id = issue_metrics.maximum(:issue_id)
+
+ expect(subject).to receive(:mark_job_as_succeeded).with(min_issue_id, max_issue_id)
+
+ subject.perform(min_issue_id, max_issue_id)
+ end
+
+ context 'when the persisted first_mentioned_in_commit_at is later than the first commit authored_date' do
+ it 'updates the issue_metrics record' do
+ record1 = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: Time.current)
+ record2 = issue_metrics.create!(issue_id: issue2.id, first_mentioned_in_commit_at: Time.current)
+
+ run_migration
+ record1.reload
+ record2.reload
+
+ expect(record1.first_mentioned_in_commit_at).to be_within(2.seconds).of(commit2.authored_date)
+ expect(record2.first_mentioned_in_commit_at).to be_within(2.seconds).of(commit3.authored_date)
+ end
+ end
+
+ context 'when the persisted first_mentioned_in_commit_at is earlier than the first commit authored_date' do
+ it 'does not update the issue_metrics record' do
+ record = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: 20.days.ago)
+
+ expect { run_migration }.not_to change { record.reload.first_mentioned_in_commit_at }
+ end
+ end
+
+ context 'when the first_mentioned_in_commit_at is null' do
+ it 'does nothing' do
+ record = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: nil)
+
+ expect { run_migration }.not_to change { record.reload.first_mentioned_in_commit_at }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb b/spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb
index 452fc962c7b..35ec8be691a 100644
--- a/spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb
+++ b/spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::FixPromotedEpicsDiscussionIds, schema: 20190715193142 do
+RSpec.describe Gitlab::BackgroundMigration::FixPromotedEpicsDiscussionIds, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:users) { table(:users) }
let(:epics) { table(:epics) }
diff --git a/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb b/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb
index 0d0ad2cc39e..95509f9b897 100644
--- a/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb
+++ b/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::FixUserNamespaceNames, schema: 20190620112608 do
+RSpec.describe Gitlab::BackgroundMigration::FixUserNamespaceNames, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:users) { table(:users) }
let(:user) { users.create!(name: "The user's full name", projects_limit: 10, username: 'not-null', email: '1') }
diff --git a/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb b/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb
index 211693d917b..b4444df674e 100644
--- a/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb
+++ b/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::FixUserProjectRouteNames, schema: 20190620112608 do
+RSpec.describe Gitlab::BackgroundMigration::FixUserProjectRouteNames, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:users) { table(:users) }
let(:routes) { table(:routes) }
diff --git a/spec/lib/gitlab/background_migration/migrate_null_private_profile_to_false_spec.rb b/spec/lib/gitlab/background_migration/migrate_null_private_profile_to_false_spec.rb
deleted file mode 100644
index 6ff1157cb86..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_null_private_profile_to_false_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::MigrateNullPrivateProfileToFalse, schema: 20190620105427 do
- let(:users) { table(:users) }
-
- it 'correctly migrates nil private_profile to false' do
- private_profile_true = users.create!(private_profile: true, projects_limit: 1, email: 'a@b.com')
- private_profile_false = users.create!(private_profile: false, projects_limit: 1, email: 'b@c.com')
- private_profile_nil = users.create!(private_profile: nil, projects_limit: 1, email: 'c@d.com')
-
- described_class.new.perform(private_profile_true.id, private_profile_nil.id)
-
- private_profile_true.reload
- private_profile_false.reload
- private_profile_nil.reload
-
- expect(private_profile_true.private_profile).to eq(true)
- expect(private_profile_false.private_profile).to eq(false)
- expect(private_profile_nil.private_profile).to eq(false)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb b/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb
index 815dc2e73e5..b6d93b9ff54 100644
--- a/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::MigratePagesMetadata, schema: 20190919040324 do
+RSpec.describe Gitlab::BackgroundMigration::MigratePagesMetadata, schema: 20181228175414 do
let(:projects) { table(:projects) }
subject(:migrate_pages_metadata) { described_class.new }
diff --git a/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb
index 4e7872a9a1b..1d8eed53553 100644
--- a/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::PopulateMergeRequestAssigneesTable, schema: 20190315191339 do
+RSpec.describe Gitlab::BackgroundMigration::PopulateMergeRequestAssigneesTable, schema: 20181228175414 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:users) { table(:users) }
diff --git a/spec/lib/gitlab/background_migration/populate_topics_total_projects_count_cache_spec.rb b/spec/lib/gitlab/background_migration/populate_topics_total_projects_count_cache_spec.rb
new file mode 100644
index 00000000000..8e07b43f5b9
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/populate_topics_total_projects_count_cache_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::PopulateTopicsTotalProjectsCountCache, schema: 20211006060436 do
+ it 'correctly populates total projects count cache' do
+ namespaces = table(:namespaces)
+ projects = table(:projects)
+ topics = table(:topics)
+ project_topics = table(:project_topics)
+
+ group = namespaces.create!(name: 'group', path: 'group')
+ project_1 = projects.create!(namespace_id: group.id)
+ project_2 = projects.create!(namespace_id: group.id)
+ project_3 = projects.create!(namespace_id: group.id)
+ topic_1 = topics.create!(name: 'Topic1')
+ topic_2 = topics.create!(name: 'Topic2')
+ topic_3 = topics.create!(name: 'Topic3')
+ topic_4 = topics.create!(name: 'Topic4')
+
+ project_topics.create!(project_id: project_1.id, topic_id: topic_1.id)
+ project_topics.create!(project_id: project_1.id, topic_id: topic_3.id)
+ project_topics.create!(project_id: project_2.id, topic_id: topic_3.id)
+ project_topics.create!(project_id: project_1.id, topic_id: topic_4.id)
+ project_topics.create!(project_id: project_2.id, topic_id: topic_4.id)
+ project_topics.create!(project_id: project_3.id, topic_id: topic_4.id)
+
+ subject.perform(topic_1.id, topic_4.id)
+
+ expect(topic_1.reload.total_projects_count).to eq(1)
+ expect(topic_2.reload.total_projects_count).to eq(0)
+ expect(topic_3.reload.total_projects_count).to eq(2)
+ expect(topic_4.reload.total_projects_count).to eq(3)
+ end
+end
diff --git a/spec/lib/gitlab/backtrace_cleaner_spec.rb b/spec/lib/gitlab/backtrace_cleaner_spec.rb
index 51d99bf5f74..e46a90e8606 100644
--- a/spec/lib/gitlab/backtrace_cleaner_spec.rb
+++ b/spec/lib/gitlab/backtrace_cleaner_spec.rb
@@ -22,9 +22,6 @@ RSpec.describe Gitlab::BacktraceCleaner do
"lib/gitlab/git/repository.rb:1451:in `gitaly_migrate'",
"lib/gitlab/git/commit.rb:66:in `find'",
"app/models/repository.rb:1047:in `find_commit'",
- "lib/gitlab/metrics/instrumentation.rb:159:in `block in find_commit'",
- "lib/gitlab/metrics/method_call.rb:36:in `measure'",
- "lib/gitlab/metrics/instrumentation.rb:159:in `find_commit'",
"app/models/repository.rb:113:in `commit'",
"lib/gitlab/i18n.rb:50:in `with_locale'",
"lib/gitlab/middleware/multipart.rb:95:in `call'",
diff --git a/spec/lib/gitlab/cache/import/caching_spec.rb b/spec/lib/gitlab/cache/import/caching_spec.rb
index f770960e27a..946a7c604a1 100644
--- a/spec/lib/gitlab/cache/import/caching_spec.rb
+++ b/spec/lib/gitlab/cache/import/caching_spec.rb
@@ -58,6 +58,16 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
end
end
+ describe '.increment' do
+ it 'increments a key and returns the current value' do
+ expect(described_class.increment('foo')).to eq(1)
+
+ value = Gitlab::Redis::Cache.with { |r| r.get(described_class.cache_key_for('foo')) }
+
+ expect(value.to_i).to eq(1)
+ end
+ end
+
describe '.set_add' do
it 'adds a value to a set' do
described_class.set_add('foo', 10)
diff --git a/spec/lib/gitlab/chat/command_spec.rb b/spec/lib/gitlab/chat/command_spec.rb
index d99c07d1fa3..c8b4b3f73b2 100644
--- a/spec/lib/gitlab/chat/command_spec.rb
+++ b/spec/lib/gitlab/chat/command_spec.rb
@@ -72,6 +72,7 @@ RSpec.describe Gitlab::Chat::Command do
expect(vars['CHAT_INPUT']).to eq('foo')
expect(vars['CHAT_CHANNEL']).to eq('123')
+ expect(vars['CHAT_USER_ID']).to eq(chat_name.chat_id)
end
end
end
diff --git a/spec/lib/gitlab/checks/matching_merge_request_spec.rb b/spec/lib/gitlab/checks/matching_merge_request_spec.rb
index 2e562a5a350..c65a1e4d656 100644
--- a/spec/lib/gitlab/checks/matching_merge_request_spec.rb
+++ b/spec/lib/gitlab/checks/matching_merge_request_spec.rb
@@ -31,33 +31,30 @@ RSpec.describe Gitlab::Checks::MatchingMergeRequest do
expect(matcher.match?).to be false
end
- context 'with load balancing disabled', :request_store, :redis do
- before do
- expect(::Gitlab::Database::LoadBalancing).to receive(:enable?).at_least(:once).and_return(false)
- expect(::Gitlab::Database::LoadBalancing::Sticking).not_to receive(:unstick_or_continue_sticking)
- expect(::Gitlab::Database::LoadBalancing::Sticking).not_to receive(:select_valid_replicas)
- end
-
- it 'does not attempt to stick to primary' do
- expect(subject.match?).to be true
- end
-
- it 'increments no counters' do
- expect { subject.match? }
- .to change { total_counter.get }.by(0)
- .and change { stale_counter.get }.by(0)
- end
- end
-
- context 'with load balancing enabled', :db_load_balancing do
+ context 'with load balancing enabled' do
let(:session) { ::Gitlab::Database::LoadBalancing::Session.current }
let(:all_caught_up) { true }
before do
- allow(::Gitlab::Database::LoadBalancing::Sticking).to receive(:all_caught_up?).and_return(all_caught_up)
+ Gitlab::Database::LoadBalancing::Session.clear_session
+
+ allow(::ApplicationRecord.sticking)
+ .to receive(:all_caught_up?)
+ .and_return(all_caught_up)
+
+ expect(::ApplicationRecord.sticking)
+ .to receive(:select_valid_host)
+ .with(:project, project.id)
+ .and_call_original
+
+ allow(::ApplicationRecord.sticking)
+ .to receive(:select_caught_up_replicas)
+ .with(:project, project.id)
+ .and_return(all_caught_up)
+ end
- expect(::Gitlab::Database::LoadBalancing::Sticking).to receive(:select_valid_host).with(:project, project.id).and_call_original
- allow(::Gitlab::Database::LoadBalancing::Sticking).to receive(:select_caught_up_replicas).with(:project, project.id).and_return(all_caught_up)
+ after do
+ Gitlab::Database::LoadBalancing::Session.clear_session
end
shared_examples 'secondary that has caught up to a primary' do
diff --git a/spec/lib/gitlab/ci/build/auto_retry_spec.rb b/spec/lib/gitlab/ci/build/auto_retry_spec.rb
index e83e1326206..fc5999d59ac 100644
--- a/spec/lib/gitlab/ci/build/auto_retry_spec.rb
+++ b/spec/lib/gitlab/ci/build/auto_retry_spec.rb
@@ -24,6 +24,7 @@ RSpec.describe Gitlab::Ci::Build::AutoRetry do
"default for scheduler failure" | 1 | {} | :scheduler_failure | true
"quota is exceeded" | 0 | { max: 2 } | :ci_quota_exceeded | false
"no matching runner" | 0 | { max: 2 } | :no_matching_runner | false
+ "missing dependencies" | 0 | { max: 2 } | :missing_dependency_failure | false
end
with_them do
diff --git a/spec/lib/gitlab/ci/config/entry/retry_spec.rb b/spec/lib/gitlab/ci/config/entry/retry_spec.rb
index b38387a437e..84ef5344a8b 100644
--- a/spec/lib/gitlab/ci/config/entry/retry_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/retry_spec.rb
@@ -101,7 +101,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Retry do
api_failure
stuck_or_timeout_failure
runner_system_failure
- missing_dependency_failure
runner_unsupported
stale_schedule
job_execution_timeout
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index a471997e43a..cebe8984741 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -363,17 +363,6 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Remote),
an_instance_of(Gitlab::Ci::Config::External::File::Local))
end
-
- context 'when the FF ci_include_rules is disabled' do
- before do
- stub_feature_flags(ci_include_rules: false)
- end
-
- it 'includes the file' do
- expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Remote),
- an_instance_of(Gitlab::Ci::Config::External::File::Local))
- end
- end
end
context 'when the rules does not match' do
@@ -382,17 +371,6 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
it 'does not include the file' do
expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Remote))
end
-
- context 'when the FF ci_include_rules is disabled' do
- before do
- stub_feature_flags(ci_include_rules: false)
- end
-
- it 'includes the file' do
- expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Remote),
- an_instance_of(Gitlab::Ci::Config::External::File::Local))
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index e032d372ecb..c2f28253f54 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -402,5 +402,17 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
expect(output.keys).to match_array([:image, :my_build, :my_test])
end
end
+
+ context 'when rules defined' do
+ context 'when a rule is invalid' do
+ let(:values) do
+ { include: [{ local: 'builds.yml', rules: [{ exists: ['$MY_VAR'] }] }] }
+ end
+
+ it 'raises IncludeError' do
+ expect { subject }.to raise_error(described_class::IncludeError, /invalid include rule/)
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/rules_spec.rb b/spec/lib/gitlab/ci/config/external/rules_spec.rb
index 89ea13d710d..9a5c29befa2 100644
--- a/spec/lib/gitlab/ci/config/external/rules_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/rules_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Ci::Config::External::Rules do
it { is_expected.to eq(true) }
end
- context 'when there is a rule' do
+ context 'when there is a rule with if' do
let(:rule_hashes) { [{ if: '$MY_VAR == "hello"' }] }
context 'when the rule matches' do
@@ -31,5 +31,23 @@ RSpec.describe Gitlab::Ci::Config::External::Rules do
it { is_expected.to eq(false) }
end
end
+
+ context 'when there is a rule with if and when' do
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'on_success' }] }
+
+ it 'raises an error' do
+ expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
+ 'invalid include rule: {:if=>"$MY_VAR == \"hello\"", :when=>"on_success"}')
+ end
+ end
+
+ context 'when there is a rule with exists' do
+ let(:rule_hashes) { [{ exists: ['$MY_VAR'] }] }
+
+ it 'raises an error' do
+ expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
+ 'invalid include rule: {:exists=>["$MY_VAR"]}')
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 0c28515b574..3aa6b2e3c05 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -440,17 +440,30 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
context 'when the environment name is invalid' do
let(:attributes) { { name: 'deploy', ref: 'master', environment: '!!!' } }
- it_behaves_like 'non-deployment job'
- it_behaves_like 'ensures environment inexistence'
+ it 'fails the job with a failure reason and does not create an environment' do
+ expect(subject).to be_failed
+ expect(subject).to be_environment_creation_failure
+ expect(subject.metadata.expanded_environment_name).to be_nil
+ expect(Environment.exists?(name: expected_environment_name)).to eq(false)
+ end
+
+ context 'when surface_environment_creation_failure feature flag is disabled' do
+ before do
+ stub_feature_flags(surface_environment_creation_failure: false)
+ end
- it 'tracks an exception' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(an_instance_of(described_class::EnvironmentCreationFailure),
- project_id: project.id,
- reason: %q{Name can contain only letters, digits, '-', '_', '/', '$', '{', '}', '.', and spaces, but it cannot start or end with '/'})
- .once
+ it_behaves_like 'non-deployment job'
+ it_behaves_like 'ensures environment inexistence'
- subject
+ it 'tracks an exception' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(an_instance_of(described_class::EnvironmentCreationFailure),
+ project_id: project.id,
+ reason: %q{Name can contain only letters, digits, '-', '_', '/', '$', '{', '}', '.', and spaces, but it cannot start or end with '/'})
+ .once
+
+ subject
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/reports/security/flag_spec.rb b/spec/lib/gitlab/ci/reports/security/flag_spec.rb
index 27f83694ac2..d677425a8da 100644
--- a/spec/lib/gitlab/ci/reports/security/flag_spec.rb
+++ b/spec/lib/gitlab/ci/reports/security/flag_spec.rb
@@ -18,9 +18,9 @@ RSpec.describe Gitlab::Ci::Reports::Security::Flag do
end
end
- describe '#to_hash' do
+ describe '#to_h' do
it 'returns expected hash' do
- expect(security_flag.to_hash).to eq(
+ expect(security_flag.to_h).to eq(
{
flag_type: :false_positive,
origin: 'post analyzer X',
diff --git a/spec/lib/gitlab/ci/templates/templates_spec.rb b/spec/lib/gitlab/ci/templates/templates_spec.rb
index 81fc66c4a11..cdda7e953d0 100644
--- a/spec/lib/gitlab/ci/templates/templates_spec.rb
+++ b/spec/lib/gitlab/ci/templates/templates_spec.rb
@@ -13,13 +13,6 @@ RSpec.describe 'CI YML Templates' do
excluded + ["Terraform.gitlab-ci.yml"]
end
- before do
- stub_feature_flags(
- redirect_to_latest_template_terraform: false,
- redirect_to_latest_template_security_api_fuzzing: false,
- redirect_to_latest_template_security_dast: false)
- end
-
shared_examples 'require default stages to be included' do
it 'require default stages to be included' do
expect(subject.stages).to include(*Gitlab::Ci::Config::Entry::Stages.default)
diff --git a/spec/lib/gitlab/ci/trace/archive_spec.rb b/spec/lib/gitlab/ci/trace/archive_spec.rb
new file mode 100644
index 00000000000..c9fc4e720c4
--- /dev/null
+++ b/spec/lib/gitlab/ci/trace/archive_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Trace::Archive do
+ let_it_be(:job) { create(:ci_build, :success, :trace_live) }
+ let_it_be_with_reload(:trace_metadata) { create(:ci_build_trace_metadata, build: job) }
+ let_it_be(:src_checksum) do
+ job.trace.read { |stream| Digest::MD5.hexdigest(stream.raw) }
+ end
+
+ let(:metrics) { spy('metrics') }
+
+ describe '#execute' do
+ subject { described_class.new(job, trace_metadata, metrics) }
+
+ it 'computes and assigns checksum' do
+ Gitlab::Ci::Trace::ChunkedIO.new(job) do |stream|
+ expect { subject.execute!(stream) }.to change { Ci::JobArtifact.count }.by(1)
+ end
+
+ expect(trace_metadata.checksum).to eq(src_checksum)
+ expect(trace_metadata.trace_artifact).to eq(job.job_artifacts_trace)
+ end
+
+ context 'validating artifact checksum' do
+ let(:trace) { 'abc' }
+ let(:stream) { StringIO.new(trace, 'rb') }
+ let(:src_checksum) { Digest::MD5.hexdigest(trace) }
+
+ context 'when the object store is disabled' do
+ before do
+ stub_artifacts_object_storage(enabled: false)
+ end
+
+ it 'skips validation' do
+ subject.execute!(stream)
+
+ expect(trace_metadata.checksum).to eq(src_checksum)
+ expect(trace_metadata.remote_checksum).to be_nil
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :archive_invalid_checksum)
+ end
+ end
+
+ context 'with background_upload enabled' do
+ before do
+ stub_artifacts_object_storage(background_upload: true)
+ end
+
+ it 'skips validation' do
+ subject.execute!(stream)
+
+ expect(trace_metadata.checksum).to eq(src_checksum)
+ expect(trace_metadata.remote_checksum).to be_nil
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :archive_invalid_checksum)
+ end
+ end
+
+ context 'with direct_upload enabled' do
+ before do
+ stub_artifacts_object_storage(direct_upload: true)
+ end
+
+ it 'validates the archived trace' do
+ subject.execute!(stream)
+
+ expect(trace_metadata.checksum).to eq(src_checksum)
+ expect(trace_metadata.remote_checksum).to eq(src_checksum)
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :archive_invalid_checksum)
+ end
+
+ context 'when the checksum does not match' do
+ let(:invalid_remote_checksum) { SecureRandom.hex }
+
+ before do
+ expect(::Gitlab::Ci::Trace::RemoteChecksum)
+ .to receive(:new)
+ .with(an_instance_of(Ci::JobArtifact))
+ .and_return(double(md5_checksum: invalid_remote_checksum))
+ end
+
+ it 'validates the archived trace' do
+ subject.execute!(stream)
+
+ expect(trace_metadata.checksum).to eq(src_checksum)
+ expect(trace_metadata.remote_checksum).to eq(invalid_remote_checksum)
+ expect(metrics)
+ .to have_received(:increment_error_counter)
+ .with(type: :archive_invalid_checksum)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/trace/metrics_spec.rb b/spec/lib/gitlab/ci/trace/metrics_spec.rb
index 6518d0ab075..53e55a57973 100644
--- a/spec/lib/gitlab/ci/trace/metrics_spec.rb
+++ b/spec/lib/gitlab/ci/trace/metrics_spec.rb
@@ -15,4 +15,27 @@ RSpec.describe Gitlab::Ci::Trace::Metrics, :prometheus do
end
end
end
+
+ describe '#increment_error_counter' do
+ context 'when the operation type is known' do
+ it 'increments the counter' do
+ subject.increment_error_counter(type: :chunks_invalid_size)
+ subject.increment_error_counter(type: :chunks_invalid_checksum)
+ subject.increment_error_counter(type: :archive_invalid_checksum)
+
+ expect(described_class.trace_errors_counter.get(type: :chunks_invalid_size)).to eq 1
+ expect(described_class.trace_errors_counter.get(type: :chunks_invalid_checksum)).to eq 1
+ expect(described_class.trace_errors_counter.get(type: :archive_invalid_checksum)).to eq 1
+
+ expect(described_class.trace_errors_counter.values.count).to eq 3
+ end
+ end
+
+ context 'when the operation type is unknown' do
+ it 'raises an exception' do
+ expect { subject.increment_error_counter(type: :invalid_type) }
+ .to raise_error(ArgumentError)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/trace/remote_checksum_spec.rb b/spec/lib/gitlab/ci/trace/remote_checksum_spec.rb
new file mode 100644
index 00000000000..8837ebc3652
--- /dev/null
+++ b/spec/lib/gitlab/ci/trace/remote_checksum_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Trace::RemoteChecksum do
+ let_it_be(:job) { create(:ci_build, :success) }
+
+ let(:file_store) { JobArtifactUploader::Store::LOCAL }
+ let(:trace_artifact) { create(:ci_job_artifact, :trace, job: job, file_store: file_store) }
+ let(:checksum) { Digest::MD5.hexdigest(trace_artifact.file.read) }
+ let(:base64checksum) { Digest::MD5.base64digest(trace_artifact.file.read) }
+ let(:fetcher) { described_class.new(trace_artifact) }
+
+ describe '#md5_checksum' do
+ subject { fetcher.md5_checksum }
+
+ context 'when the file is stored locally' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'when object store is enabled' do
+ before do
+ stub_artifacts_object_storage
+ end
+
+ context 'with local files' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'with remote files' do
+ let(:file_store) { JobArtifactUploader::Store::REMOTE }
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_archived_build_trace_checksum: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with AWS as provider' do
+ it { is_expected.to eq(checksum) }
+ end
+
+ context 'with Google as provider' do
+ before do
+ spy_file = spy(:file)
+ expect(fetcher).to receive(:provider_google?) { true }
+ expect(fetcher).not_to receive(:provider_aws?) { false }
+ allow(spy_file).to receive(:attributes).and_return(metadata)
+
+ allow_next_found_instance_of(Ci::JobArtifact) do |trace_artifact|
+ allow(trace_artifact.file).to receive(:file) { spy_file }
+ end
+ end
+
+ context 'when the response does not include :content_md5' do
+ let(:metadata) {{}}
+
+ it 'raises an exception' do
+ expect { subject }.to raise_error KeyError, /content_md5/
+ end
+ end
+
+ context 'when the response include :content_md5' do
+ let(:metadata) {{ content_md5: base64checksum }}
+
+ it { is_expected.to eq(checksum) }
+ end
+ end
+
+ context 'with unsupported providers' do
+ let(:file_store) { JobArtifactUploader::Store::REMOTE }
+
+ before do
+ expect(fetcher).to receive(:provider_aws?) { false }
+ expect(fetcher).to receive(:provider_google?) { false }
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index 239eff11bf3..3ec332dace5 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
directives: {
base_uri: 'http://example.com',
child_src: "'self' https://child.example.com",
+ connect_src: "'self' ws://example.com",
default_src: "'self' https://other.example.com",
script_src: "'self' https://script.exammple.com ",
worker_src: "data: https://worker.example.com",
@@ -52,6 +53,28 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
expect(directives['child_src']).to eq(directives['frame_src'])
end
+ context 'adds all websocket origins to support Safari' do
+ it 'with insecure domain' do
+ stub_config_setting(host: 'example.com', https: false)
+ expect(directives['connect_src']).to eq("'self' ws://example.com")
+ end
+
+ it 'with secure domain' do
+ stub_config_setting(host: 'example.com', https: true)
+ expect(directives['connect_src']).to eq("'self' wss://example.com")
+ end
+
+ it 'with custom port' do
+ stub_config_setting(host: 'example.com', port: '1234')
+ expect(directives['connect_src']).to eq("'self' ws://example.com:1234")
+ end
+
+ it 'with custom port and secure domain' do
+ stub_config_setting(host: 'example.com', https: true, port: '1234')
+ expect(directives['connect_src']).to eq("'self' wss://example.com:1234")
+ end
+ end
+
context 'when CDN host is defined' do
before do
stub_config_setting(cdn_host: 'https://example.com')
@@ -67,10 +90,11 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
context 'when sentry is configured' do
before do
stub_sentry_settings
+ stub_config_setting(host: 'example.com')
end
it 'adds sentry path to CSP without user' do
- expect(directives['connect_src']).to eq("'self' dummy://example.com/43")
+ expect(directives['connect_src']).to eq("'self' ws://example.com dummy://example.com/43")
end
end
@@ -113,6 +137,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
expect(policy.directives['base-uri']).to eq([csp_config[:directives][:base_uri]])
expect(policy.directives['default-src']).to eq(expected_config(:default_src))
+ expect(policy.directives['connect-src']).to eq(expected_config(:connect_src))
expect(policy.directives['child-src']).to eq(expected_config(:child_src))
expect(policy.directives['worker-src']).to eq(expected_config(:worker_src))
expect(policy.directives['report-uri']).to eq(expected_config(:report_uri))
diff --git a/spec/lib/gitlab/database/bulk_update_spec.rb b/spec/lib/gitlab/database/bulk_update_spec.rb
index dbafada26ca..9a6463c99fa 100644
--- a/spec/lib/gitlab/database/bulk_update_spec.rb
+++ b/spec/lib/gitlab/database/bulk_update_spec.rb
@@ -91,45 +91,38 @@ RSpec.describe Gitlab::Database::BulkUpdate do
.to eq(['MR a', 'Issue a', 'Issue b'])
end
- shared_examples 'basic functionality' do
- it 'sets multiple values' do
- create_default(:user)
- create_default(:project)
-
- i_a, i_b = create_list(:issue, 2)
+ context 'validates prepared_statements support', :reestablished_active_record_base do
+ using RSpec::Parameterized::TableSyntax
- mapping = {
- i_a => { title: 'Issue a' },
- i_b => { title: 'Issue b' }
- }
+ where(:prepared_statements) do
+ [false, true]
+ end
- described_class.execute(%i[title], mapping)
+ before do
+ configuration_hash = ActiveRecord::Base.connection_db_config.configuration_hash
- expect([i_a, i_b].map { |x| x.reset.title })
- .to eq(['Issue a', 'Issue b'])
+ ActiveRecord::Base.establish_connection(
+ configuration_hash.merge(prepared_statements: prepared_statements)
+ )
end
- end
- include_examples 'basic functionality'
+ with_them do
+ it 'sets multiple values' do
+ create_default(:user)
+ create_default(:project)
- context 'when prepared statements are configured differently to the normal test environment' do
- before do
- klass = Class.new(ActiveRecord::Base) do
- def self.abstract_class?
- true # So it gets its own connection
- end
- end
+ i_a, i_b = create_list(:issue, 2)
- stub_const('ActiveRecordBasePreparedStatementsInverted', klass)
+ mapping = {
+ i_a => { title: 'Issue a' },
+ i_b => { title: 'Issue b' }
+ }
- c = ActiveRecord::Base.connection.instance_variable_get(:@config)
- inverted = c.merge(prepared_statements: !ActiveRecord::Base.connection.prepared_statements)
- ActiveRecordBasePreparedStatementsInverted.establish_connection(inverted)
+ described_class.execute(%i[title], mapping)
- allow(ActiveRecord::Base).to receive(:connection_specification_name)
- .and_return(ActiveRecordBasePreparedStatementsInverted.connection_specification_name)
+ expect([i_a, i_b].map { |x| x.reset.title })
+ .to eq(['Issue a', 'Issue b'])
+ end
end
-
- include_examples 'basic functionality'
end
end
diff --git a/spec/lib/gitlab/database/connection_spec.rb b/spec/lib/gitlab/database/connection_spec.rb
index 7f94d7af4a9..ee1df141cd6 100644
--- a/spec/lib/gitlab/database/connection_spec.rb
+++ b/spec/lib/gitlab/database/connection_spec.rb
@@ -126,15 +126,7 @@ RSpec.describe Gitlab::Database::Connection do
end
end
- describe '#disable_prepared_statements' do
- around do |example|
- original_config = connection.scope.connection.pool.db_config
-
- example.run
-
- connection.scope.establish_connection(original_config)
- end
-
+ describe '#disable_prepared_statements', :reestablished_active_record_base do
it 'disables prepared statements' do
connection.scope.establish_connection(
::Gitlab::Database.main.config.merge(prepared_statements: true)
diff --git a/spec/lib/gitlab/database/consistency_spec.rb b/spec/lib/gitlab/database/consistency_spec.rb
index 35fa65512ae..5055be81c88 100644
--- a/spec/lib/gitlab/database/consistency_spec.rb
+++ b/spec/lib/gitlab/database/consistency_spec.rb
@@ -7,6 +7,14 @@ RSpec.describe Gitlab::Database::Consistency do
Gitlab::Database::LoadBalancing::Session.current
end
+ before do
+ Gitlab::Database::LoadBalancing::Session.clear_session
+ end
+
+ after do
+ Gitlab::Database::LoadBalancing::Session.clear_session
+ end
+
describe '.with_read_consistency' do
it 'sticks to primary database' do
expect(session).not_to be_using_primary
diff --git a/spec/lib/gitlab/database/count_spec.rb b/spec/lib/gitlab/database/count_spec.rb
index d65413c2a00..e712ad09927 100644
--- a/spec/lib/gitlab/database/count_spec.rb
+++ b/spec/lib/gitlab/database/count_spec.rb
@@ -46,5 +46,49 @@ RSpec.describe Gitlab::Database::Count do
subject
end
end
+
+ context 'default strategies' do
+ subject { described_class.approximate_counts(models) }
+
+ context 'with a read-only database' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ end
+
+ it 'only uses the ExactCountStrategy' do
+ allow_next_instance_of(Gitlab::Database::Count::TablesampleCountStrategy) do |instance|
+ expect(instance).not_to receive(:count)
+ end
+ allow_next_instance_of(Gitlab::Database::Count::ReltuplesCountStrategy) do |instance|
+ expect(instance).not_to receive(:count)
+ end
+ expect_next_instance_of(Gitlab::Database::Count::ExactCountStrategy) do |instance|
+ expect(instance).to receive(:count).and_return({})
+ end
+
+ subject
+ end
+ end
+
+ context 'with a read-write database' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(false)
+ end
+
+ it 'uses the available strategies' do
+ [
+ Gitlab::Database::Count::TablesampleCountStrategy,
+ Gitlab::Database::Count::ReltuplesCountStrategy,
+ Gitlab::Database::Count::ExactCountStrategy
+ ].each do |strategy_klass|
+ expect_next_instance_of(strategy_klass) do |instance|
+ expect(instance).to receive(:count).and_return({})
+ end
+ end
+
+ subject
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb b/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb
index ebbbafb855f..768855464c1 100644
--- a/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::ActionCableCallbacks, :request_store do
describe '.wrapper' do
it 'uses primary and then releases the connection and clears the session' do
- expect(Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :release_host)
+ expect(Gitlab::Database::LoadBalancing).to receive(:release_hosts)
expect(Gitlab::Database::LoadBalancing::Session).to receive(:clear_session)
described_class.wrapper.call(
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ActionCableCallbacks, :request_s
context 'with an exception' do
it 'releases the connection and clears the session' do
- expect(Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :release_host)
+ expect(Gitlab::Database::LoadBalancing).to receive(:release_hosts)
expect(Gitlab::Database::LoadBalancing::Session).to receive(:clear_session)
expect do
diff --git a/spec/lib/gitlab/database/load_balancing/active_record_proxy_spec.rb b/spec/lib/gitlab/database/load_balancing/active_record_proxy_spec.rb
deleted file mode 100644
index 8886ce9756d..00000000000
--- a/spec/lib/gitlab/database/load_balancing/active_record_proxy_spec.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::LoadBalancing::ActiveRecordProxy do
- describe '#connection' do
- it 'returns a connection proxy' do
- dummy = Class.new do
- include Gitlab::Database::LoadBalancing::ActiveRecordProxy
- end
-
- proxy = double(:proxy)
-
- expect(Gitlab::Database::LoadBalancing).to receive(:proxy)
- .and_return(proxy)
-
- expect(dummy.new.connection).to eq(proxy)
- end
- end
-end
diff --git a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
index 6621e6276a5..3e5249a3dea 100644
--- a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
@@ -108,6 +108,14 @@ RSpec.describe Gitlab::Database::LoadBalancing::Configuration do
end
describe '#load_balancing_enabled?' do
+ it 'returns false when running inside a Rake task' do
+ config = described_class.new(ActiveRecord::Base, %w[foo bar])
+
+ allow(Gitlab::Runtime).to receive(:rake?).and_return(true)
+
+ expect(config.load_balancing_enabled?).to eq(false)
+ end
+
it 'returns true when hosts are configured' do
config = described_class.new(ActiveRecord::Base, %w[foo bar])
diff --git a/spec/lib/gitlab/database/load_balancing/host_spec.rb b/spec/lib/gitlab/database/load_balancing/host_spec.rb
index e2011692228..b040c7a76bd 100644
--- a/spec/lib/gitlab/database/load_balancing/host_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/host_spec.rb
@@ -172,6 +172,14 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
expect(host).not_to be_online
end
+
+ it 'returns false when ActiveRecord::ConnectionNotEstablished is raised' do
+ allow(host)
+ .to receive(:check_replica_status?)
+ .and_raise(ActiveRecord::ConnectionNotEstablished)
+
+ expect(host).not_to be_online
+ end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
index 86fae14b961..f3ce5563e38 100644
--- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
@@ -47,16 +47,27 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
describe '#initialize' do
- it 'ignores the hosts when the primary_only option is enabled' do
+ it 'ignores the hosts when load balancing is disabled' do
config = Gitlab::Database::LoadBalancing::Configuration
.new(ActiveRecord::Base, [db_host])
- lb = described_class.new(config, primary_only: true)
+
+ allow(config).to receive(:load_balancing_enabled?).and_return(false)
+
+ lb = described_class.new(config)
hosts = lb.host_list.hosts
expect(hosts.length).to eq(1)
expect(hosts.first)
.to be_instance_of(Gitlab::Database::LoadBalancing::PrimaryHost)
end
+
+ it 'sets the name of the connection that is used' do
+ config =
+ Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base)
+ lb = described_class.new(config)
+
+ expect(lb.name).to eq(:main)
+ end
end
describe '#read' do
@@ -140,10 +151,13 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
.to yield_with_args(ActiveRecord::Base.retrieve_connection)
end
- it 'uses the primary when the primary_only option is enabled' do
+ it 'uses the primary when load balancing is disabled' do
config = Gitlab::Database::LoadBalancing::Configuration
.new(ActiveRecord::Base)
- lb = described_class.new(config, primary_only: true)
+
+ allow(config).to receive(:load_balancing_enabled?).and_return(false)
+
+ lb = described_class.new(config)
# When no hosts are configured, we don't want to produce any warnings, as
# they aren't useful/too noisy.
@@ -274,34 +288,43 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
expect { lb.retry_with_backoff { raise } }.to raise_error(RuntimeError)
end
- end
- describe '#connection_error?' do
- before do
- stub_const('Gitlab::Database::LoadBalancing::LoadBalancer::CONNECTION_ERRORS',
- [NotImplementedError])
+ it 'skips retries when only the primary is used' do
+ allow(lb).to receive(:primary_only?).and_return(true)
+
+ expect(lb).not_to receive(:sleep)
+
+ expect { lb.retry_with_backoff { raise } }.to raise_error(RuntimeError)
end
+ end
+ describe '#connection_error?' do
it 'returns true for a connection error' do
- error = NotImplementedError.new
+ error = ActiveRecord::ConnectionNotEstablished.new
expect(lb.connection_error?(error)).to eq(true)
end
+ it 'returns false for a missing database error' do
+ error = ActiveRecord::NoDatabaseError.new
+
+ expect(lb.connection_error?(error)).to eq(false)
+ end
+
it 'returns true for a wrapped connection error' do
- wrapped = wrapped_exception(ActiveRecord::StatementInvalid, NotImplementedError)
+ wrapped = wrapped_exception(ActiveRecord::StatementInvalid, ActiveRecord::ConnectionNotEstablished)
expect(lb.connection_error?(wrapped)).to eq(true)
end
it 'returns true for a wrapped connection error from a view' do
- wrapped = wrapped_exception(ActionView::Template::Error, NotImplementedError)
+ wrapped = wrapped_exception(ActionView::Template::Error, ActiveRecord::ConnectionNotEstablished)
expect(lb.connection_error?(wrapped)).to eq(true)
end
it 'returns true for deeply wrapped/nested errors' do
- top = twice_wrapped_exception(ActionView::Template::Error, ActiveRecord::StatementInvalid, NotImplementedError)
+ top = twice_wrapped_exception(ActionView::Template::Error, ActiveRecord::StatementInvalid, ActiveRecord::ConnectionNotEstablished)
expect(lb.connection_error?(top)).to eq(true)
end
diff --git a/spec/lib/gitlab/database/load_balancing/primary_host_spec.rb b/spec/lib/gitlab/database/load_balancing/primary_host_spec.rb
index a0e63a7ee4e..45d81808971 100644
--- a/spec/lib/gitlab/database/load_balancing/primary_host_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/primary_host_spec.rb
@@ -63,9 +63,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::PrimaryHost do
end
describe '#primary_write_location' do
- it 'returns the write location of the primary' do
- expect(host.primary_write_location).to be_an_instance_of(String)
- expect(host.primary_write_location).not_to be_empty
+ it 'raises NotImplementedError' do
+ expect { host.primary_write_location }.to raise_error(NotImplementedError)
end
end
@@ -76,51 +75,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::PrimaryHost do
end
describe '#database_replica_location' do
- let(:connection) { double(:connection) }
-
- it 'returns the write ahead location of the replica', :aggregate_failures do
- expect(host)
- .to receive(:query_and_release)
- .and_return({ 'location' => '0/D525E3A8' })
-
- expect(host.database_replica_location).to be_an_instance_of(String)
- end
-
- it 'returns nil when the database query returned no rows' do
- expect(host).to receive(:query_and_release).and_return({})
-
- expect(host.database_replica_location).to be_nil
- end
-
- it 'returns nil when the database connection fails' do
- allow(host).to receive(:connection).and_raise(PG::Error)
-
- expect(host.database_replica_location).to be_nil
- end
- end
-
- describe '#query_and_release' do
- it 'executes a SQL query' do
- results = host.query_and_release('SELECT 10 AS number')
-
- expect(results).to be_an_instance_of(Hash)
- expect(results['number'].to_i).to eq(10)
- end
-
- it 'releases the connection after running the query' do
- expect(host)
- .to receive(:release_connection)
- .once
-
- host.query_and_release('SELECT 10 AS number')
- end
-
- it 'returns an empty Hash in the event of an error' do
- expect(host.connection)
- .to receive(:select_all)
- .and_raise(RuntimeError, 'kittens')
-
- expect(host.query_and_release('SELECT 10 AS number')).to eq({})
+ it 'raises NotImplementedError' do
+ expect { host.database_replica_location }.to raise_error(NotImplementedError)
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
index ea0c7f781fd..af7e2a4b167 100644
--- a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
@@ -6,12 +6,12 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
let(:app) { double(:app) }
let(:middleware) { described_class.new(app) }
let(:warden_user) { double(:warden, user: double(:user, id: 42)) }
- let(:single_sticking_object) { Set.new([[:user, 42]]) }
+ let(:single_sticking_object) { Set.new([[ActiveRecord::Base, :user, 42]]) }
let(:multiple_sticking_objects) do
Set.new([
- [:user, 42],
- [:runner, '123456789'],
- [:runner, '1234']
+ [ActiveRecord::Base, :user, 42],
+ [ActiveRecord::Base, :runner, '123456789'],
+ [ActiveRecord::Base, :runner, '1234']
])
end
@@ -19,47 +19,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
Gitlab::Database::LoadBalancing::Session.clear_session
end
- describe '.stick_or_unstick' do
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?)
- .and_return(true)
- end
-
- it 'sticks or unsticks a single object and updates the Rack environment' do
- expect(Gitlab::Database::LoadBalancing::Sticking)
- .to receive(:unstick_or_continue_sticking)
- .with(:user, 42)
-
- env = {}
-
- described_class.stick_or_unstick(env, :user, 42)
-
- expect(env[described_class::STICK_OBJECT].to_a).to eq([[:user, 42]])
- end
-
- it 'sticks or unsticks multiple objects and updates the Rack environment' do
- expect(Gitlab::Database::LoadBalancing::Sticking)
- .to receive(:unstick_or_continue_sticking)
- .with(:user, 42)
- .ordered
-
- expect(Gitlab::Database::LoadBalancing::Sticking)
- .to receive(:unstick_or_continue_sticking)
- .with(:runner, '123456789')
- .ordered
-
- env = {}
-
- described_class.stick_or_unstick(env, :user, 42)
- described_class.stick_or_unstick(env, :runner, '123456789')
-
- expect(env[described_class::STICK_OBJECT].to_a).to eq([
- [:user, 42],
- [:runner, '123456789']
- ])
- end
- end
-
describe '#call' do
it 'handles a request' do
env = {}
@@ -82,7 +41,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
describe '#unstick_or_continue_sticking' do
it 'does not stick if no namespace and identifier could be found' do
- expect(Gitlab::Database::LoadBalancing::Sticking)
+ expect(ApplicationRecord.sticking)
.not_to receive(:unstick_or_continue_sticking)
middleware.unstick_or_continue_sticking({})
@@ -91,9 +50,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
it 'sticks to the primary if a warden user is found' do
env = { 'warden' => warden_user }
- expect(Gitlab::Database::LoadBalancing::Sticking)
- .to receive(:unstick_or_continue_sticking)
- .with(:user, 42)
+ Gitlab::Database::LoadBalancing.base_models.each do |model|
+ expect(model.sticking)
+ .to receive(:unstick_or_continue_sticking)
+ .with(:user, 42)
+ end
middleware.unstick_or_continue_sticking(env)
end
@@ -101,7 +62,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
it 'sticks to the primary if a sticking namespace and identifier is found' do
env = { described_class::STICK_OBJECT => single_sticking_object }
- expect(Gitlab::Database::LoadBalancing::Sticking)
+ expect(ApplicationRecord.sticking)
.to receive(:unstick_or_continue_sticking)
.with(:user, 42)
@@ -111,17 +72,17 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
it 'sticks to the primary if multiple sticking namespaces and identifiers were found' do
env = { described_class::STICK_OBJECT => multiple_sticking_objects }
- expect(Gitlab::Database::LoadBalancing::Sticking)
+ expect(ApplicationRecord.sticking)
.to receive(:unstick_or_continue_sticking)
.with(:user, 42)
.ordered
- expect(Gitlab::Database::LoadBalancing::Sticking)
+ expect(ApplicationRecord.sticking)
.to receive(:unstick_or_continue_sticking)
.with(:runner, '123456789')
.ordered
- expect(Gitlab::Database::LoadBalancing::Sticking)
+ expect(ApplicationRecord.sticking)
.to receive(:unstick_or_continue_sticking)
.with(:runner, '1234')
.ordered
@@ -132,7 +93,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
describe '#stick_if_necessary' do
it 'does not stick to the primary if not necessary' do
- expect(Gitlab::Database::LoadBalancing::Sticking)
+ expect(ApplicationRecord.sticking)
.not_to receive(:stick_if_necessary)
middleware.stick_if_necessary({})
@@ -141,9 +102,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
it 'sticks to the primary if a warden user is found' do
env = { 'warden' => warden_user }
- expect(Gitlab::Database::LoadBalancing::Sticking)
- .to receive(:stick_if_necessary)
- .with(:user, 42)
+ Gitlab::Database::LoadBalancing.base_models.each do |model|
+ expect(model.sticking)
+ .to receive(:stick_if_necessary)
+ .with(:user, 42)
+ end
middleware.stick_if_necessary(env)
end
@@ -151,7 +114,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
it 'sticks to the primary if a a single sticking object is found' do
env = { described_class::STICK_OBJECT => single_sticking_object }
- expect(Gitlab::Database::LoadBalancing::Sticking)
+ expect(ApplicationRecord.sticking)
.to receive(:stick_if_necessary)
.with(:user, 42)
@@ -161,17 +124,17 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
it 'sticks to the primary if multiple sticking namespaces and identifiers were found' do
env = { described_class::STICK_OBJECT => multiple_sticking_objects }
- expect(Gitlab::Database::LoadBalancing::Sticking)
+ expect(ApplicationRecord.sticking)
.to receive(:stick_if_necessary)
.with(:user, 42)
.ordered
- expect(Gitlab::Database::LoadBalancing::Sticking)
+ expect(ApplicationRecord.sticking)
.to receive(:stick_if_necessary)
.with(:runner, '123456789')
.ordered
- expect(Gitlab::Database::LoadBalancing::Sticking)
+ expect(ApplicationRecord.sticking)
.to receive(:stick_if_necessary)
.with(:runner, '1234')
.ordered
@@ -182,47 +145,34 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
describe '#clear' do
it 'clears the currently used host and session' do
- lb = double(:lb)
session = spy(:session)
- allow(middleware).to receive(:load_balancer).and_return(lb)
-
- expect(lb).to receive(:release_host)
-
stub_const('Gitlab::Database::LoadBalancing::Session', session)
+ expect(Gitlab::Database::LoadBalancing).to receive(:release_hosts)
+
middleware.clear
expect(session).to have_received(:clear_session)
end
end
- describe '.load_balancer' do
- it 'returns a the load balancer' do
- proxy = double(:proxy)
-
- expect(Gitlab::Database::LoadBalancing).to receive(:proxy)
- .and_return(proxy)
-
- expect(proxy).to receive(:load_balancer)
-
- middleware.load_balancer
- end
- end
-
- describe '#sticking_namespaces_and_ids' do
+ describe '#sticking_namespaces' do
context 'using a Warden request' do
it 'returns the warden user if present' do
env = { 'warden' => warden_user }
+ ids = Gitlab::Database::LoadBalancing.base_models.map do |model|
+ [model, :user, 42]
+ end
- expect(middleware.sticking_namespaces_and_ids(env)).to eq([[:user, 42]])
+ expect(middleware.sticking_namespaces(env)).to eq(ids)
end
it 'returns an empty Array if no user was present' do
warden = double(:warden, user: nil)
env = { 'warden' => warden }
- expect(middleware.sticking_namespaces_and_ids(env)).to eq([])
+ expect(middleware.sticking_namespaces(env)).to eq([])
end
end
@@ -230,17 +180,17 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
it 'returns the sticking object' do
env = { described_class::STICK_OBJECT => multiple_sticking_objects }
- expect(middleware.sticking_namespaces_and_ids(env)).to eq([
- [:user, 42],
- [:runner, '123456789'],
- [:runner, '1234']
+ expect(middleware.sticking_namespaces(env)).to eq([
+ [ActiveRecord::Base, :user, 42],
+ [ActiveRecord::Base, :runner, '123456789'],
+ [ActiveRecord::Base, :runner, '1234']
])
end
end
context 'using a regular request' do
it 'returns an empty Array' do
- expect(middleware.sticking_namespaces_and_ids({})).to eq([])
+ expect(middleware.sticking_namespaces({})).to eq([])
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/setup_spec.rb b/spec/lib/gitlab/database/load_balancing/setup_spec.rb
new file mode 100644
index 00000000000..01646bc76ef
--- /dev/null
+++ b/spec/lib/gitlab/database/load_balancing/setup_spec.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::LoadBalancing::Setup do
+ describe '#setup' do
+ it 'sets up the load balancer' do
+ setup = described_class.new(ActiveRecord::Base)
+
+ expect(setup).to receive(:disable_prepared_statements)
+ expect(setup).to receive(:setup_load_balancer)
+ expect(setup).to receive(:setup_service_discovery)
+
+ setup.setup
+ end
+ end
+
+ describe '#disable_prepared_statements' do
+ it 'disables prepared statements and reconnects to the database' do
+ config = double(
+ :config,
+ configuration_hash: { host: 'localhost' },
+ env_name: 'test',
+ name: 'main'
+ )
+ model = double(:model, connection_db_config: config)
+
+ expect(ActiveRecord::DatabaseConfigurations::HashConfig)
+ .to receive(:new)
+ .with('test', 'main', { host: 'localhost', prepared_statements: false })
+ .and_call_original
+
+ # HashConfig doesn't implement its own #==, so we can't directly compare
+ # the expected value with a pre-defined one.
+ expect(model)
+ .to receive(:establish_connection)
+ .with(an_instance_of(ActiveRecord::DatabaseConfigurations::HashConfig))
+
+ described_class.new(model).disable_prepared_statements
+ end
+ end
+
+ describe '#setup_load_balancer' do
+ it 'sets up the load balancer' do
+ model = Class.new(ActiveRecord::Base)
+ setup = described_class.new(model)
+ config = Gitlab::Database::LoadBalancing::Configuration.new(model)
+ lb = instance_spy(Gitlab::Database::LoadBalancing::LoadBalancer)
+
+ allow(lb).to receive(:configuration).and_return(config)
+
+ expect(Gitlab::Database::LoadBalancing::LoadBalancer)
+ .to receive(:new)
+ .with(setup.configuration)
+ .and_return(lb)
+
+ setup.setup_load_balancer
+
+ expect(model.connection.load_balancer).to eq(lb)
+ expect(model.sticking)
+ .to be_an_instance_of(Gitlab::Database::LoadBalancing::Sticking)
+ end
+ end
+
+ describe '#setup_service_discovery' do
+ context 'when service discovery is disabled' do
+ it 'does nothing' do
+ expect(Gitlab::Database::LoadBalancing::ServiceDiscovery)
+ .not_to receive(:new)
+
+ described_class.new(ActiveRecord::Base).setup_service_discovery
+ end
+ end
+
+ context 'when service discovery is enabled' do
+ it 'immediately performs service discovery' do
+ model = ActiveRecord::Base
+ setup = described_class.new(model)
+ sv = instance_spy(Gitlab::Database::LoadBalancing::ServiceDiscovery)
+ lb = model.connection.load_balancer
+
+ allow(setup.configuration)
+ .to receive(:service_discovery_enabled?)
+ .and_return(true)
+
+ allow(Gitlab::Database::LoadBalancing::ServiceDiscovery)
+ .to receive(:new)
+ .with(lb, setup.configuration.service_discovery)
+ .and_return(sv)
+
+ expect(sv).to receive(:perform_service_discovery)
+ expect(sv).not_to receive(:start)
+
+ setup.setup_service_discovery
+ end
+
+ it 'starts service discovery if needed' do
+ model = ActiveRecord::Base
+ setup = described_class.new(model, start_service_discovery: true)
+ sv = instance_spy(Gitlab::Database::LoadBalancing::ServiceDiscovery)
+ lb = model.connection.load_balancer
+
+ allow(setup.configuration)
+ .to receive(:service_discovery_enabled?)
+ .and_return(true)
+
+ allow(Gitlab::Database::LoadBalancing::ServiceDiscovery)
+ .to receive(:new)
+ .with(lb, setup.configuration.service_discovery)
+ .and_return(sv)
+
+ expect(sv).to receive(:perform_service_discovery)
+ expect(sv).to receive(:start)
+
+ setup.setup_service_discovery
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
index f683ade978a..08dd6a0a788 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
@@ -5,14 +5,12 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
let(:middleware) { described_class.new }
- let(:load_balancer) { double.as_null_object }
let(:worker_class) { 'TestDataConsistencyWorker' }
let(:job) { { "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e" } }
before do
skip_feature_flags_yaml_validation
skip_default_enabled_yaml_check
- allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer).and_return(load_balancer)
end
after do
@@ -23,7 +21,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
middleware.call(worker_class, job, nil, nil) {}
end
- describe '#call' do
+ describe '#call', :database_replica do
shared_context 'data consistency worker class' do |data_consistency, feature_flag|
let(:expected_consistency) { data_consistency }
let(:worker_class) do
@@ -85,9 +83,15 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
end
it 'passes database_replica_location' do
- expected_location = { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => location }
+ expected_location = {}
- expect(load_balancer).to receive_message_chain(:host, "database_replica_location").and_return(location)
+ Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
+ expect(lb.host)
+ .to receive(:database_replica_location)
+ .and_return(location)
+
+ expected_location[lb.name] = location
+ end
run_middleware
@@ -103,9 +107,15 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
end
it 'passes primary write location', :aggregate_failures do
- expected_location = { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => location }
+ expected_location = {}
- expect(load_balancer).to receive(:primary_write_location).and_return(location)
+ Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
+ expect(lb)
+ .to receive(:primary_write_location)
+ .and_return(location)
+
+ expected_location[lb.name] = location
+ end
run_middleware
@@ -137,8 +147,10 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
let(:job) { { "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations } }
before do
- allow(load_balancer).to receive(:primary_write_location).and_return(new_location)
- allow(load_balancer).to receive(:database_replica_location).and_return(new_location)
+ Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
+ allow(lb).to receive(:primary_write_location).and_return(new_location)
+ allow(lb).to receive(:database_replica_location).and_return(new_location)
+ end
end
shared_examples_for 'does not set database location again' do |use_primary|
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
index 9f23eb0094f..06efdcd8f99 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
@@ -2,20 +2,17 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
+RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_gitlab_redis_queues do
let(:middleware) { described_class.new }
-
- let(:load_balancer) { double.as_null_object }
-
let(:worker) { worker_class.new }
let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'database_replica_location' => '0/D525E3A8' } }
before do
skip_feature_flags_yaml_validation
skip_default_enabled_yaml_check
- allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer).and_return(load_balancer)
replication_lag!(false)
+ Gitlab::Database::LoadBalancing::Session.clear_session
end
after do
@@ -67,7 +64,10 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => location } }
it 'does not stick to the primary', :aggregate_failures do
- expect(load_balancer).to receive(:select_up_to_date_host).with(location).and_return(true)
+ expect(ActiveRecord::Base.connection.load_balancer)
+ .to receive(:select_up_to_date_host)
+ .with(location)
+ .and_return(true)
run_middleware do
expect(Gitlab::Database::LoadBalancing::Session.current.use_primary?).not_to be_truthy
@@ -92,7 +92,12 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e', 'wal_locations' => wal_locations } }
before do
- allow(load_balancer).to receive(:select_up_to_date_host).with(location).and_return(true)
+ Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
+ allow(lb)
+ .to receive(:select_up_to_date_host)
+ .with(location)
+ .and_return(true)
+ end
end
it_behaves_like 'replica is up to date', 'replica'
@@ -102,7 +107,10 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e', 'dedup_wal_locations' => wal_locations } }
before do
- allow(load_balancer).to receive(:select_up_to_date_host).with(wal_locations[:main]).and_return(true)
+ allow(ActiveRecord::Base.connection.load_balancer)
+ .to receive(:select_up_to_date_host)
+ .with(wal_locations[:main])
+ .and_return(true)
end
it_behaves_like 'replica is up to date', 'replica'
@@ -112,7 +120,10 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e', 'database_write_location' => '0/D525E3A8' } }
before do
- allow(load_balancer).to receive(:select_up_to_date_host).with('0/D525E3A8').and_return(true)
+ allow(ActiveRecord::Base.connection.load_balancer)
+ .to receive(:select_up_to_date_host)
+ .with('0/D525E3A8')
+ .and_return(true)
end
it_behaves_like 'replica is up to date', 'replica'
@@ -158,18 +169,15 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
process_job(job)
end.to raise_error(Sidekiq::JobRetry::Skip)
- expect(job['error_class']).to eq('Gitlab::Database::LoadBalancing::SidekiqServerMiddleware::JobReplicaNotUpToDate')
+ job_for_retry = Sidekiq::RetrySet.new.first
+ expect(job_for_retry['error_class']).to eq('Gitlab::Database::LoadBalancing::SidekiqServerMiddleware::JobReplicaNotUpToDate')
end
include_examples 'load balancing strategy', 'retry'
end
context 'when job is retried' do
- before do
- expect do
- process_job(job)
- end.to raise_error(Sidekiq::JobRetry::Skip)
- end
+ let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'database_replica_location' => '0/D525E3A8', 'retry_count' => 0 } }
context 'and replica still lagging behind' do
include_examples 'stick to the primary', 'primary'
@@ -191,7 +199,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
context 'when replica is not up to date' do
before do
- allow(load_balancer).to receive(:select_up_to_date_host).and_return(false)
+ Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
+ allow(lb).to receive(:select_up_to_date_host).and_return(false)
+ end
end
include_examples 'stick to the primary', 'primary'
@@ -199,8 +209,47 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
end
end
+ describe '#databases_in_sync?' do
+ it 'treats load balancers without WAL entries as in sync' do
+ expect(middleware.send(:databases_in_sync?, {}))
+ .to eq(true)
+ end
+
+ it 'returns true when all load balancers are in sync' do
+ locations = {}
+
+ Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
+ locations[lb.name] = 'foo'
+
+ expect(lb)
+ .to receive(:select_up_to_date_host)
+ .with('foo')
+ .and_return(true)
+ end
+
+ expect(middleware.send(:databases_in_sync?, locations))
+ .to eq(true)
+ end
+
+ it 'returns false when the load balancers are not in sync' do
+ locations = {}
+
+ Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
+ locations[lb.name] = 'foo'
+
+ allow(lb)
+ .to receive(:select_up_to_date_host)
+ .with('foo')
+ .and_return(false)
+ end
+
+ expect(middleware.send(:databases_in_sync?, locations))
+ .to eq(false)
+ end
+ end
+
def process_job(job)
- Sidekiq::JobRetry.new.local(worker_class, job, 'default') do
+ Sidekiq::JobRetry.new.local(worker_class, job.to_json, 'default') do
worker_class.process_job(job)
end
end
@@ -212,6 +261,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
end
def replication_lag!(exists)
- allow(load_balancer).to receive(:select_up_to_date_host).and_return(!exists)
+ Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
+ allow(lb).to receive(:select_up_to_date_host).and_return(!exists)
+ end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
index cf52e59db3a..8ceda52ee85 100644
--- a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
@@ -3,55 +3,82 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
+ let(:sticking) do
+ described_class.new(ActiveRecord::Base.connection.load_balancer)
+ end
+
after do
Gitlab::Database::LoadBalancing::Session.clear_session
end
- describe '.stick_if_necessary' do
- context 'when sticking is disabled' do
- it 'does not perform any sticking' do
- expect(described_class).not_to receive(:stick)
+ describe '#stick_or_unstick_request' do
+ it 'sticks or unsticks a single object and updates the Rack environment' do
+ expect(sticking)
+ .to receive(:unstick_or_continue_sticking)
+ .with(:user, 42)
- described_class.stick_if_necessary(:user, 42)
- end
+ env = {}
+
+ sticking.stick_or_unstick_request(env, :user, 42)
+
+ expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a)
+ .to eq([[ActiveRecord::Base, :user, 42]])
end
- context 'when sticking is enabled' do
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?)
- .and_return(true)
- end
+ it 'sticks or unsticks multiple objects and updates the Rack environment' do
+ expect(sticking)
+ .to receive(:unstick_or_continue_sticking)
+ .with(:user, 42)
+ .ordered
- it 'does not stick if no write was performed' do
- allow(Gitlab::Database::LoadBalancing::Session.current)
- .to receive(:performed_write?)
- .and_return(false)
+ expect(sticking)
+ .to receive(:unstick_or_continue_sticking)
+ .with(:runner, '123456789')
+ .ordered
- expect(described_class).not_to receive(:stick)
+ env = {}
- described_class.stick_if_necessary(:user, 42)
- end
+ sticking.stick_or_unstick_request(env, :user, 42)
+ sticking.stick_or_unstick_request(env, :runner, '123456789')
- it 'sticks to the primary if a write was performed' do
- allow(Gitlab::Database::LoadBalancing::Session.current)
- .to receive(:performed_write?)
- .and_return(true)
+ expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a).to eq([
+ [ActiveRecord::Base, :user, 42],
+ [ActiveRecord::Base, :runner, '123456789']
+ ])
+ end
+ end
- expect(described_class).to receive(:stick).with(:user, 42)
+ describe '#stick_if_necessary' do
+ it 'does not stick if no write was performed' do
+ allow(Gitlab::Database::LoadBalancing::Session.current)
+ .to receive(:performed_write?)
+ .and_return(false)
- described_class.stick_if_necessary(:user, 42)
- end
+ expect(sticking).not_to receive(:stick)
+
+ sticking.stick_if_necessary(:user, 42)
+ end
+
+ it 'sticks to the primary if a write was performed' do
+ allow(Gitlab::Database::LoadBalancing::Session.current)
+ .to receive(:performed_write?)
+ .and_return(true)
+
+ expect(sticking)
+ .to receive(:stick)
+ .with(:user, 42)
+
+ sticking.stick_if_necessary(:user, 42)
end
end
- describe '.all_caught_up?' do
- let(:lb) { double(:lb) }
+ describe '#all_caught_up?' do
+ let(:lb) { ActiveRecord::Base.connection.load_balancer }
let(:last_write_location) { 'foo' }
before do
- allow(described_class).to receive(:load_balancer).and_return(lb)
-
- allow(described_class).to receive(:last_write_location_for)
+ allow(sticking)
+ .to receive(:last_write_location_for)
.with(:user, 42)
.and_return(last_write_location)
end
@@ -60,13 +87,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
let(:last_write_location) { nil }
it 'returns true' do
- allow(described_class).to receive(:last_write_location_for)
- .with(:user, 42)
- .and_return(nil)
-
expect(lb).not_to receive(:select_up_to_date_host)
- expect(described_class.all_caught_up?(:user, 42)).to eq(true)
+ expect(sticking.all_caught_up?(:user, 42)).to eq(true)
end
end
@@ -76,9 +99,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
end
it 'returns true, and unsticks' do
- expect(described_class).to receive(:unstick).with(:user, 42)
+ expect(sticking)
+ .to receive(:unstick)
+ .with(:user, 42)
- expect(described_class.all_caught_up?(:user, 42)).to eq(true)
+ expect(sticking.all_caught_up?(:user, 42)).to eq(true)
end
it 'notifies with the proper event payload' do
@@ -87,7 +112,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
.with('caught_up_replica_pick.load_balancing', { result: true })
.and_call_original
- described_class.all_caught_up?(:user, 42)
+ sticking.all_caught_up?(:user, 42)
end
end
@@ -97,7 +122,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
end
it 'returns false' do
- expect(described_class.all_caught_up?(:user, 42)).to eq(false)
+ expect(sticking.all_caught_up?(:user, 42)).to eq(false)
end
it 'notifies with the proper event payload' do
@@ -106,42 +131,43 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
.with('caught_up_replica_pick.load_balancing', { result: false })
.and_call_original
- described_class.all_caught_up?(:user, 42)
+ sticking.all_caught_up?(:user, 42)
end
end
end
- describe '.unstick_or_continue_sticking' do
- let(:lb) { double(:lb) }
-
- before do
- allow(described_class).to receive(:load_balancer).and_return(lb)
- end
+ describe '#unstick_or_continue_sticking' do
+ let(:lb) { ActiveRecord::Base.connection.load_balancer }
it 'simply returns if no write location could be found' do
- allow(described_class).to receive(:last_write_location_for)
+ allow(sticking)
+ .to receive(:last_write_location_for)
.with(:user, 42)
.and_return(nil)
expect(lb).not_to receive(:select_up_to_date_host)
- described_class.unstick_or_continue_sticking(:user, 42)
+ sticking.unstick_or_continue_sticking(:user, 42)
end
it 'unsticks if all secondaries have caught up' do
- allow(described_class).to receive(:last_write_location_for)
+ allow(sticking)
+ .to receive(:last_write_location_for)
.with(:user, 42)
.and_return('foo')
allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true)
- expect(described_class).to receive(:unstick).with(:user, 42)
+ expect(sticking)
+ .to receive(:unstick)
+ .with(:user, 42)
- described_class.unstick_or_continue_sticking(:user, 42)
+ sticking.unstick_or_continue_sticking(:user, 42)
end
it 'continues using the primary if the secondaries have not yet caught up' do
- allow(described_class).to receive(:last_write_location_for)
+ allow(sticking)
+ .to receive(:last_write_location_for)
.with(:user, 42)
.and_return('foo')
@@ -150,184 +176,151 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
expect(Gitlab::Database::LoadBalancing::Session.current)
.to receive(:use_primary!)
- described_class.unstick_or_continue_sticking(:user, 42)
+ sticking.unstick_or_continue_sticking(:user, 42)
end
end
RSpec.shared_examples 'sticking' do
- context 'when sticking is disabled' do
- it 'does not perform any sticking', :aggregate_failures do
- expect(described_class).not_to receive(:set_write_location_for)
- expect(Gitlab::Database::LoadBalancing::Session.current).not_to receive(:use_primary!)
-
- described_class.bulk_stick(:user, ids)
- end
+ before do
+ allow(ActiveRecord::Base.connection.load_balancer)
+ .to receive(:primary_write_location)
+ .and_return('foo')
end
- context 'when sticking is enabled' do
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:configured?).and_return(true)
-
- lb = double(:lb, primary_write_location: 'foo')
+ it 'sticks an entity to the primary', :aggregate_failures do
+ allow(ActiveRecord::Base.connection.load_balancer)
+ .to receive(:primary_only?)
+ .and_return(false)
- allow(described_class).to receive(:load_balancer).and_return(lb)
+ ids.each do |id|
+ expect(sticking)
+ .to receive(:set_write_location_for)
+ .with(:user, id, 'foo')
end
- it 'sticks an entity to the primary', :aggregate_failures do
- ids.each do |id|
- expect(described_class).to receive(:set_write_location_for)
- .with(:user, id, 'foo')
- end
+ expect(Gitlab::Database::LoadBalancing::Session.current)
+ .to receive(:use_primary!)
- expect(Gitlab::Database::LoadBalancing::Session.current)
- .to receive(:use_primary!)
+ subject
+ end
- subject
- end
+ it 'does not update the write location when no replicas are used' do
+ expect(sticking).not_to receive(:set_write_location_for)
+
+ subject
end
end
- describe '.stick' do
+ describe '#stick' do
it_behaves_like 'sticking' do
let(:ids) { [42] }
- subject { described_class.stick(:user, ids.first) }
+ subject { sticking.stick(:user, ids.first) }
end
end
- describe '.bulk_stick' do
+ describe '#bulk_stick' do
it_behaves_like 'sticking' do
let(:ids) { [42, 43] }
- subject { described_class.bulk_stick(:user, ids) }
+ subject { sticking.bulk_stick(:user, ids) }
end
end
- describe '.mark_primary_write_location' do
- context 'when enabled' do
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
- allow(Gitlab::Database::LoadBalancing).to receive(:configured?).and_return(true)
- end
-
- it 'updates the write location with the load balancer' do
- lb = double(:lb, primary_write_location: 'foo')
+ describe '#mark_primary_write_location' do
+ it 'updates the write location with the load balancer' do
+ allow(ActiveRecord::Base.connection.load_balancer)
+ .to receive(:primary_write_location)
+ .and_return('foo')
- allow(described_class).to receive(:load_balancer).and_return(lb)
+ allow(ActiveRecord::Base.connection.load_balancer)
+ .to receive(:primary_only?)
+ .and_return(false)
- expect(described_class).to receive(:set_write_location_for)
- .with(:user, 42, 'foo')
+ expect(sticking)
+ .to receive(:set_write_location_for)
+ .with(:user, 42, 'foo')
- described_class.mark_primary_write_location(:user, 42)
- end
+ sticking.mark_primary_write_location(:user, 42)
end
- context 'when load balancing is configured but not enabled' do
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(false)
- allow(Gitlab::Database::LoadBalancing).to receive(:configured?).and_return(true)
- end
-
- it 'updates the write location with the main ActiveRecord connection' do
- allow(described_class).to receive(:load_balancer).and_return(nil)
- expect(ActiveRecord::Base).to receive(:connection).and_call_original
- expect(described_class).to receive(:set_write_location_for)
- .with(:user, 42, anything)
+ it 'does nothing when no replicas are used' do
+ expect(sticking).not_to receive(:set_write_location_for)
- described_class.mark_primary_write_location(:user, 42)
- end
-
- context 'when write location is nil' do
- before do
- allow(Gitlab::Database.main).to receive(:get_write_location).and_return(nil)
- end
+ sticking.mark_primary_write_location(:user, 42)
+ end
+ end
- it 'does not update the write location' do
- expect(described_class).not_to receive(:set_write_location_for)
+ describe '#unstick' do
+ it 'removes the sticking data from Redis' do
+ sticking.set_write_location_for(:user, 4, 'foo')
+ sticking.unstick(:user, 4)
- described_class.mark_primary_write_location(:user, 42)
- end
- end
+ expect(sticking.last_write_location_for(:user, 4)).to be_nil
end
- context 'when load balancing is disabled' do
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(false)
- allow(Gitlab::Database::LoadBalancing).to receive(:configured?).and_return(false)
+ it 'removes the old key' do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set(sticking.send(:old_redis_key_for, :user, 4), 'foo', ex: 30)
end
- it 'updates the write location with the main ActiveRecord connection' do
- expect(described_class).not_to receive(:set_write_location_for)
-
- described_class.mark_primary_write_location(:user, 42)
- end
+ sticking.unstick(:user, 4)
+ expect(sticking.last_write_location_for(:user, 4)).to be_nil
end
end
- describe '.unstick' do
- it 'removes the sticking data from Redis' do
- described_class.set_write_location_for(:user, 4, 'foo')
- described_class.unstick(:user, 4)
+ describe '#last_write_location_for' do
+ it 'returns the last WAL write location for a user' do
+ sticking.set_write_location_for(:user, 4, 'foo')
- expect(described_class.last_write_location_for(:user, 4)).to be_nil
+ expect(sticking.last_write_location_for(:user, 4)).to eq('foo')
end
- end
- describe '.last_write_location_for' do
- it 'returns the last WAL write location for a user' do
- described_class.set_write_location_for(:user, 4, 'foo')
+ it 'falls back to reading the old key' do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set(sticking.send(:old_redis_key_for, :user, 4), 'foo', ex: 30)
+ end
- expect(described_class.last_write_location_for(:user, 4)).to eq('foo')
+ expect(sticking.last_write_location_for(:user, 4)).to eq('foo')
end
end
- describe '.redis_key_for' do
+ describe '#redis_key_for' do
it 'returns a String' do
- expect(described_class.redis_key_for(:user, 42))
- .to eq('database-load-balancing/write-location/user/42')
+ expect(sticking.redis_key_for(:user, 42))
+ .to eq('database-load-balancing/write-location/main/user/42')
end
end
- describe '.load_balancer' do
- it 'returns a the load balancer' do
- proxy = double(:proxy)
-
- expect(Gitlab::Database::LoadBalancing).to receive(:proxy)
- .and_return(proxy)
-
- expect(proxy).to receive(:load_balancer)
-
- described_class.load_balancer
- end
- end
-
- describe '.select_caught_up_replicas' do
- let(:lb) { double(:lb) }
-
- before do
- allow(described_class).to receive(:load_balancer).and_return(lb)
- end
+ describe '#select_caught_up_replicas' do
+ let(:lb) { ActiveRecord::Base.connection.load_balancer }
context 'with no write location' do
before do
- allow(described_class).to receive(:last_write_location_for)
- .with(:project, 42).and_return(nil)
+ allow(sticking)
+ .to receive(:last_write_location_for)
+ .with(:project, 42)
+ .and_return(nil)
end
it 'returns false and does not try to find caught up hosts' do
expect(lb).not_to receive(:select_up_to_date_host)
- expect(described_class.select_caught_up_replicas(:project, 42)).to be false
+ expect(sticking.select_caught_up_replicas(:project, 42)).to be false
end
end
context 'with write location' do
before do
- allow(described_class).to receive(:last_write_location_for)
- .with(:project, 42).and_return('foo')
+ allow(sticking)
+ .to receive(:last_write_location_for)
+ .with(:project, 42)
+ .and_return('foo')
end
it 'returns true, selects hosts, and unsticks if any secondary has caught up' do
expect(lb).to receive(:select_up_to_date_host).and_return(true)
- expect(described_class).to receive(:unstick).with(:project, 42)
- expect(described_class.select_caught_up_replicas(:project, 42)).to be true
+ expect(sticking)
+ .to receive(:unstick)
+ .with(:project, 42)
+ expect(sticking.select_caught_up_replicas(:project, 42)).to be true
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb
index f40ad444081..bf5314e2c34 100644
--- a/spec/lib/gitlab/database/load_balancing_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing_spec.rb
@@ -3,203 +3,52 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing do
- describe '.proxy' do
- before do
- @previous_proxy = ActiveRecord::Base.load_balancing_proxy
+ describe '.base_models' do
+ it 'returns the models to apply load balancing to' do
+ models = described_class.base_models
- ActiveRecord::Base.load_balancing_proxy = connection_proxy
- end
-
- after do
- ActiveRecord::Base.load_balancing_proxy = @previous_proxy
- end
-
- context 'when configured' do
- let(:connection_proxy) { double(:connection_proxy) }
-
- it 'returns the connection proxy' do
- expect(subject.proxy).to eq(connection_proxy)
- end
- end
-
- context 'when not configured' do
- let(:connection_proxy) { nil }
+ expect(models).to include(ActiveRecord::Base)
- it 'returns nil' do
- expect(subject.proxy).to be_nil
+ if Gitlab::Database.has_config?(:ci)
+ expect(models).to include(Ci::CiDatabaseRecord)
end
-
- it 'tracks an error to sentry' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
- an_instance_of(subject::ProxyNotConfiguredError)
- )
-
- subject.proxy
- end
- end
- end
-
- describe '.configuration' do
- it 'returns the configuration for the load balancer' do
- raw = ActiveRecord::Base.connection_db_config.configuration_hash
- cfg = described_class.configuration
-
- # There isn't much to test here as the load balancing settings might not
- # (and likely aren't) set when running tests.
- expect(cfg.pool_size).to eq(raw[:pool])
- end
- end
-
- describe '.enable?' do
- before do
- allow(described_class.configuration)
- .to receive(:hosts)
- .and_return(%w(foo))
- end
-
- it 'returns false when no hosts are specified' do
- allow(described_class.configuration).to receive(:hosts).and_return([])
-
- expect(described_class.enable?).to eq(false)
- end
-
- it 'returns true when Sidekiq is being used' do
- allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
-
- expect(described_class.enable?).to eq(true)
- end
-
- it 'returns false when running inside a Rake task' do
- allow(Gitlab::Runtime).to receive(:rake?).and_return(true)
-
- expect(described_class.enable?).to eq(false)
- end
-
- it 'returns true when load balancing should be enabled' do
- allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(false)
-
- expect(described_class.enable?).to eq(true)
end
- it 'returns true when service discovery is enabled' do
- allow(described_class.configuration).to receive(:hosts).and_return([])
- allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(false)
-
- allow(described_class.configuration)
- .to receive(:service_discovery_enabled?)
- .and_return(true)
-
- expect(described_class.enable?).to eq(true)
+ it 'returns the models as a frozen array' do
+ expect(described_class.base_models).to be_frozen
end
end
- describe '.configured?' do
- it 'returns true when hosts are configured' do
- allow(described_class.configuration)
- .to receive(:hosts)
- .and_return(%w[foo])
-
- expect(described_class.configured?).to eq(true)
- end
+ describe '.each_load_balancer' do
+ it 'yields every load balancer to the supplied block' do
+ lbs = []
- it 'returns true when service discovery is enabled' do
- allow(described_class.configuration).to receive(:hosts).and_return([])
- allow(described_class.configuration)
- .to receive(:service_discovery_enabled?)
- .and_return(true)
+ described_class.each_load_balancer do |lb|
+ lbs << lb
+ end
- expect(described_class.configured?).to eq(true)
+ expect(lbs.length).to eq(described_class.base_models.length)
end
- it 'returns false when neither service discovery nor hosts are configured' do
- allow(described_class.configuration).to receive(:hosts).and_return([])
- allow(described_class.configuration)
- .to receive(:service_discovery_enabled?)
- .and_return(false)
+ it 'returns an Enumerator when no block is given' do
+ res = described_class.each_load_balancer
- expect(described_class.configured?).to eq(false)
+ expect(res.next)
+ .to be_an_instance_of(Gitlab::Database::LoadBalancing::LoadBalancer)
end
end
- describe '.configure_proxy' do
- before do
- allow(ActiveRecord::Base).to receive(:load_balancing_proxy=)
- end
-
- it 'configures the connection proxy' do
- described_class.configure_proxy
-
- expect(ActiveRecord::Base).to have_received(:load_balancing_proxy=)
- .with(Gitlab::Database::LoadBalancing::ConnectionProxy)
- end
-
- context 'when service discovery is enabled' do
- it 'runs initial service discovery when configuring the connection proxy' do
- discover = instance_spy(Gitlab::Database::LoadBalancing::ServiceDiscovery)
-
- allow(described_class.configuration)
- .to receive(:service_discovery)
- .and_return({ record: 'foo' })
-
- expect(Gitlab::Database::LoadBalancing::ServiceDiscovery)
- .to receive(:new)
- .with(
- an_instance_of(Gitlab::Database::LoadBalancing::LoadBalancer),
- an_instance_of(Hash)
- )
- .and_return(discover)
-
- expect(discover).to receive(:perform_service_discovery)
-
- described_class.configure_proxy
+ describe '.release_hosts' do
+ it 'releases the host of every load balancer' do
+ described_class.each_load_balancer do |lb|
+ expect(lb).to receive(:release_host)
end
- end
- end
-
- describe '.start_service_discovery' do
- it 'does not start if service discovery is disabled' do
- expect(Gitlab::Database::LoadBalancing::ServiceDiscovery)
- .not_to receive(:new)
- described_class.start_service_discovery
- end
-
- it 'starts service discovery if enabled' do
- allow(described_class.configuration)
- .to receive(:service_discovery_enabled?)
- .and_return(true)
-
- instance = double(:instance)
- config = Gitlab::Database::LoadBalancing::Configuration
- .new(ActiveRecord::Base)
- lb = Gitlab::Database::LoadBalancing::LoadBalancer.new(config)
- proxy = Gitlab::Database::LoadBalancing::ConnectionProxy.new(lb)
-
- allow(described_class)
- .to receive(:proxy)
- .and_return(proxy)
-
- expect(Gitlab::Database::LoadBalancing::ServiceDiscovery)
- .to receive(:new)
- .with(lb, an_instance_of(Hash))
- .and_return(instance)
-
- expect(instance)
- .to receive(:start)
-
- described_class.start_service_discovery
+ described_class.release_hosts
end
end
describe '.db_role_for_connection' do
- context 'when the load balancing is not configured' do
- let(:connection) { ActiveRecord::Base.connection }
-
- it 'returns primary' do
- expect(described_class.db_role_for_connection(connection)).to eq(:primary)
- end
- end
-
context 'when the NullPool is used for connection' do
let(:pool) { ActiveRecord::ConnectionAdapters::NullPool.new }
let(:connection) { double(:connection, pool: pool) }
@@ -253,7 +102,7 @@ RSpec.describe Gitlab::Database::LoadBalancing do
# - In each test, we listen to the SQL queries (via sql.active_record
# instrumentation) while triggering real queries from the defined model.
# - We assert the desinations (replica/primary) of the queries in order.
- describe 'LoadBalancing integration tests', :db_load_balancing, :delete do
+ describe 'LoadBalancing integration tests', :database_replica, :delete do
before(:all) do
ActiveRecord::Schema.define do
create_table :load_balancing_test, force: true do |t|
@@ -274,10 +123,6 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
end
- before do
- model.singleton_class.prepend ::Gitlab::Database::LoadBalancing::ActiveRecordProxy
- end
-
where(:queries, :include_transaction, :expected_results) do
[
# Read methods
diff --git a/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb
index 708d1be6e00..54b3ad22faf 100644
--- a/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb
@@ -19,6 +19,10 @@ RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do
end
end
+ after(:all) do
+ migration.drop_table :loose_fk_test_table
+ end
+
before do
3.times { model.create! }
end
@@ -45,8 +49,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do
expect(LooseForeignKeys::DeletedRecord.count).to eq(1)
deleted_record = LooseForeignKeys::DeletedRecord.all.first
- expect(deleted_record.deleted_table_primary_key_value).to eq(record_to_be_deleted.id)
- expect(deleted_record.deleted_table_name).to eq('loose_fk_test_table')
+ expect(deleted_record.primary_key_value).to eq(record_to_be_deleted.id)
+ expect(deleted_record.fully_qualified_table_name).to eq('public.loose_fk_test_table')
+ expect(deleted_record.partition).to eq(1)
end
it 'stores multiple record deletions' do
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 006f8a39f9c..d89af1521a2 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -1631,10 +1631,19 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
let(:worker) do
Class.new do
include Sidekiq::Worker
+
sidekiq_options queue: 'test'
+
+ def self.name
+ 'WorkerClass'
+ end
end
end
+ before do
+ stub_const(worker.name, worker)
+ end
+
describe '#sidekiq_queue_length' do
context 'when queue is empty' do
it 'returns zero' do
diff --git a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
index 5945e5a2039..841d2a98a16 100644
--- a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
+++ b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
@@ -2,8 +2,13 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Instrumentation do
+ let(:result_dir) { Dir.mktmpdir }
+
+ after do
+ FileUtils.rm_rf(result_dir)
+ end
describe '#observe' do
- subject { described_class.new }
+ subject { described_class.new(result_dir: result_dir) }
let(:migration_name) { 'test' }
let(:migration_version) { '12345' }
@@ -13,7 +18,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
end
context 'behavior with observers' do
- subject { described_class.new([Gitlab::Database::Migrations::Observers::MigrationObserver]).observe(version: migration_version, name: migration_name) {} }
+ subject { described_class.new(observer_classes: [Gitlab::Database::Migrations::Observers::MigrationObserver], result_dir: result_dir).observe(version: migration_version, name: migration_name) {} }
let(:observer) { instance_double('Gitlab::Database::Migrations::Observers::MigrationObserver', before: nil, after: nil, record: nil) }
@@ -24,7 +29,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
it 'instantiates observer with observation' do
expect(Gitlab::Database::Migrations::Observers::MigrationObserver)
.to receive(:new)
- .with(instance_of(Gitlab::Database::Migrations::Observation)) { |observation| expect(observation.version).to eq(migration_version) }
+ .with(instance_of(Gitlab::Database::Migrations::Observation), anything) { |observation| expect(observation.version).to eq(migration_version) }
.and_return(observer)
subject
@@ -58,7 +63,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
end
context 'on successful execution' do
- subject { described_class.new.observe(version: migration_version, name: migration_name) {} }
+ subject { described_class.new(result_dir: result_dir).observe(version: migration_version, name: migration_name) {} }
it 'records walltime' do
expect(subject.walltime).not_to be_nil
@@ -78,7 +83,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
end
context 'upon failure' do
- subject { described_class.new.observe(version: migration_version, name: migration_name) { raise 'something went wrong' } }
+ subject { described_class.new(result_dir: result_dir).observe(version: migration_version, name: migration_name) { raise 'something went wrong' } }
it 'raises the exception' do
expect { subject }.to raise_error(/something went wrong/)
@@ -93,7 +98,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
# ignore
end
- let(:instance) { described_class.new }
+ let(:instance) { described_class.new(result_dir: result_dir) }
it 'records walltime' do
expect(subject.walltime).not_to be_nil
@@ -114,7 +119,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
end
context 'sequence of migrations with failures' do
- subject { described_class.new }
+ subject { described_class.new(result_dir: result_dir) }
let(:migration1) { double('migration1', call: nil) }
let(:migration2) { double('migration2', call: nil) }
diff --git a/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb
index 36885a1594f..191ac29e3b3 100644
--- a/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Observers::QueryDetails do
- subject { described_class.new(observation) }
+ subject { described_class.new(observation, directory_path) }
let(:observation) { Gitlab::Database::Migrations::Observation.new(migration_version, migration_name) }
let(:connection) { ActiveRecord::Base.connection }
@@ -14,10 +14,6 @@ RSpec.describe Gitlab::Database::Migrations::Observers::QueryDetails do
let(:migration_version) { 20210422152437 }
let(:migration_name) { 'test' }
- before do
- stub_const('Gitlab::Database::Migrations::Instrumentation::RESULT_DIR', directory_path)
- end
-
after do
FileUtils.remove_entry(directory_path)
end
diff --git a/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
index 2a49d8e8b73..2e70a85fd5b 100644
--- a/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Observers::QueryLog do
- subject { described_class.new(observation) }
+ subject { described_class.new(observation, directory_path) }
let(:observation) { Gitlab::Database::Migrations::Observation.new(migration_version, migration_name) }
let(:connection) { ActiveRecord::Base.connection }
@@ -11,10 +11,6 @@ RSpec.describe Gitlab::Database::Migrations::Observers::QueryLog do
let(:migration_version) { 20210422152437 }
let(:migration_name) { 'test' }
- before do
- stub_const('Gitlab::Database::Migrations::Instrumentation::RESULT_DIR', directory_path)
- end
-
after do
FileUtils.remove_entry(directory_path)
end
diff --git a/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb
index 32a25fdaa28..9727a215d71 100644
--- a/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Observers::QueryStatistics do
- subject { described_class.new(observation) }
+ subject { described_class.new(observation, double("unused path")) }
let(:observation) { Gitlab::Database::Migrations::Observation.new }
let(:connection) { ActiveRecord::Base.connection }
diff --git a/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb b/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb
index 61e28003e66..e689759c574 100644
--- a/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Observers::TotalDatabaseSizeChange do
- subject { described_class.new(observation) }
+ subject { described_class.new(observation, double('unused path')) }
let(:observation) { Gitlab::Database::Migrations::Observation.new }
let(:connection) { ActiveRecord::Base.connection }
diff --git a/spec/lib/gitlab/database/migrations/runner_spec.rb b/spec/lib/gitlab/database/migrations/runner_spec.rb
new file mode 100644
index 00000000000..52fb5ec2ba8
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/runner_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::Runner do
+ let(:result_dir) { Pathname.new(Dir.mktmpdir) }
+
+ let(:migration_runs) { [] } # This list gets populated as the runner tries to run migrations
+
+ # Tests depend on all of these lists being sorted in the order migrations would be applied
+ let(:applied_migrations_other_branches) { [double(ActiveRecord::Migration, version: 1, name: 'migration_complete_other_branch')] }
+
+ let(:applied_migrations_this_branch) do
+ [
+ double(ActiveRecord::Migration, version: 2, name: 'older_migration_complete_this_branch'),
+ double(ActiveRecord::Migration, version: 3, name: 'newer_migration_complete_this_branch')
+ ].sort_by(&:version)
+ end
+
+ let(:pending_migrations) do
+ [
+ double(ActiveRecord::Migration, version: 4, name: 'older_migration_pending'),
+ double(ActiveRecord::Migration, version: 5, name: 'newer_migration_pending')
+ ].sort_by(&:version)
+ end
+
+ before do
+ stub_const('Gitlab::Database::Migrations::Runner::BASE_RESULT_DIR', result_dir)
+ allow(ActiveRecord::Migrator).to receive(:new) do |dir, _all_migrations, _schema_migration_class, version_to_migrate|
+ migrator = double(ActiveRecord::Migrator)
+ expect(migrator).to receive(:run) do
+ migration_runs << OpenStruct.new(dir: dir, version_to_migrate: version_to_migrate)
+ end
+ migrator
+ end
+
+ all_versions = (applied_migrations_other_branches + applied_migrations_this_branch).map(&:version)
+ migrations = applied_migrations_other_branches + applied_migrations_this_branch + pending_migrations
+ ctx = double(ActiveRecord::MigrationContext, get_all_versions: all_versions, migrations: migrations, schema_migration: ActiveRecord::SchemaMigration)
+
+ allow(described_class).to receive(:migration_context).and_return(ctx)
+
+ names_this_branch = (applied_migrations_this_branch + pending_migrations).map { |m| "db/migrate/#{m.version}_#{m.name}.rb"}
+ allow(described_class).to receive(:migration_file_names_this_branch).and_return(names_this_branch)
+ end
+
+ after do
+ FileUtils.rm_rf(result_dir)
+ end
+
+ it 'creates the results dir when one does not exist' do
+ FileUtils.rm_rf(result_dir)
+
+ expect do
+ described_class.new(direction: :up, migrations: [], result_dir: result_dir).run
+ end.to change { Dir.exist?(result_dir) }.from(false).to(true)
+ end
+
+ describe '.up' do
+ context 'result directory' do
+ it 'uses the /up subdirectory' do
+ expect(described_class.up.result_dir).to eq(result_dir.join('up'))
+ end
+ end
+
+ context 'migrations to run' do
+ subject(:up) { described_class.up }
+
+ it 'is the list of pending migrations' do
+ expect(up.migrations).to eq(pending_migrations)
+ end
+ end
+
+ context 'running migrations' do
+ subject(:up) { described_class.up }
+
+ it 'runs the unapplied migrations in version order', :aggregate_failures do
+ up.run
+
+ expect(migration_runs.map(&:dir)).to eq([:up, :up])
+ expect(migration_runs.map(&:version_to_migrate)).to eq(pending_migrations.map(&:version))
+ end
+ end
+ end
+
+ describe '.down' do
+ subject(:down) { described_class.down }
+
+ context 'result directory' do
+ it 'is the /down subdirectory' do
+ expect(down.result_dir).to eq(result_dir.join('down'))
+ end
+ end
+
+ context 'migrations to run' do
+ it 'is the list of migrations that are up and on this branch' do
+ expect(down.migrations).to eq(applied_migrations_this_branch)
+ end
+ end
+
+ context 'running migrations' do
+ it 'runs the applied migrations for the current branch in reverse order', :aggregate_failures do
+ down.run
+
+ expect(migration_runs.map(&:dir)).to eq([:down, :down])
+ expect(migration_runs.map(&:version_to_migrate)).to eq(applied_migrations_this_branch.reverse.map(&:version))
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb
index 8523b7104f0..8c406c90e36 100644
--- a/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb
@@ -84,6 +84,7 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do
before do
stub_feature_flags(drop_detached_partitions: false)
end
+
it 'does not drop the partition' do
subject.perform
@@ -162,8 +163,8 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do
context 'when the first drop returns an error' do
it 'still drops the second partition' do
- expect(subject).to receive(:drop_one).ordered.and_raise('injected error')
- expect(subject).to receive(:drop_one).ordered.and_call_original
+ expect(subject).to receive(:drop_detached_partition).ordered.and_raise('injected error')
+ expect(subject).to receive(:drop_detached_partition).ordered.and_call_original
subject.perform
diff --git a/spec/lib/gitlab/database/partitioning/multi_database_partition_dropper_spec.rb b/spec/lib/gitlab/database/partitioning/multi_database_partition_dropper_spec.rb
new file mode 100644
index 00000000000..56d6ebb7aff
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning/multi_database_partition_dropper_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Partitioning::MultiDatabasePartitionDropper, '#drop_detached_partitions' do
+ subject(:drop_detached_partitions) { multi_db_dropper.drop_detached_partitions }
+
+ let(:multi_db_dropper) { described_class.new }
+
+ let(:connection_wrapper1) { double(scope: scope1) }
+ let(:connection_wrapper2) { double(scope: scope2) }
+
+ let(:scope1) { double(connection: connection1) }
+ let(:scope2) { double(connection: connection2) }
+
+ let(:connection1) { double('connection') }
+ let(:connection2) { double('connection') }
+
+ let(:dropper_class) { Gitlab::Database::Partitioning::DetachedPartitionDropper }
+ let(:dropper1) { double('partition dropper') }
+ let(:dropper2) { double('partition dropper') }
+
+ before do
+ allow(multi_db_dropper).to receive(:databases).and_return({ db1: connection_wrapper1, db2: connection_wrapper2 })
+ end
+
+ it 'drops detached partitions for each database' do
+ expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(connection1).and_yield.ordered
+ expect(dropper_class).to receive(:new).and_return(dropper1).ordered
+ expect(dropper1).to receive(:perform)
+
+ expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(connection2).and_yield.ordered
+ expect(dropper_class).to receive(:new).and_return(dropper2).ordered
+ expect(dropper2).to receive(:perform)
+
+ drop_detached_partitions
+ end
+end
diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
index 8f1f5b5ba1b..7c4cfcfb3a9 100644
--- a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
@@ -176,7 +176,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
end
it 'detaches exactly one partition' do
- expect { subject }.to change { find_partitions(my_model.table_name, schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA).size }.from(9).to(8)
+ expect { subject }.to change { find_partitions(my_model.table_name).size }.from(9).to(8)
end
it 'detaches the old partition' do
diff --git a/spec/lib/gitlab/database/partitioning_spec.rb b/spec/lib/gitlab/database/partitioning_spec.rb
index f163b45e01e..486af9413e8 100644
--- a/spec/lib/gitlab/database/partitioning_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_spec.rb
@@ -33,4 +33,22 @@ RSpec.describe Gitlab::Database::Partitioning do
end
end
end
+
+ describe '.drop_detached_partitions' do
+ let(:partition_dropper_class) { described_class::MultiDatabasePartitionDropper }
+
+ it 'delegates to the partition dropper' do
+ expect_next_instance_of(partition_dropper_class) do |partition_dropper|
+ expect(partition_dropper).to receive(:drop_detached_partitions)
+ end
+
+ described_class.drop_detached_partitions
+ end
+ end
+
+ context 'ensure that the registered models have partitioning strategy' do
+ it 'fails when partitioning_strategy is not specified for the model' do
+ expect(described_class.registered_models).to all(respond_to(:partitioning_strategy))
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
index 2a1f91b5b21..399fcae2fa0 100644
--- a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
+++ b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::PostgresqlAdapter::ForceDisconnectableMixin do
+RSpec.describe Gitlab::Database::PostgresqlAdapter::ForceDisconnectableMixin, :reestablished_active_record_base do
describe 'checking in a connection to the pool' do
let(:model) do
Class.new(ActiveRecord::Base) do
diff --git a/spec/lib/gitlab/database/schema_migrations/context_spec.rb b/spec/lib/gitlab/database/schema_migrations/context_spec.rb
index a79e6706149..0323fa22b78 100644
--- a/spec/lib/gitlab/database/schema_migrations/context_spec.rb
+++ b/spec/lib/gitlab/database/schema_migrations/context_spec.rb
@@ -23,19 +23,7 @@ RSpec.describe Gitlab::Database::SchemaMigrations::Context do
end
end
- context 'multiple databases' do
- let(:connection_class) do
- Class.new(::ApplicationRecord) do
- self.abstract_class = true
-
- def self.name
- 'Gitlab::Database::SchemaMigrations::Context::TestConnection'
- end
- end
- end
-
- let(:configuration_overrides) { {} }
-
+ context 'multiple databases', :reestablished_active_record_base do
before do
connection_class.establish_connection(
ActiveRecord::Base
@@ -46,10 +34,6 @@ RSpec.describe Gitlab::Database::SchemaMigrations::Context do
)
end
- after do
- connection_class.remove_connection
- end
-
context 'when `schema_migrations_path` is configured as string' do
let(:configuration_overrides) do
{ "schema_migrations_path" => "db/ci_schema_migrations" }
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 0b960830d89..c2c818aa106 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
let(:logger) { Gitlab::Database::WithLockRetries::NULL_LOGGER }
let(:subject) { described_class.new(env: env, logger: logger, allow_savepoints: allow_savepoints, timing_configuration: timing_configuration) }
let(:allow_savepoints) { true }
- let(:connection) { ActiveRecord::Base.connection }
+ let(:connection) { ActiveRecord::Base.retrieve_connection }
let(:timing_configuration) do
[
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index a9a8d5e6314..a2e7b6d27b9 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -15,6 +15,13 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.databases' do
+ it 'stores connections as a HashWithIndifferentAccess' do
+ expect(described_class.databases.has_key?('main')).to be true
+ expect(described_class.databases.has_key?(:main)).to be true
+ end
+ end
+
describe '.default_pool_size' do
before do
allow(Gitlab::Runtime).to receive(:max_threads).and_return(7)
@@ -185,10 +192,23 @@ RSpec.describe Gitlab::Database do
describe '.db_config_name' do
it 'returns the db_config name for the connection' do
- connection = ActiveRecord::Base.connection
+ model = ActiveRecord::Base
+
+ # This is a ConnectionProxy
+ expect(described_class.db_config_name(model.connection))
+ .to eq('unknown')
- expect(described_class.db_config_name(connection)).to be_a(String)
- expect(described_class.db_config_name(connection)).to eq(connection.pool.db_config.name)
+ # This is an actual connection
+ expect(described_class.db_config_name(model.retrieve_connection))
+ .to eq('main')
+ end
+
+ context 'when replicas are configured', :database_replica do
+ it 'returns the name for a replica' do
+ replica = ActiveRecord::Base.connection.load_balancer.host
+
+ expect(described_class.db_config_name(replica)).to eq('main_replica')
+ end
end
end
@@ -279,7 +299,7 @@ RSpec.describe Gitlab::Database do
expect(event).not_to be_nil
expect(event.duration).to be > 0.0
expect(event.payload).to a_hash_including(
- connection: be_a(ActiveRecord::ConnectionAdapters::AbstractAdapter)
+ connection: be_a(Gitlab::Database::LoadBalancing::ConnectionProxy)
)
end
end
@@ -296,7 +316,7 @@ RSpec.describe Gitlab::Database do
expect(event).not_to be_nil
expect(event.duration).to be > 0.0
expect(event.payload).to a_hash_including(
- connection: be_a(ActiveRecord::ConnectionAdapters::AbstractAdapter)
+ connection: be_a(Gitlab::Database::LoadBalancing::ConnectionProxy)
)
end
end
@@ -319,7 +339,7 @@ RSpec.describe Gitlab::Database do
expect(event).not_to be_nil
expect(event.duration).to be > 0.0
expect(event.payload).to a_hash_including(
- connection: be_a(ActiveRecord::ConnectionAdapters::AbstractAdapter)
+ connection: be_a(Gitlab::Database::LoadBalancing::ConnectionProxy)
)
end
end
@@ -340,7 +360,7 @@ RSpec.describe Gitlab::Database do
expect(event).not_to be_nil
expect(event.duration).to be > 0.0
expect(event.payload).to a_hash_including(
- connection: be_a(ActiveRecord::ConnectionAdapters::AbstractAdapter)
+ connection: be_a(Gitlab::Database::LoadBalancing::ConnectionProxy)
)
end
end
diff --git a/spec/lib/gitlab/doctor/secrets_spec.rb b/spec/lib/gitlab/doctor/secrets_spec.rb
index b9e054ce14f..f95a7eb1492 100644
--- a/spec/lib/gitlab/doctor/secrets_spec.rb
+++ b/spec/lib/gitlab/doctor/secrets_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Doctor::Secrets do
let!(:user) { create(:user, otp_secret: "test") }
let!(:group) { create(:group, runners_token: "test") }
+ let!(:project) { create(:project) }
+ let!(:grafana_integration) { create(:grafana_integration, project: project, token: "test") }
let(:logger) { double(:logger).as_null_object }
subject { described_class.new(logger).run! }
@@ -39,4 +41,12 @@ RSpec.describe Gitlab::Doctor::Secrets do
subject
end
end
+
+ context 'when GrafanaIntegration token is set via private method' do
+ it 'can access GrafanaIntegration token value' do
+ expect(logger).to receive(:info).with(/GrafanaIntegration failures: 0/)
+
+ subject
+ end
+ end
end
diff --git a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
index dd230140b30..bd4f1d164a8 100644
--- a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
@@ -136,6 +136,36 @@ RSpec.describe Gitlab::Email::Handler::CreateIssueHandler do
expect { handler.execute }.to raise_error(Gitlab::Email::ProjectNotFound)
end
end
+
+ context 'rate limiting' do
+ let(:rate_limited_service_feature_enabled) { nil }
+
+ before do
+ stub_feature_flags(rate_limited_service_issues_create: rate_limited_service_feature_enabled)
+ end
+
+ context 'when the rate_limited_service_issues_create feature flag is disabled' do
+ let(:rate_limited_service_feature_enabled) { false }
+
+ it 'does not attempt to throttle' do
+ expect(::Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
+
+ setup_attachment
+ receiver.execute
+ end
+ end
+
+ context 'when :rate_limited_service Feature is enabled' do
+ let(:rate_limited_service_feature_enabled) { true }
+
+ it 'raises a RateLimitedService::RateLimitedError' do
+ allow(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
+
+ setup_attachment
+ expect { receiver.execute }.to raise_error(RateLimitedService::RateLimitedError, _('This endpoint has been requested too many times. Try again later.'))
+ end
+ end
+ end
end
def email_fixture(path)
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 2916e65528f..8cb1ccc065b 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -243,6 +243,15 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
end
end
end
+
+ context 'when rate limiting is in effect' do
+ it 'allows unlimited new issue creation' do
+ stub_application_setting(issues_create_limit: 1)
+ setup_attachment
+
+ expect { 2.times { receiver.execute } }.to change { Issue.count }.by(2)
+ end
+ end
end
describe '#can_handle?' do
diff --git a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb
index 56cf58dcf92..0a1f04ed793 100644
--- a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb
+++ b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb
@@ -14,15 +14,15 @@ RSpec.describe Gitlab::Email::Hook::SmimeSignatureInterceptor do
end
let(:root_certificate) do
- Gitlab::Email::Smime::Certificate.new(@root_ca[:key], @root_ca[:cert])
+ Gitlab::X509::Certificate.new(@root_ca[:key], @root_ca[:cert])
end
let(:intermediate_certificate) do
- Gitlab::Email::Smime::Certificate.new(@intermediate_ca[:key], @intermediate_ca[:cert])
+ Gitlab::X509::Certificate.new(@intermediate_ca[:key], @intermediate_ca[:cert])
end
let(:certificate) do
- Gitlab::Email::Smime::Certificate.new(@cert[:key], @cert[:cert], [intermediate_certificate.cert])
+ Gitlab::X509::Certificate.new(@cert[:key], @cert[:cert], [intermediate_certificate.cert])
end
let(:mail_body) { "signed hello with Unicode €áø and\r\n newlines\r\n" }
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::Email::Hook::SmimeSignatureInterceptor do
end
before do
- allow(Gitlab::Email::Smime::Certificate).to receive_messages(from_files: certificate)
+ allow(Gitlab::X509::Certificate).to receive_messages(from_files: certificate)
Mail.register_interceptor(described_class)
mail.deliver_now
diff --git a/spec/lib/gitlab/endpoint_attributes_spec.rb b/spec/lib/gitlab/endpoint_attributes_spec.rb
new file mode 100644
index 00000000000..4d4cfed57fa
--- /dev/null
+++ b/spec/lib/gitlab/endpoint_attributes_spec.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative "../../support/matchers/be_request_urgency"
+require_relative "../../../lib/gitlab/endpoint_attributes"
+
+RSpec.describe Gitlab::EndpointAttributes do
+ let(:base_controller) do
+ Class.new do
+ include ::Gitlab::EndpointAttributes
+ end
+ end
+
+ let(:controller) do
+ Class.new(base_controller) do
+ feature_category :foo, %w(update edit)
+ feature_category :bar, %w(index show)
+ feature_category :quux, %w(destroy)
+
+ urgency :high, %w(do_a)
+ urgency :low, %w(do_b do_c)
+ end
+ end
+
+ let(:subclass) do
+ Class.new(controller) do
+ feature_category :baz, %w(subclass_index)
+ urgency :high, %w(superclass_do_something)
+ end
+ end
+
+ it "is nil when nothing was defined" do
+ expect(base_controller.feature_category_for_action("hello")).to be_nil
+ end
+
+ it "returns the expected category", :aggregate_failures do
+ expect(controller.feature_category_for_action("update")).to eq(:foo)
+ expect(controller.feature_category_for_action("index")).to eq(:bar)
+ expect(controller.feature_category_for_action("destroy")).to eq(:quux)
+ end
+
+ it "falls back to default when urgency was not defined", :aggregate_failures do
+ expect(base_controller.urgency_for_action("hello")).to be_request_urgency(:default)
+ expect(controller.urgency_for_action("update")).to be_request_urgency(:default)
+ expect(controller.urgency_for_action("index")).to be_request_urgency(:default)
+ expect(controller.urgency_for_action("destroy")).to be_request_urgency(:default)
+ end
+
+ it "returns the expected urgency", :aggregate_failures do
+ expect(controller.urgency_for_action("do_a")).to be_request_urgency(:high)
+ expect(controller.urgency_for_action("do_b")).to be_request_urgency(:low)
+ expect(controller.urgency_for_action("do_c")).to be_request_urgency(:low)
+ end
+
+ it "returns feature category for an implied action if not specify actions" do
+ klass = Class.new(base_controller) do
+ feature_category :foo
+ end
+ expect(klass.feature_category_for_action("index")).to eq(:foo)
+ expect(klass.feature_category_for_action("show")).to eq(:foo)
+ end
+
+ it "returns expected duration for an implied action if not specify actions" do
+ klass = Class.new(base_controller) do
+ feature_category :foo
+ urgency :low
+ end
+ expect(klass.urgency_for_action("index")).to be_request_urgency(:low)
+ expect(klass.urgency_for_action("show")).to be_request_urgency(:low)
+ end
+
+ it "returns the expected category for categories defined in subclasses" do
+ expect(subclass.feature_category_for_action("subclass_index")).to eq(:baz)
+ end
+
+ it "falls back to superclass's feature category" do
+ expect(subclass.feature_category_for_action("update")).to eq(:foo)
+ end
+
+ it "returns the expected urgency for categories defined in subclasses" do
+ expect(subclass.urgency_for_action("superclass_do_something")).to be_request_urgency(:high)
+ end
+
+ it "falls back to superclass's expected duration" do
+ expect(subclass.urgency_for_action("do_a")).to be_request_urgency(:high)
+ end
+
+ it "raises an error when defining for the controller and for individual actions" do
+ expect do
+ Class.new(base_controller) do
+ feature_category :hello
+ feature_category :goodbye, [:world]
+ end
+ end.to raise_error(ArgumentError, "feature_category are already defined for all actions, but re-defined for world")
+ end
+
+ it "raises an error when multiple calls define the same action" do
+ expect do
+ Class.new(base_controller) do
+ feature_category :hello, [:world]
+ feature_category :goodbye, ["world"]
+ end
+ end.to raise_error(ArgumentError, "Attributes re-defined for action world: feature_category")
+ end
+
+ it "raises an error when multiple calls define a different urgency for the same action" do
+ expect do
+ Class.new(base_controller) do
+ urgency :high, [:world]
+ urgency :low, ["world"]
+ end
+ end.to raise_error(ArgumentError, "Attributes re-defined for action world: urgency")
+ end
+
+ it "does not raise an error when multiple calls define the same action and configs" do
+ expect do
+ Class.new(base_controller) do
+ feature_category :hello, [:world]
+ feature_category :hello, ["world"]
+ urgency :medium, [:moon]
+ urgency :medium, ["moon"]
+ end
+ end.not_to raise_error
+ end
+
+ it "raises an error if the expected duration is not supported" do
+ expect do
+ Class.new(base_controller) do
+ urgency :super_slow
+ end
+ end.to raise_error(ArgumentError, "Urgency not supported: super_slow")
+ end
+end
diff --git a/spec/lib/gitlab/etag_caching/router/graphql_spec.rb b/spec/lib/gitlab/etag_caching/router/graphql_spec.rb
index d151dcba413..9a6787e3640 100644
--- a/spec/lib/gitlab/etag_caching/router/graphql_spec.rb
+++ b/spec/lib/gitlab/etag_caching/router/graphql_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::EtagCaching::Router::Graphql do
end
it 'has a valid feature category for every route', :aggregate_failures do
- feature_categories = YAML.load_file(Rails.root.join('config', 'feature_categories.yml')).to_set
+ feature_categories = Gitlab::FeatureCategories.default.categories
described_class::ROUTES.each do |route|
expect(feature_categories).to include(route.feature_category), "#{route.name} has a category of #{route.feature_category}, which is not valid"
diff --git a/spec/lib/gitlab/etag_caching/router/restful_spec.rb b/spec/lib/gitlab/etag_caching/router/restful_spec.rb
index 1f5cac09b6d..a0fc480369c 100644
--- a/spec/lib/gitlab/etag_caching/router/restful_spec.rb
+++ b/spec/lib/gitlab/etag_caching/router/restful_spec.rb
@@ -107,7 +107,7 @@ RSpec.describe Gitlab::EtagCaching::Router::Restful do
end
it 'has a valid feature category for every route', :aggregate_failures do
- feature_categories = YAML.load_file(Rails.root.join('config', 'feature_categories.yml')).to_set
+ feature_categories = Gitlab::FeatureCategories.default.categories
described_class::ROUTES.each do |route|
expect(feature_categories).to include(route.feature_category), "#{route.name} has a category of #{route.feature_category}, which is not valid"
diff --git a/spec/lib/gitlab/feature_categories_spec.rb b/spec/lib/gitlab/feature_categories_spec.rb
new file mode 100644
index 00000000000..daced154a69
--- /dev/null
+++ b/spec/lib/gitlab/feature_categories_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::FeatureCategories do
+ let(:fake_categories) { %w(foo bar) }
+
+ subject { described_class.new(fake_categories) }
+
+ describe "#valid?" do
+ it "returns true if category is known", :aggregate_failures do
+ expect(subject.valid?('foo')).to be(true)
+ expect(subject.valid?('zzz')).to be(false)
+ end
+ end
+
+ describe "#from_request" do
+ let(:request_env) { {} }
+ let(:verified) { true }
+
+ def fake_request(request_feature_category)
+ double('request', env: request_env, headers: { "HTTP_X_GITLAB_FEATURE_CATEGORY" => request_feature_category })
+ end
+
+ before do
+ allow(::Gitlab::RequestForgeryProtection).to receive(:verified?).with(request_env).and_return(verified)
+ end
+
+ it "returns category from request when valid, otherwise returns nil", :aggregate_failures do
+ expect(subject.from_request(fake_request("foo"))).to be("foo")
+ expect(subject.from_request(fake_request("zzz"))).to be_nil
+ end
+
+ context "when request is not verified" do
+ let(:verified) { false }
+
+ it "returns nil" do
+ expect(subject.from_request(fake_request("foo"))).to be_nil
+ end
+ end
+ end
+
+ describe "#categories" do
+ it "returns a set of the given categories" do
+ expect(subject.categories).to be_a(Set)
+ expect(subject.categories).to contain_exactly(*fake_categories)
+ end
+ end
+
+ describe ".load_from_yaml" do
+ subject { described_class.load_from_yaml }
+
+ it "creates FeatureCategories from feature_categories.yml file" do
+ contents = YAML.load_file(Rails.root.join('config', 'feature_categories.yml'))
+
+ expect(subject.categories).to contain_exactly(*contents)
+ end
+ end
+
+ describe ".default" do
+ it "returns a memoization of load_from_yaml", :aggregate_failures do
+ # FeatureCategories.default could have been referenced in another spec, so we need to clean it up here
+ described_class.instance_variable_set(:@default, nil)
+
+ expect(described_class).to receive(:load_from_yaml).once.and_call_original
+
+ 2.times { described_class.default }
+
+ # Uses reference equality to verify memoization
+ expect(described_class.default).to equal(described_class.default)
+ expect(described_class.default).to be_a(described_class)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
index a46846e9820..e160e88487b 100644
--- a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
+++ b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
@@ -75,7 +75,68 @@ RSpec.describe Gitlab::FormBuilders::GitlabUiFormBuilder do
checkbox_html
- expect(fake_template).to have_received(:label).with(:user, :view_diffs_file_by_file, { class: %w(custom-control-label label-foo-bar), object: user })
+ expect(fake_template).to have_received(:label).with(:user, :view_diffs_file_by_file, { class: %w(custom-control-label label-foo-bar), object: user, value: nil })
+ end
+ end
+ end
+
+ describe '#gitlab_ui_radio_component' do
+ let(:optional_args) { {} }
+
+ subject(:radio_html) { form_builder.gitlab_ui_radio_component(:access_level, :admin, "Access Level", **optional_args) }
+
+ context 'without optional arguments' do
+ it 'renders correct html' do
+ expected_html = <<~EOS
+ <div class="gl-form-radio custom-control custom-radio">
+ <input class="custom-control-input" type="radio" value="admin" name="user[access_level]" id="user_access_level_admin" />
+ <label class="custom-control-label" for="user_access_level_admin">
+ Access Level
+ </label>
+ </div>
+ EOS
+
+ expect(radio_html).to eq(html_strip_whitespace(expected_html))
+ end
+ end
+
+ context 'with optional arguments' do
+ let(:optional_args) do
+ {
+ help_text: 'Administrators have access to all groups, projects, and users and can manage all features in this installation',
+ radio_options: { class: 'radio-foo-bar' },
+ label_options: { class: 'label-foo-bar' }
+ }
+ end
+
+ it 'renders help text' do
+ expected_html = <<~EOS
+ <div class="gl-form-radio custom-control custom-radio">
+ <input class="custom-control-input radio-foo-bar" type="radio" value="admin" name="user[access_level]" id="user_access_level_admin" />
+ <label class="custom-control-label label-foo-bar" for="user_access_level_admin">
+ <span>Access Level</span>
+ <p class="help-text">Administrators have access to all groups, projects, and users and can manage all features in this installation</p>
+ </label>
+ </div>
+ EOS
+
+ expect(radio_html).to eq(html_strip_whitespace(expected_html))
+ end
+
+ it 'passes arguments to `radio_button` method' do
+ allow(fake_template).to receive(:radio_button).and_return('')
+
+ radio_html
+
+ expect(fake_template).to have_received(:radio_button).with(:user, :access_level, :admin, { class: %w(custom-control-input radio-foo-bar), object: user })
+ end
+
+ it 'passes arguments to `label` method' do
+ allow(fake_template).to receive(:label).and_return('')
+
+ radio_html
+
+ expect(fake_template).to have_received(:label).with(:user, :access_level, { class: %w(custom-control-label label-foo-bar), object: user, value: :admin })
end
end
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 9ecd281cce0..c7b68ff3e28 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -2238,7 +2238,6 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
describe '#squash' do
- let(:squash_id) { '1' }
let(:branch_name) { 'fix' }
let(:start_sha) { '4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6' }
let(:end_sha) { '12d65c8dd2b2676fa3ac47d955accc085a37a9c1' }
@@ -2252,7 +2251,7 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
message: 'Squash commit message'
}
- repository.squash(user, squash_id, opts)
+ repository.squash(user, opts)
end
# Should be ported to gitaly-ruby rspec suite https://gitlab.com/gitlab-org/gitaly/issues/1234
diff --git a/spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb b/spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb
index 2c9da0f6606..e551dfaa1c5 100644
--- a/spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb
+++ b/spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::Git::WrapsGitalyErrors do
mapping = {
GRPC::NotFound => Gitlab::Git::Repository::NoRepository,
GRPC::InvalidArgument => ArgumentError,
+ GRPC::DeadlineExceeded => Gitlab::Git::CommandTimedOut,
GRPC::BadStatus => Gitlab::Git::CommandError
}
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index 3789bc76a94..27e7d446770 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -169,6 +169,56 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
end
end
+ describe '#user_merge_branch' do
+ let(:target_branch) { 'master' }
+ let(:source_sha) { '5937ac0a7beb003549fc5fd26fc247adbce4a52e' }
+ let(:message) { 'Merge a branch' }
+
+ subject { client.user_merge_branch(user, source_sha, target_branch, message) {} }
+
+ it 'sends a user_merge_branch message' do
+ expect(subject).to be_a(Gitlab::Git::OperationService::BranchUpdate)
+ expect(subject.newrev).to be_present
+ expect(subject.repo_created).to be(false)
+ expect(subject.branch_created).to be(false)
+ end
+
+ context 'with an exception with the UserMergeBranchError' do
+ let(:permission_error) do
+ GRPC::PermissionDenied.new(
+ "GitLab: You are not allowed to push code to this project.",
+ { "grpc-status-details-bin" =>
+ "\b\a\x129GitLab: You are not allowed to push code to this project.\x1A\xDE\x01\n/type.googleapis.com/gitaly.UserMergeBranchError\x12\xAA\x01\n\xA7\x01\n1You are not allowed to push code to this project.\x12\x03web\x1A\auser-15\"df15b32277d2c55c6c595845a87109b09c913c556 5d6e0f935ad9240655f64e883cd98fad6f9a17ee refs/heads/master\n" }
+ )
+ end
+
+      it 'raises PreReceiveError with the error message' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_merge_branch).with(kind_of(Enumerator), kind_of(Hash))
+ .and_raise(permission_error)
+
+ expect { subject }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::PreReceiveError)
+ expect(error.message).to eq("You are not allowed to push code to this project.")
+ end
+ end
+ end
+
+ context 'with an exception without the detailed error' do
+ let(:permission_error) do
+ GRPC::PermissionDenied.new
+ end
+
+ it 'raises PermissionDenied' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_merge_branch).with(kind_of(Enumerator), kind_of(Hash))
+ .and_raise(permission_error)
+
+ expect { subject }.to raise_error(GRPC::PermissionDenied)
+ end
+ end
+ end
+
describe '#user_ff_branch' do
let(:target_branch) { 'my-branch' }
let(:source_sha) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' }
@@ -308,7 +358,6 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
end
describe '#user_squash' do
- let(:squash_id) { '1' }
let(:start_sha) { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }
let(:end_sha) { '54cec5282aa9f21856362fe321c800c236a61615' }
let(:commit_message) { 'Squash message' }
@@ -321,7 +370,6 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
Gitaly::UserSquashRequest.new(
repository: repository.gitaly_repository,
user: gitaly_user,
- squash_id: squash_id.to_s,
start_sha: start_sha,
end_sha: end_sha,
author: gitaly_user,
@@ -334,7 +382,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
let(:response) { Gitaly::UserSquashResponse.new(squash_sha: squash_sha) }
subject do
- client.user_squash(user, squash_id, start_sha, end_sha, user, commit_message, time)
+ client.user_squash(user, start_sha, end_sha, user, commit_message, time)
end
it 'sends a user_squash message and returns the squash sha' do
diff --git a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
index 46b9959ff64..be4fc3cbf16 100644
--- a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
@@ -15,10 +15,18 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter do
original_commit_id: 'original123abc',
diff_hunk: "@@ -1 +1 @@\n-Hello\n+Hello world",
user: double(:user, id: 4, login: 'alice'),
- body: 'Hello world',
created_at: Time.zone.now,
updated_at: Time.zone.now,
- id: 1
+ line: 23,
+ start_line: nil,
+ id: 1,
+ body: <<~BODY
+ Hello World
+
+ ```suggestion
+ sug1
+ ```
+ BODY
)
end
diff --git a/spec/lib/gitlab/github_import/parallel_importer_spec.rb b/spec/lib/gitlab/github_import/parallel_importer_spec.rb
index 06304bf84ca..c7b300ff043 100644
--- a/spec/lib/gitlab/github_import/parallel_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_importer_spec.rb
@@ -9,6 +9,18 @@ RSpec.describe Gitlab::GithubImport::ParallelImporter do
end
end
+ describe '.track_start_import' do
+ it 'tracks the start of import' do
+ project = double(:project)
+ metrics = double(:metrics)
+
+ expect(Gitlab::Import::Metrics).to receive(:new).with(:github_importer, project).and_return(metrics)
+ expect(metrics).to receive(:track_start_import)
+
+ described_class.track_start_import(project)
+ end
+ end
+
describe '#execute', :clean_gitlab_redis_shared_state do
let(:project) { create(:project) }
let(:importer) { described_class.new(project) }
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index 1fc7d3c887f..f375e84e0fd 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -130,7 +130,8 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
project_id: project.id,
exception: exception,
error_source: 'MyImporter',
- fail_import: false
+ fail_import: false,
+ metrics: true
).and_call_original
expect { importer.execute }
@@ -195,7 +196,8 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
project_id: project.id,
exception: exception,
error_source: 'MyImporter',
- fail_import: true
+ fail_import: true,
+ metrics: true
).and_call_original
expect { importer.execute }
diff --git a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
index 7c24cd0a5db..81722c0eba7 100644
--- a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote do
end
it 'includes the GitHub ID' do
- expect(note.github_id).to eq(1)
+ expect(note.note_id).to eq(1)
end
it 'returns the noteable type' do
@@ -73,6 +73,8 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote do
body: 'Hello world',
created_at: created_at,
updated_at: updated_at,
+ line: 23,
+ start_line: nil,
id: 1
)
end
@@ -90,47 +92,70 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote do
expect(note.author).to be_nil
end
- end
- describe '.from_json_hash' do
- it_behaves_like 'a DiffNote' do
- let(:hash) do
- {
- 'noteable_type' => 'MergeRequest',
- 'noteable_id' => 42,
- 'file_path' => 'README.md',
- 'commit_id' => '123abc',
- 'original_commit_id' => 'original123abc',
- 'diff_hunk' => hunk,
- 'author' => { 'id' => 4, 'login' => 'alice' },
- 'note' => 'Hello world',
- 'created_at' => created_at.to_s,
- 'updated_at' => updated_at.to_s,
- 'github_id' => 1
- }
- end
+ it 'formats a suggestion in the note body' do
+ allow(response)
+ .to receive(:body)
+ .and_return <<~BODY
+ ```suggestion
+ Hello World
+ ```
+ BODY
- let(:note) { described_class.from_json_hash(hash) }
+ note = described_class.from_api_response(response)
+
+ expect(note.note).to eq <<~BODY
+ ```suggestion:-0+0
+ Hello World
+ ```
+ BODY
end
+ end
- it 'does not convert the author if it was not specified' do
- hash = {
+ describe '.from_json_hash' do
+ let(:hash) do
+ {
'noteable_type' => 'MergeRequest',
'noteable_id' => 42,
'file_path' => 'README.md',
'commit_id' => '123abc',
'original_commit_id' => 'original123abc',
'diff_hunk' => hunk,
+ 'author' => { 'id' => 4, 'login' => 'alice' },
'note' => 'Hello world',
'created_at' => created_at.to_s,
'updated_at' => updated_at.to_s,
- 'github_id' => 1
+ 'note_id' => 1
}
+ end
+
+ it_behaves_like 'a DiffNote' do
+ let(:note) { described_class.from_json_hash(hash) }
+ end
+
+ it 'does not convert the author if it was not specified' do
+ hash.delete('author')
note = described_class.from_json_hash(hash)
expect(note.author).to be_nil
end
+
+ it 'formats a suggestion in the note body' do
+ hash['note'] = <<~BODY
+ ```suggestion
+ Hello World
+ ```
+ BODY
+
+ note = described_class.from_json_hash(hash)
+
+ expect(note.note).to eq <<~BODY
+ ```suggestion:-0+0
+ Hello World
+ ```
+ BODY
+ end
end
describe '#line_code' do
@@ -154,7 +179,7 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote do
'note' => 'Hello world',
'created_at' => created_at.to_s,
'updated_at' => updated_at.to_s,
- 'github_id' => 1
+ 'note_id' => 1
)
expect(note.diff_hash).to eq(
@@ -167,4 +192,68 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote do
)
end
end
+
+ describe '#github_identifiers' do
+ it 'returns a hash with needed identifiers' do
+ github_identifiers = {
+ noteable_id: 42,
+ noteable_type: 'MergeRequest',
+ note_id: 1
+ }
+ other_attributes = { something_else: '_something_else_' }
+ note = described_class.new(github_identifiers.merge(other_attributes))
+
+ expect(note.github_identifiers).to eq(github_identifiers)
+ end
+ end
+
+ describe '#note' do
+ it 'returns the given note' do
+ hash = {
+ 'note': 'simple text'
+ }
+
+ note = described_class.new(hash)
+
+ expect(note.note).to eq 'simple text'
+ end
+
+ it 'returns the suggestion formatted in the note' do
+ hash = {
+ 'note': <<~BODY
+ ```suggestion
+ Hello World
+ ```
+ BODY
+ }
+
+ note = described_class.new(hash)
+
+ expect(note.note).to eq <<~BODY
+ ```suggestion:-0+0
+ Hello World
+ ```
+ BODY
+ end
+
+ it 'returns the multi-line suggestion formatted in the note' do
+ hash = {
+ 'start_line': 20,
+ 'end_line': 23,
+ 'note': <<~BODY
+ ```suggestion
+ Hello World
+ ```
+ BODY
+ }
+
+ note = described_class.new(hash)
+
+ expect(note.note).to eq <<~BODY
+ ```suggestion:-3+0
+ Hello World
+ ```
+ BODY
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/representation/diff_notes/suggestion_formatter_spec.rb b/spec/lib/gitlab/github_import/representation/diff_notes/suggestion_formatter_spec.rb
new file mode 100644
index 00000000000..2ffd5f50d3b
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/diff_notes/suggestion_formatter_spec.rb
@@ -0,0 +1,164 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Representation::DiffNotes::SuggestionFormatter do
+ it 'does nothing when there is any text before the suggestion tag' do
+ note = <<~BODY
+ looks like```suggestion but it isn't
+ ```
+ BODY
+
+ expect(described_class.formatted_note_for(note: note)).to eq(note)
+ end
+
+ it 'handles nil value for note' do
+ note = nil
+
+ expect(described_class.formatted_note_for(note: note)).to eq(note)
+ end
+
+ it 'does not allow over 3 leading spaces for valid suggestion' do
+ note = <<~BODY
+ Single-line suggestion
+ ```suggestion
+ sug1
+ ```
+ BODY
+
+ expect(described_class.formatted_note_for(note: note)).to eq(note)
+ end
+
+ it 'allows up to 3 leading spaces' do
+ note = <<~BODY
+ Single-line suggestion
+ ```suggestion
+ sug1
+ ```
+ BODY
+
+ expected = <<~BODY
+ Single-line suggestion
+ ```suggestion:-0+0
+ sug1
+ ```
+ BODY
+
+ expect(described_class.formatted_note_for(note: note)).to eq(expected)
+ end
+
+ it 'does nothing when there is any text without space after the suggestion tag' do
+ note = <<~BODY
+ ```suggestionbut it isn't
+ ```
+ BODY
+
+ expect(described_class.formatted_note_for(note: note)).to eq(note)
+ end
+
+ it 'formats single-line suggestions' do
+ note = <<~BODY
+ Single-line suggestion
+ ```suggestion
+ sug1
+ ```
+ BODY
+
+ expected = <<~BODY
+ Single-line suggestion
+ ```suggestion:-0+0
+ sug1
+ ```
+ BODY
+
+ expect(described_class.formatted_note_for(note: note)).to eq(expected)
+ end
+
+ it 'ignores text after suggestion tag on the same line' do
+ note = <<~BODY
+ looks like
+ ```suggestion text to be ignored
+ suggestion
+ ```
+ BODY
+
+ expected = <<~BODY
+ looks like
+ ```suggestion:-0+0
+ suggestion
+ ```
+ BODY
+
+ expect(described_class.formatted_note_for(note: note)).to eq(expected)
+ end
+
+ it 'formats multiple single-line suggestions' do
+ note = <<~BODY
+ Single-line suggestion
+ ```suggestion
+ sug1
+ ```
+ OR
+ ```suggestion
+ sug2
+ ```
+ BODY
+
+ expected = <<~BODY
+ Single-line suggestion
+ ```suggestion:-0+0
+ sug1
+ ```
+ OR
+ ```suggestion:-0+0
+ sug2
+ ```
+ BODY
+
+ expect(described_class.formatted_note_for(note: note)).to eq(expected)
+ end
+
+ it 'formats multi-line suggestions' do
+ note = <<~BODY
+ Multi-line suggestion
+ ```suggestion
+ sug1
+ ```
+ BODY
+
+ expected = <<~BODY
+ Multi-line suggestion
+ ```suggestion:-2+0
+ sug1
+ ```
+ BODY
+
+ expect(described_class.formatted_note_for(note: note, start_line: 6, end_line: 8)).to eq(expected)
+ end
+
+ it 'formats multiple multi-line suggestions' do
+ note = <<~BODY
+ Multi-line suggestion
+ ```suggestion
+ sug1
+ ```
+ OR
+ ```suggestion
+ sug2
+ ```
+ BODY
+
+ expected = <<~BODY
+ Multi-line suggestion
+ ```suggestion:-2+0
+ sug1
+ ```
+ OR
+ ```suggestion:-2+0
+ sug2
+ ```
+ BODY
+
+ expect(described_class.formatted_note_for(note: note, start_line: 6, end_line: 8)).to eq(expected)
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/issue_spec.rb b/spec/lib/gitlab/github_import/representation/issue_spec.rb
index 3d306a4a3a3..f3052efea70 100644
--- a/spec/lib/gitlab/github_import/representation/issue_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/issue_spec.rb
@@ -181,4 +181,17 @@ RSpec.describe Gitlab::GithubImport::Representation::Issue do
expect(object.truncated_title).to eq('foo')
end
end
+
+ describe '#github_identifiers' do
+ it 'returns a hash with needed identifiers' do
+ github_identifiers = {
+ iid: 42,
+ issuable_type: 'MergeRequest'
+ }
+ other_attributes = { pull_request: true, something_else: '_something_else_' }
+ issue = described_class.new(github_identifiers.merge(other_attributes))
+
+ expect(issue.github_identifiers).to eq(github_identifiers)
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/representation/lfs_object_spec.rb b/spec/lib/gitlab/github_import/representation/lfs_object_spec.rb
new file mode 100644
index 00000000000..b59ea513436
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/lfs_object_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Representation::LfsObject do
+ describe '#github_identifiers' do
+ it 'returns a hash with needed identifiers' do
+ github_identifiers = {
+ oid: 42
+ }
+ other_attributes = { something_else: '_something_else_' }
+ lfs_object = described_class.new(github_identifiers.merge(other_attributes))
+
+ expect(lfs_object.github_identifiers).to eq(github_identifiers)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/note_spec.rb b/spec/lib/gitlab/github_import/representation/note_spec.rb
index 112bb7eb908..97addcc1c98 100644
--- a/spec/lib/gitlab/github_import/representation/note_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/note_spec.rb
@@ -40,8 +40,8 @@ RSpec.describe Gitlab::GithubImport::Representation::Note do
expect(note.updated_at).to eq(updated_at)
end
- it 'includes the GitHub ID' do
- expect(note.github_id).to eq(1)
+ it 'includes the note ID' do
+ expect(note.note_id).to eq(1)
end
end
end
@@ -84,7 +84,7 @@ RSpec.describe Gitlab::GithubImport::Representation::Note do
'note' => 'Hello world',
'created_at' => created_at.to_s,
'updated_at' => updated_at.to_s,
- 'github_id' => 1
+ 'note_id' => 1
}
end
@@ -98,7 +98,7 @@ RSpec.describe Gitlab::GithubImport::Representation::Note do
'note' => 'Hello world',
'created_at' => created_at.to_s,
'updated_at' => updated_at.to_s,
- 'github_id' => 1
+ 'note_id' => 1
}
note = described_class.from_json_hash(hash)
@@ -106,4 +106,18 @@ RSpec.describe Gitlab::GithubImport::Representation::Note do
expect(note.author).to be_nil
end
end
+
+ describe '#github_identifiers' do
+ it 'returns a hash with needed identifiers' do
+ github_identifiers = {
+ noteable_id: 42,
+ noteable_type: 'Issue',
+ note_id: 1
+ }
+ other_attributes = { something_else: '_something_else_' }
+ note = described_class.new(github_identifiers.merge(other_attributes))
+
+ expect(note.github_identifiers).to eq(github_identifiers)
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb b/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb
index cad9b13774e..f812fd85fbc 100644
--- a/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::GithubImport::Representation::PullRequestReview do
expect(review.note).to eq('note')
expect(review.review_type).to eq('APPROVED')
expect(review.submitted_at).to eq(submitted_at)
- expect(review.github_id).to eq(999)
+ expect(review.review_id).to eq(999)
expect(review.merge_request_id).to eq(42)
end
end
@@ -50,7 +50,7 @@ RSpec.describe Gitlab::GithubImport::Representation::PullRequestReview do
describe '.from_json_hash' do
let(:hash) do
{
- 'github_id' => 999,
+ 'review_id' => 999,
'merge_request_id' => 42,
'note' => 'note',
'review_type' => 'APPROVED',
@@ -75,4 +75,17 @@ RSpec.describe Gitlab::GithubImport::Representation::PullRequestReview do
expect(review.submitted_at).to be_nil
end
end
+
+ describe '#github_identifiers' do
+ it 'returns a hash with needed identifiers' do
+ github_identifiers = {
+ review_id: 999,
+ merge_request_id: 42
+ }
+ other_attributes = { something_else: '_something_else_' }
+ review = described_class.new(github_identifiers.merge(other_attributes))
+
+ expect(review.github_identifiers).to eq(github_identifiers)
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/representation/pull_request_spec.rb b/spec/lib/gitlab/github_import/representation/pull_request_spec.rb
index 27a82951b01..925dba5b5a7 100644
--- a/spec/lib/gitlab/github_import/representation/pull_request_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/pull_request_spec.rb
@@ -288,4 +288,16 @@ RSpec.describe Gitlab::GithubImport::Representation::PullRequest do
expect(object.truncated_title).to eq('foo')
end
end
+
+ describe '#github_identifiers' do
+ it 'returns a hash with needed identifiers' do
+ github_identifiers = {
+ iid: 1
+ }
+ other_attributes = { something_else: '_something_else_' }
+ pr = described_class.new(github_identifiers.merge(other_attributes))
+
+ expect(pr.github_identifiers).to eq(github_identifiers.merge(issuable_type: 'MergeRequest'))
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/sequential_importer_spec.rb b/spec/lib/gitlab/github_import/sequential_importer_spec.rb
index 3c3f8ff59d0..2b76f0e27c9 100644
--- a/spec/lib/gitlab/github_import/sequential_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/sequential_importer_spec.rb
@@ -4,10 +4,17 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::SequentialImporter do
describe '#execute' do
+ let_it_be(:project) do
+ create(:project, import_url: 'http://t0ken@github.another-domain.com/repo-org/repo.git', import_type: 'github')
+ end
+
+ subject(:importer) { described_class.new(project, token: 'foo') }
+
it 'imports a project in sequence' do
- repository = double(:repository)
- project = double(:project, id: 1, repository: repository, import_url: 'http://t0ken@github.another-domain.com/repo-org/repo.git', group: nil)
- importer = described_class.new(project, token: 'foo')
+ expect_next_instance_of(Gitlab::Import::Metrics) do |instance|
+ expect(instance).to receive(:track_start_import)
+ expect(instance).to receive(:track_finished_import)
+ end
expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
expect(instance).to receive(:execute)
@@ -35,5 +42,23 @@ RSpec.describe Gitlab::GithubImport::SequentialImporter do
expect(importer.execute).to eq(true)
end
+
+ it 'raises an error' do
+ exception = StandardError.new('_some_error_')
+
+ expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |importer|
+ expect(importer).to receive(:execute).and_raise(exception)
+ end
+ expect(Gitlab::Import::ImportFailureService).to receive(:track)
+ .with(
+ project_id: project.id,
+ exception: exception,
+ error_source: described_class.name,
+ fail_import: true,
+ metrics: true
+ ).and_call_original
+
+ expect { importer.execute }.to raise_error(StandardError)
+ end
end
end
diff --git a/spec/lib/gitlab/health_checks/probes/collection_spec.rb b/spec/lib/gitlab/health_checks/probes/collection_spec.rb
index 69828c143db..741c45d953c 100644
--- a/spec/lib/gitlab/health_checks/probes/collection_spec.rb
+++ b/spec/lib/gitlab/health_checks/probes/collection_spec.rb
@@ -16,6 +16,9 @@ RSpec.describe Gitlab::HealthChecks::Probes::Collection do
Gitlab::HealthChecks::Redis::CacheCheck,
Gitlab::HealthChecks::Redis::QueuesCheck,
Gitlab::HealthChecks::Redis::SharedStateCheck,
+ Gitlab::HealthChecks::Redis::TraceChunksCheck,
+ Gitlab::HealthChecks::Redis::RateLimitingCheck,
+ Gitlab::HealthChecks::Redis::SessionsCheck,
Gitlab::HealthChecks::GitalyCheck
]
end
diff --git a/spec/lib/gitlab/health_checks/redis/rate_limiting_check_spec.rb b/spec/lib/gitlab/health_checks/redis/rate_limiting_check_spec.rb
new file mode 100644
index 00000000000..1521fc99cde
--- /dev/null
+++ b/spec/lib/gitlab/health_checks/redis/rate_limiting_check_spec.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_relative '../simple_check_shared'
+
+RSpec.describe Gitlab::HealthChecks::Redis::RateLimitingCheck do
+ include_examples 'simple_check', 'redis_rate_limiting_ping', 'RedisRateLimiting', 'PONG'
+end
diff --git a/spec/lib/gitlab/health_checks/redis/sessions_check_spec.rb b/spec/lib/gitlab/health_checks/redis/sessions_check_spec.rb
new file mode 100644
index 00000000000..82b3b33ec0a
--- /dev/null
+++ b/spec/lib/gitlab/health_checks/redis/sessions_check_spec.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_relative '../simple_check_shared'
+
+RSpec.describe Gitlab::HealthChecks::Redis::SessionsCheck do
+ include_examples 'simple_check', 'redis_sessions_ping', 'RedisSessions', 'PONG'
+end
diff --git a/spec/lib/gitlab/import/import_failure_service_spec.rb b/spec/lib/gitlab/import/import_failure_service_spec.rb
index 50b32d634ad..c16d4a7c804 100644
--- a/spec/lib/gitlab/import/import_failure_service_spec.rb
+++ b/spec/lib/gitlab/import/import_failure_service_spec.rb
@@ -2,135 +2,171 @@
require 'spec_helper'
-RSpec.describe Gitlab::Import::ImportFailureService do
+RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
let_it_be(:import_type) { 'import_type' }
+ let_it_be(:project) { create(:project, :import_started, import_type: import_type) }
- let_it_be(:project) do
- create(
- :project,
- :import_started,
- import_type: import_type
- )
- end
-
- let(:import_state) { project.import_state }
let(:exception) { StandardError.new('some error') }
+ let(:arguments) { { project_id: project.id } }
+ let(:base_arguments) { { error_source: 'SomeImporter', exception: exception }.merge(arguments) }
+ let(:exe_arguments) { { fail_import: false, metrics: false } }
+
+ describe '.track' do
+ context 'with all arguments provided' do
+ let(:instance) { double(:failure_service) }
+ let(:instance_arguments) do
+ {
+ exception: exception,
+ import_state: '_import_state_',
+ project_id: '_project_id_',
+ error_source: '_error_source_'
+ }
+ end
- shared_examples 'logs the exception and fails the import' do
- it 'when the failure does not abort the import' do
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(
- exception,
- project_id: project.id,
- import_type: import_type,
- source: 'SomeImporter'
- )
-
- expect(Gitlab::Import::Logger)
- .to receive(:error)
- .with(
- message: 'importer failed',
- 'error.message': 'some error',
- project_id: project.id,
- import_type: import_type,
- source: 'SomeImporter'
- )
-
- described_class.track(**arguments)
-
- expect(project.import_state.reload.status).to eq('failed')
-
- expect(project.import_failures).not_to be_empty
- expect(project.import_failures.last.exception_class).to eq('StandardError')
- expect(project.import_failures.last.exception_message).to eq('some error')
- end
- end
+ let(:exe_arguments) do
+ {
+ fail_import: '_fail_import_',
+ metrics: '_metrics_'
+ }
+ end
- shared_examples 'logs the exception and does not fail the import' do
- it 'when the failure does not abort the import' do
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(
- exception,
- project_id: project.id,
- import_type: import_type,
- source: 'SomeImporter'
- )
-
- expect(Gitlab::Import::Logger)
- .to receive(:error)
- .with(
- message: 'importer failed',
- 'error.message': 'some error',
- project_id: project.id,
- import_type: import_type,
- source: 'SomeImporter'
- )
-
- described_class.track(**arguments)
-
- expect(project.import_state.reload.status).to eq('started')
-
- expect(project.import_failures).not_to be_empty
- expect(project.import_failures.last.exception_class).to eq('StandardError')
- expect(project.import_failures.last.exception_message).to eq('some error')
+ it 'invokes a new instance and executes' do
+ expect(described_class).to receive(:new).with(**instance_arguments).and_return(instance)
+ expect(instance).to receive(:execute).with(**exe_arguments)
+
+ described_class.track(**instance_arguments.merge(exe_arguments))
+ end
end
- end
- context 'when using the project as reference' do
- context 'when it fails the import' do
- let(:arguments) do
+ context 'with only necessary arguments utilizing defaults' do
+ let(:instance) { double(:failure_service) }
+ let(:instance_arguments) do
{
- project_id: project.id,
exception: exception,
- error_source: 'SomeImporter',
- fail_import: true
+ import_state: nil,
+ project_id: nil,
+ error_source: nil
}
end
- it_behaves_like 'logs the exception and fails the import'
- end
-
- context 'when it does not fail the import' do
- let(:arguments) do
+ let(:exe_arguments) do
{
- project_id: project.id,
- exception: exception,
- error_source: 'SomeImporter',
- fail_import: false
+ fail_import: false,
+ metrics: false
}
end
- it_behaves_like 'logs the exception and does not fail the import'
+ it 'invokes a new instance and executes' do
+ expect(described_class).to receive(:new).with(**instance_arguments).and_return(instance)
+ expect(instance).to receive(:execute).with(**exe_arguments)
+
+ described_class.track(exception: exception)
+ end
end
end
- context 'when using the import_state as reference' do
- context 'when it fails the import' do
- let(:arguments) do
- {
- import_state: import_state,
- exception: exception,
- error_source: 'SomeImporter',
- fail_import: true
- }
+ describe '#execute' do
+ subject(:service) { described_class.new(**base_arguments) }
+
+ shared_examples 'logs the exception and fails the import' do
+ it 'when the failure does not abort the import' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(
+ exception,
+ project_id: project.id,
+ import_type: import_type,
+ source: 'SomeImporter'
+ )
+
+ expect(Gitlab::Import::Logger)
+ .to receive(:error)
+ .with(
+ message: 'importer failed',
+ 'error.message': 'some error',
+ project_id: project.id,
+ import_type: import_type,
+ source: 'SomeImporter'
+ )
+
+ service.execute(**exe_arguments)
+
+ expect(project.import_state.reload.status).to eq('failed')
+
+ expect(project.import_failures).not_to be_empty
+ expect(project.import_failures.last.exception_class).to eq('StandardError')
+ expect(project.import_failures.last.exception_message).to eq('some error')
end
+ end
- it_behaves_like 'logs the exception and fails the import'
+ shared_examples 'logs the exception and does not fail the import' do
+ it 'when the failure does not abort the import' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(
+ exception,
+ project_id: project.id,
+ import_type: import_type,
+ source: 'SomeImporter'
+ )
+
+ expect(Gitlab::Import::Logger)
+ .to receive(:error)
+ .with(
+ message: 'importer failed',
+ 'error.message': 'some error',
+ project_id: project.id,
+ import_type: import_type,
+ source: 'SomeImporter'
+ )
+
+ service.execute(**exe_arguments)
+
+ expect(project.import_state.reload.status).to eq('started')
+
+ expect(project.import_failures).not_to be_empty
+ expect(project.import_failures.last.exception_class).to eq('StandardError')
+ expect(project.import_failures.last.exception_message).to eq('some error')
+ end
end
- context 'when it does not fail the import' do
- let(:arguments) do
- {
- import_state: import_state,
- exception: exception,
- error_source: 'SomeImporter',
- fail_import: false
- }
+ context 'when tracking metrics' do
+ let(:exe_arguments) { { fail_import: false, metrics: true } }
+
+ it 'tracks the failed import' do
+ metrics = double(:metrics)
+
+ expect(Gitlab::Import::Metrics).to receive(:new).with("#{project.import_type}_importer", project).and_return(metrics)
+ expect(metrics).to receive(:track_failed_import)
+
+ service.execute(**exe_arguments)
end
+ end
+
+ context 'when using the project as reference' do
+ context 'when it fails the import' do
+ let(:exe_arguments) { { fail_import: true, metrics: false } }
- it_behaves_like 'logs the exception and does not fail the import'
+ it_behaves_like 'logs the exception and fails the import'
+ end
+
+ context 'when it does not fail the import' do
+ it_behaves_like 'logs the exception and does not fail the import'
+ end
+ end
+
+ context 'when using the import_state as reference' do
+ let(:arguments) { { import_state: project.import_state } }
+
+ context 'when it fails the import' do
+ let(:exe_arguments) { { fail_import: true, metrics: false } }
+
+ it_behaves_like 'logs the exception and fails the import'
+ end
+
+ context 'when it does not fail the import' do
+ it_behaves_like 'logs the exception and does not fail the import'
+ end
end
end
end
diff --git a/spec/lib/gitlab/import/metrics_spec.rb b/spec/lib/gitlab/import/metrics_spec.rb
index 0a912427014..035294a620f 100644
--- a/spec/lib/gitlab/import/metrics_spec.rb
+++ b/spec/lib/gitlab/import/metrics_spec.rb
@@ -2,20 +2,67 @@
require 'spec_helper'
-RSpec.describe Gitlab::Import::Metrics do
+RSpec.describe Gitlab::Import::Metrics, :aggregate_failures do
let(:importer) { :test_importer }
- let(:project) { create(:project) }
+ let(:project) { build(:project, id: non_existing_record_id, created_at: Time.current) }
let(:histogram) { double(:histogram) }
let(:counter) { double(:counter) }
subject { described_class.new(importer, project) }
- describe '#report_import_time' do
+ before do
+ allow(Gitlab::Metrics).to receive(:counter) { counter }
+ allow(counter).to receive(:increment)
+ allow(histogram).to receive(:observe)
+ end
+
+ describe '#track_start_import' do
+ context 'when project is not a github import' do
+ it 'does not emit importer metrics' do
+ expect(subject).not_to receive(:track_usage_event)
+
+ subject.track_start_import
+ end
+ end
+
+ context 'when project is a github import' do
+ before do
+ project.import_type = 'github'
+ end
+
+ it 'emits importer metrics' do
+ expect(subject).to receive(:track_usage_event).with(:github_import_project_start, project.id)
+
+ subject.track_start_import
+ end
+ end
+ end
+
+ describe '#track_failed_import' do
+ context 'when project is not a github import' do
+ it 'does not emit importer metrics' do
+ expect(subject).not_to receive(:track_usage_event)
+
+ subject.track_failed_import
+ end
+ end
+
+ context 'when project is a github import' do
+ before do
+ project.import_type = 'github'
+ end
+
+ it 'emits importer metrics' do
+ expect(subject).to receive(:track_usage_event).with(:github_import_project_failure, project.id)
+
+ subject.track_failed_import
+ end
+ end
+ end
+
+ describe '#track_finished_import' do
before do
- allow(Gitlab::Metrics).to receive(:counter) { counter }
allow(Gitlab::Metrics).to receive(:histogram) { histogram }
- allow(counter).to receive(:increment)
- allow(counter).to receive(:observe)
end
it 'emits importer metrics' do
@@ -32,9 +79,56 @@ RSpec.describe Gitlab::Import::Metrics do
)
expect(counter).to receive(:increment)
- expect(histogram).to receive(:observe).with({ importer: :test_importer }, anything)
subject.track_finished_import
+
+ expect(subject.duration).not_to be_nil
+ end
+
+ context 'when project is not a github import' do
+ it 'does not emit importer metrics' do
+ expect(subject).not_to receive(:track_usage_event)
+
+ subject.track_finished_import
+
+ expect(histogram).to have_received(:observe).with({ importer: :test_importer }, anything)
+ end
+ end
+
+ context 'when project is a github import' do
+ before do
+ project.import_type = 'github'
+ end
+
+ it 'emits importer metrics' do
+ expect(subject).to receive(:track_usage_event).with(:github_import_project_success, project.id)
+
+ subject.track_finished_import
+
+ expect(histogram).to have_received(:observe).with({ project: project.full_path }, anything)
+ end
+ end
+ end
+
+ describe '#issues_counter' do
+ it 'creates a counter for issues' do
+ expect(Gitlab::Metrics).to receive(:counter).with(
+ :test_importer_imported_issues_total,
+ 'The number of imported issues'
+ )
+
+ subject.issues_counter
+ end
+ end
+
+ describe '#merge_requests_counter' do
+ it 'creates a counter for merge requests' do
+ expect(Gitlab::Metrics).to receive(:counter).with(
+ :test_importer_imported_merge_requests_total,
+ 'The number of imported merge (pull) requests'
+ )
+
+ subject.merge_requests_counter
end
end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 614aa55c3c5..10f0e687077 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -59,6 +59,7 @@ issues:
- requirement
- incident_management_issuable_escalation_status
- pending_escalations
+- customer_relations_contacts
work_item_type:
- issues
events:
@@ -272,6 +273,8 @@ ci_pipelines:
- dast_profiles_pipeline
- dast_site_profile
- dast_site_profiles_pipeline
+- package_build_infos
+- package_file_build_infos
ci_refs:
- project
- ci_pipelines
@@ -322,7 +325,6 @@ integrations:
- jira_tracker_data
- zentao_tracker_data
- issue_tracker_data
-- open_project_tracker_data
hooks:
- project
- web_hook_logs
@@ -354,10 +356,7 @@ container_repositories:
- name
project:
- external_status_checks
-- taggings
- base_tags
-- topic_taggings
-- topics_acts_as_taggable
- project_topics
- topics
- chat_services
@@ -593,6 +592,7 @@ project:
- pending_builds
- security_scans
- ci_feature_usages
+- bulk_import_exports
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/attributes_permitter_spec.rb b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb
index 36a831a785c..2b974f8985d 100644
--- a/spec/lib/gitlab/import_export/attributes_permitter_spec.rb
+++ b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb
@@ -83,14 +83,22 @@ RSpec.describe Gitlab::ImportExport::AttributesPermitter do
where(:relation_name, :permitted_attributes_defined) do
:user | false
:author | false
- :ci_cd_settings | false
- :issuable_sla | false
- :push_rule | false
+ :ci_cd_settings | true
:metrics_setting | true
:project_badges | true
:pipeline_schedules | true
:error_tracking_setting | true
:auto_devops | true
+ :boards | true
+ :custom_attributes | true
+ :labels | true
+ :protected_branches | true
+ :protected_tags | true
+ :create_access_levels | true
+ :merge_access_levels | true
+ :push_access_levels | true
+ :releases | true
+ :links | true
end
with_them do
@@ -99,47 +107,11 @@ RSpec.describe Gitlab::ImportExport::AttributesPermitter do
end
describe 'included_attributes for Project' do
- let(:prohibited_attributes) { %i[remote_url my_attributes my_ids token my_id test] }
-
subject { described_class.new }
Gitlab::ImportExport::Config.new.to_h[:included_attributes].each do |relation_sym, permitted_attributes|
context "for #{relation_sym}" do
- let(:import_export_config) { Gitlab::ImportExport::Config.new.to_h }
- let(:project_relation_factory) { Gitlab::ImportExport::Project::RelationFactory }
-
- let(:relation_hash) { (permitted_attributes + prohibited_attributes).map(&:to_s).zip([]).to_h }
- let(:relation_name) { project_relation_factory.overrides[relation_sym]&.to_sym || relation_sym }
- let(:relation_class) { project_relation_factory.relation_class(relation_name) }
- let(:excluded_keys) { import_export_config.dig(:excluded_keys, relation_sym) || [] }
-
- let(:cleaned_hash) do
- Gitlab::ImportExport::AttributeCleaner.new(
- relation_hash: relation_hash,
- relation_class: relation_class,
- excluded_keys: excluded_keys
- ).clean
- end
-
- let(:permitted_hash) { subject.permit(relation_sym, relation_hash) }
-
- if described_class.new.permitted_attributes_defined?(relation_sym)
- it 'contains only attributes that are defined as permitted in the import/export config' do
- expect(permitted_hash.keys).to contain_exactly(*permitted_attributes.map(&:to_s))
- end
-
- it 'does not contain attributes that would be cleaned with AttributeCleaner' do
- expect(cleaned_hash.keys).to include(*permitted_hash.keys)
- end
-
- it 'does not contain prohibited attributes that are not related to given relation' do
- expect(permitted_hash.keys).not_to include(*prohibited_attributes.map(&:to_s))
- end
- else
- it 'is disabled' do
- expect(subject).not_to be_permitted_attributes_defined(relation_sym)
- end
- end
+ it_behaves_like 'a permitted attribute', relation_sym, permitted_attributes
end
end
end
diff --git a/spec/lib/gitlab/import_export/command_line_util_spec.rb b/spec/lib/gitlab/import_export/command_line_util_spec.rb
index 39a10f87083..59c4e1083ae 100644
--- a/spec/lib/gitlab/import_export/command_line_util_spec.rb
+++ b/spec/lib/gitlab/import_export/command_line_util_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do
let(:path) { "#{Dir.tmpdir}/symlink_test" }
let(:archive) { 'spec/fixtures/symlink_export.tar.gz' }
let(:shared) { Gitlab::ImportExport::Shared.new(nil) }
+ let(:tmpdir) { Dir.mktmpdir }
subject do
Class.new do
@@ -26,6 +27,7 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do
after do
FileUtils.rm_rf(path)
+ FileUtils.remove_entry(tmpdir)
end
it 'has the right mask for project.json' do
@@ -55,7 +57,6 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do
describe '#gunzip' do
it 'decompresses specified file' do
- tmpdir = Dir.mktmpdir
filename = 'labels.ndjson.gz'
gz_filepath = "spec/fixtures/bulk_imports/gz/#{filename}"
FileUtils.copy_file(gz_filepath, File.join(tmpdir, filename))
@@ -63,8 +64,6 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do
subject.gunzip(dir: tmpdir, filename: filename)
expect(File.exist?(File.join(tmpdir, 'labels.ndjson'))).to eq(true)
-
- FileUtils.remove_entry(tmpdir)
end
context 'when exception occurs' do
@@ -73,4 +72,33 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do
end
end
end
+
+ describe '#tar_cf' do
+ let(:archive_dir) { Dir.mktmpdir }
+
+ after do
+ FileUtils.remove_entry(archive_dir)
+ end
+
+ it 'archives a folder without compression' do
+ archive_file = File.join(archive_dir, 'archive.tar')
+
+ result = subject.tar_cf(archive: archive_file, dir: tmpdir)
+
+ expect(result).to eq(true)
+ expect(File.exist?(archive_file)).to eq(true)
+ end
+
+ context 'when something goes wrong' do
+ it 'raises an error' do
+ expect(Gitlab::Popen).to receive(:popen).and_return(['Error', 1])
+
+ klass = Class.new do
+ include Gitlab::ImportExport::CommandLineUtil
+ end.new
+
+ expect { klass.tar_cf(archive: 'test', dir: 'test') }.to raise_error(Gitlab::ImportExport::Error, 'System call failed')
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
index 9e30564b437..d69d775fffb 100644
--- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
@@ -115,7 +115,7 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer do
end
it 'orders exported issues by custom column(relative_position)' do
- expected_issues = exportable.issues.order_relative_position_desc.order(id: :desc).map(&:to_json)
+ expected_issues = exportable.issues.reorder(::Gitlab::Database.nulls_first_order('relative_position', 'DESC')).order(id: :desc).map(&:to_json)
expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, expected_issues)
@@ -163,21 +163,10 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer do
stub_feature_flags(load_balancing_for_export_workers: true)
end
- context 'when enabled', :db_load_balancing do
- it 'reads from replica' do
- expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
+ it 'reads from replica' do
+ expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
- subject.execute
- end
- end
-
- context 'when disabled' do
- it 'reads from primary' do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(false)
- expect(Gitlab::Database::LoadBalancing::Session.current).not_to receive(:use_replicas_for_read_queries)
-
- subject.execute
- end
+ subject.execute
end
end
diff --git a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
index c558c12f581..550cefea805 100644
--- a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
+++ b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
@@ -13,9 +13,11 @@ RSpec.describe Gitlab::ImportExport::MergeRequestParser do
create(:merge_request, source_project: forked_project, target_project: project)
end
+ let(:diff_head_sha) { SecureRandom.hex(20) }
+
let(:parsed_merge_request) do
described_class.new(project,
- 'abcd',
+ diff_head_sha,
merge_request,
merge_request.as_json).parse!
end
@@ -34,14 +36,34 @@ RSpec.describe Gitlab::ImportExport::MergeRequestParser do
expect(project.repository.branch_exists?(parsed_merge_request.target_branch)).to be true
end
- it 'parses a MR that has no source branch' do
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:branch_exists?).and_call_original
- allow(instance).to receive(:branch_exists?).with(merge_request.source_branch).and_return(false)
- allow(instance).to receive(:fork_merge_request?).and_return(true)
+ # Source and target branches are only created for fork merge requests
+ context 'fork merge request' do
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:fork_merge_request?).and_return(true)
+ end
+ end
+
+ it 'parses a MR that has no source branch' do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:branch_exists?).and_call_original
+ allow(instance).to receive(:branch_exists?).with(merge_request.source_branch).and_return(false)
+ end
+
+ expect(parsed_merge_request).to eq(merge_request)
end
- expect(parsed_merge_request).to eq(merge_request)
+ it 'parses a MR that is closed' do
+ merge_request.update!(state: :closed, source_branch: 'new_branch')
+
+ expect(project.repository.branch_exists?(parsed_merge_request.source_branch)).to be false
+ end
+
+ it 'parses a MR that is merged' do
+ merge_request.update!(state: :merged, source_branch: 'new_branch')
+
+ expect(project.repository.branch_exists?(parsed_merge_request.source_branch)).to be false
+ end
end
context 'when the merge request has diffs' do
diff --git a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb
index 9325cdac9ed..5e4075c2b59 100644
--- a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb
@@ -30,18 +30,12 @@ RSpec.describe Gitlab::ImportExport::RelationTreeRestorer do
subject { relation_tree_restorer.restore }
shared_examples 'import project successfully' do
- it 'restores project tree' do
- expect(subject).to eq(true)
- end
-
describe 'imported project' do
- let(:project) { Project.find_by_path('project') }
+ it 'has the project attributes and relations', :aggregate_failures do
+ expect(subject).to eq(true)
- before do
- subject
- end
+ project = Project.find_by_path('project')
- it 'has the project attributes and relations' do
expect(project.description).to eq('Nisi et repellendus ut enim quo accusamus vel magnam.')
expect(project.labels.count).to eq(3)
expect(project.boards.count).to eq(1)
@@ -86,7 +80,10 @@ RSpec.describe Gitlab::ImportExport::RelationTreeRestorer do
end
context 'when restoring a project' do
- let(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') }
+ let_it_be(:importable, reload: true) do
+ create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project')
+ end
+
let(:importable_name) { 'project' }
let(:importable_path) { 'project' }
let(:object_builder) { Gitlab::ImportExport::Project::ObjectBuilder }
@@ -108,8 +105,10 @@ RSpec.describe Gitlab::ImportExport::RelationTreeRestorer do
it_behaves_like 'import project successfully'
context 'logging of relations creation' do
- let(:group) { create(:group) }
- let(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project', group: group) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:importable) do
+ create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project', group: group)
+ end
include_examples 'logging of relations creation'
end
@@ -120,6 +119,18 @@ RSpec.describe Gitlab::ImportExport::RelationTreeRestorer do
let(:relation_reader) { Gitlab::ImportExport::Json::NdjsonReader.new(path) }
it_behaves_like 'import project successfully'
+
+ context 'when inside a group' do
+ let_it_be(:group) do
+ create(:group, :disabled_and_unoverridable)
+ end
+
+ before do
+ importable.update!(shared_runners_enabled: false, group: group)
+ end
+
+ it_behaves_like 'import project successfully'
+ end
end
context 'with invalid relations' do
@@ -143,9 +154,10 @@ RSpec.describe Gitlab::ImportExport::RelationTreeRestorer do
end
context 'when restoring a group' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:importable) { create(:group, parent: group) }
+
let(:path) { 'spec/fixtures/lib/gitlab/import_export/group_exports/no_children/group.json' }
- let(:group) { create(:group) }
- let(:importable) { create(:group, parent: group) }
let(:importable_name) { nil }
let(:importable_path) { nil }
let(:object_builder) { Gitlab::ImportExport::Group::ObjectBuilder }
diff --git a/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
index c1661cf02b6..7d719b6028f 100644
--- a/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
@@ -29,6 +29,9 @@ RSpec.describe Gitlab::ImportExport::SnippetRepoRestorer do
expect(restorer.restore).to be_truthy
end.to change { SnippetRepository.count }.by(1)
+ snippet.repository.expire_method_caches(%i(exists?))
+ expect(snippet.repository_exists?).to be_truthy
+
blob = snippet.repository.blob_at(snippet.default_branch, snippet.file_name)
expect(blob).not_to be_nil
expect(blob.data).to eq(snippet.content)
diff --git a/spec/lib/gitlab/instrumentation/redis_spec.rb b/spec/lib/gitlab/instrumentation/redis_spec.rb
index ebc2e92a0dd..900a079cdd2 100644
--- a/spec/lib/gitlab/instrumentation/redis_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_spec.rb
@@ -76,7 +76,9 @@ RSpec.describe Gitlab::Instrumentation::Redis do
details_row.merge(storage: 'Cache'),
details_row.merge(storage: 'Queues'),
details_row.merge(storage: 'SharedState'),
- details_row.merge(storage: 'TraceChunks'))
+ details_row.merge(storage: 'TraceChunks'),
+ details_row.merge(storage: 'RateLimiting'),
+ details_row.merge(storage: 'Sessions'))
end
end
end
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index 85daf50717c..52d3623c304 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -107,75 +107,50 @@ RSpec.describe Gitlab::InstrumentationHelper do
end
end
- context 'when load balancing is enabled' do
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
- end
-
- it 'includes DB counts' do
- subject
-
- expect(payload).to include(db_replica_count: 0,
- db_replica_cached_count: 0,
- db_primary_count: 0,
- db_primary_cached_count: 0,
- db_primary_wal_count: 0,
- db_replica_wal_count: 0,
- db_primary_wal_cached_count: 0,
- db_replica_wal_cached_count: 0)
- end
-
- context 'when replica caught up search was made' do
- before do
- Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = 2
- Gitlab::SafeRequestStore[:caught_up_replica_pick_fail] = 1
- end
+ it 'includes DB counts' do
+ subject
- it 'includes related metrics' do
- subject
+ expect(payload).to include(db_replica_count: 0,
+ db_replica_cached_count: 0,
+ db_primary_count: 0,
+ db_primary_cached_count: 0,
+ db_primary_wal_count: 0,
+ db_replica_wal_count: 0,
+ db_primary_wal_cached_count: 0,
+ db_replica_wal_cached_count: 0)
+ end
- expect(payload).to include(caught_up_replica_pick_ok: 2)
- expect(payload).to include(caught_up_replica_pick_fail: 1)
- end
+ context 'when replica caught up search was made' do
+ before do
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = 2
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_fail] = 1
end
- context 'when only a single counter was updated' do
- before do
- Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = 1
- Gitlab::SafeRequestStore[:caught_up_replica_pick_fail] = nil
- end
-
- it 'includes only that counter into logging' do
- subject
+ it 'includes related metrics' do
+ subject
- expect(payload).to include(caught_up_replica_pick_ok: 1)
- expect(payload).not_to include(:caught_up_replica_pick_fail)
- end
+ expect(payload).to include(caught_up_replica_pick_ok: 2)
+ expect(payload).to include(caught_up_replica_pick_fail: 1)
end
end
- context 'when load balancing is disabled' do
+ context 'when only a single counter was updated' do
before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(false)
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = 1
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_fail] = nil
end
- it 'does not include DB counts' do
+ it 'includes only that counter into logging' do
subject
- expect(payload).not_to include(db_replica_count: 0,
- db_replica_cached_count: 0,
- db_primary_count: 0,
- db_primary_cached_count: 0,
- db_primary_wal_count: 0,
- db_replica_wal_count: 0,
- db_primary_wal_cached_count: 0,
- db_replica_wal_cached_count: 0)
+ expect(payload).to include(caught_up_replica_pick_ok: 1)
+ expect(payload).not_to include(:caught_up_replica_pick_fail)
end
end
end
- describe '.queue_duration_for_job' do
- where(:enqueued_at, :created_at, :time_now, :expected_duration) do
+ describe 'duration calculations' do
+ where(:end_time, :start_time, :time_now, :expected_duration) do
"2019-06-01T00:00:00.000+0000" | nil | "2019-06-01T02:00:00.000+0000" | 2.hours.to_f
"2019-06-01T02:00:00.000+0000" | nil | "2019-06-01T02:00:00.001+0000" | 0.001
"2019-06-01T02:00:00.000+0000" | "2019-05-01T02:00:00.000+0000" | "2019-06-01T02:00:01.000+0000" | 1
@@ -189,15 +164,29 @@ RSpec.describe Gitlab::InstrumentationHelper do
0 | nil | "2019-10-23T12:13:16.000+0200" | nil
-1 | nil | "2019-10-23T12:13:16.000+0200" | nil
"2019-06-01T02:00:00.000+0000" | nil | "2019-06-01T00:00:00.000+0000" | 0
- Time.at(1571999233) | nil | "2019-10-25T12:29:16.000+0200" | 123
+ Time.at(1571999233).utc | nil | "2019-10-25T12:29:16.000+0200" | 123
+ end
+
+ describe '.queue_duration_for_job' do
+ with_them do
+ let(:job) { { 'enqueued_at' => end_time, 'created_at' => start_time } }
+
+ it "returns the correct duration" do
+ travel_to(Time.iso8601(time_now)) do
+ expect(described_class.queue_duration_for_job(job)).to eq(expected_duration)
+ end
+ end
+ end
end
- with_them do
- let(:job) { { 'enqueued_at' => enqueued_at, 'created_at' => created_at } }
+ describe '.enqueue_latency_for_scheduled_job' do
+ with_them do
+ let(:job) { { 'enqueued_at' => end_time, 'scheduled_at' => start_time } }
- it "returns the correct duration" do
- Timecop.freeze(Time.iso8601(time_now)) do
- expect(described_class.queue_duration_for_job(job)).to eq(expected_duration)
+ it "returns the correct duration" do
+ travel_to(Time.iso8601(time_now)) do
+ expect(described_class.enqueue_latency_for_scheduled_job(job)).to eq(expected_duration)
+ end
end
end
end
diff --git a/spec/lib/gitlab/kas_spec.rb b/spec/lib/gitlab/kas_spec.rb
index 17d038ed16c..0fbb5f31210 100644
--- a/spec/lib/gitlab/kas_spec.rb
+++ b/spec/lib/gitlab/kas_spec.rb
@@ -70,30 +70,44 @@ RSpec.describe Gitlab::Kas do
stub_config(gitlab_kas: { external_url: external_url })
end
+ let(:external_url) { 'xyz' }
+
subject { described_class.tunnel_url }
- context 'external_url uses wss://' do
- let(:external_url) { 'wss://kas.gitlab.example.com' }
+ context 'with a gitlab_kas.external_k8s_proxy_url setting' do
+ let(:external_k8s_proxy_url) { 'abc' }
+
+ before do
+ stub_config(gitlab_kas: { external_k8s_proxy_url: external_k8s_proxy_url })
+ end
- it { is_expected.to eq('https://kas.gitlab.example.com/k8s-proxy') }
+ it { is_expected.to eq(external_k8s_proxy_url) }
end
- context 'external_url uses ws://' do
- let(:external_url) { 'ws://kas.gitlab.example.com' }
+ context 'without a gitlab_kas.external_k8s_proxy_url setting' do
+ context 'external_url uses wss://' do
+ let(:external_url) { 'wss://kas.gitlab.example.com' }
- it { is_expected.to eq('http://kas.gitlab.example.com/k8s-proxy') }
- end
+ it { is_expected.to eq('https://kas.gitlab.example.com/k8s-proxy') }
+ end
- context 'external_url uses grpcs://' do
- let(:external_url) { 'grpcs://kas.gitlab.example.com' }
+ context 'external_url uses ws://' do
+ let(:external_url) { 'ws://kas.gitlab.example.com' }
- it { is_expected.to eq('https://kas.gitlab.example.com/k8s-proxy') }
- end
+ it { is_expected.to eq('http://kas.gitlab.example.com/k8s-proxy') }
+ end
+
+ context 'external_url uses grpcs://' do
+ let(:external_url) { 'grpcs://kas.gitlab.example.com' }
- context 'external_url uses grpc://' do
- let(:external_url) { 'grpc://kas.gitlab.example.com' }
+ it { is_expected.to eq('https://kas.gitlab.example.com/k8s-proxy') }
+ end
+
+ context 'external_url uses grpc://' do
+ let(:external_url) { 'grpc://kas.gitlab.example.com' }
- it { is_expected.to eq('http://kas.gitlab.example.com/k8s-proxy') }
+ it { is_expected.to eq('http://kas.gitlab.example.com/k8s-proxy') }
+ end
end
end
diff --git a/spec/lib/gitlab/mail_room/mail_room_spec.rb b/spec/lib/gitlab/mail_room/mail_room_spec.rb
index a42da4ad3e0..0bd1a27c65e 100644
--- a/spec/lib/gitlab/mail_room/mail_room_spec.rb
+++ b/spec/lib/gitlab/mail_room/mail_room_spec.rb
@@ -93,7 +93,7 @@ RSpec.describe Gitlab::MailRoom do
end
describe 'setting up redis settings' do
- let(:fake_redis_queues) { double(url: "localhost", sentinels: "yes, them", sentinels?: true) }
+ let(:fake_redis_queues) { double(url: "localhost", db: 99, sentinels: "yes, them", sentinels?: true) }
before do
allow(Gitlab::Redis::Queues).to receive(:new).and_return(fake_redis_queues)
@@ -103,6 +103,7 @@ RSpec.describe Gitlab::MailRoom do
config = described_class.enabled_configs.first
expect(config[:redis_url]).to eq('localhost')
+ expect(config[:redis_db]).to eq(99)
expect(config[:sentinels]).to eq('yes, them')
end
end
diff --git a/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb b/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
new file mode 100644
index 00000000000..4f437e57600
--- /dev/null
+++ b/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
@@ -0,0 +1,140 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::MergeRequests::Mergeability::CheckResult do
+ subject(:check_result) { described_class }
+
+ let(:time) { Time.current }
+
+ around do |example|
+ freeze_time do
+ example.run
+ end
+ end
+
+ describe '.default_payload' do
+ it 'returns the expected defaults' do
+ expect(check_result.default_payload).to eq({ last_run_at: time })
+ end
+ end
+
+ describe '.success' do
+ subject(:success) { check_result.success(payload: payload) }
+
+ let(:payload) { {} }
+
+ it 'creates a success result' do
+ expect(success.status).to eq described_class::SUCCESS_STATUS
+ end
+
+ it 'uses the default payload' do
+ expect(success.payload).to eq described_class.default_payload
+ end
+
+ context 'when given a payload' do
+ let(:payload) { { last_run_at: time + 1.day, test: 'test' } }
+
+ it 'uses the payload passed' do
+ expect(success.payload).to eq payload
+ end
+ end
+ end
+
+ describe '.failed' do
+ subject(:failed) { check_result.failed(payload: payload) }
+
+ let(:payload) { {} }
+
+ it 'creates a failure result' do
+ expect(failed.status).to eq described_class::FAILED_STATUS
+ end
+
+ it 'uses the default payload' do
+ expect(failed.payload).to eq described_class.default_payload
+ end
+
+ context 'when given a payload' do
+ let(:payload) { { last_run_at: time + 1.day, test: 'test' } }
+
+ it 'uses the payload passed' do
+ expect(failed.payload).to eq payload
+ end
+ end
+ end
+
+ describe '.from_hash' do
+ subject(:from_hash) { described_class.from_hash(hash) }
+
+ let(:status) { described_class::SUCCESS_STATUS }
+ let(:payload) { { test: 'test' } }
+ let(:hash) do
+ {
+ status: status,
+ payload: payload
+ }
+ end
+
+ it 'returns the expected status and payload' do
+ expect(from_hash.status).to eq status
+ expect(from_hash.payload).to eq payload
+ end
+ end
+
+ describe '#to_hash' do
+ subject(:to_hash) { described_class.new(**hash).to_hash }
+
+ let(:status) { described_class::SUCCESS_STATUS }
+ let(:payload) { { test: 'test' } }
+ let(:hash) do
+ {
+ status: status,
+ payload: payload
+ }
+ end
+
+ it 'returns the expected hash' do
+ expect(to_hash).to eq hash
+ end
+ end
+
+ describe '#failed?' do
+ subject(:failed) { described_class.new(status: status).failed? }
+
+ context 'when it has failed' do
+ let(:status) { described_class::FAILED_STATUS }
+
+ it 'returns true' do
+ expect(failed).to eq true
+ end
+ end
+
+ context 'when it has succeeded' do
+ let(:status) { described_class::SUCCESS_STATUS }
+
+ it 'returns false' do
+ expect(failed).to eq false
+ end
+ end
+ end
+
+ describe '#success?' do
+ subject(:success) { described_class.new(status: status).success? }
+
+ context 'when it has failed' do
+ let(:status) { described_class::FAILED_STATUS }
+
+ it 'returns false' do
+ expect(success).to eq false
+ end
+ end
+
+ context 'when it has succeeded' do
+ let(:status) { described_class::SUCCESS_STATUS }
+
+ it 'returns true' do
+ expect(success).to eq true
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb b/spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb
new file mode 100644
index 00000000000..e5475d04d86
--- /dev/null
+++ b/spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::MergeRequests::Mergeability::RedisInterface, :clean_gitlab_redis_shared_state do
+ subject(:redis_interface) { described_class.new }
+
+ let(:merge_check) { double(cache_key: '13') }
+ let(:result_hash) { { 'test' => 'test' } }
+ let(:expected_key) { "#{merge_check.cache_key}:#{described_class::VERSION}" }
+
+ describe '#save_check' do
+ it 'saves the hash' do
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(expected_key) }).to be_nil
+
+ redis_interface.save_check(merge_check: merge_check, result_hash: result_hash)
+
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(expected_key) }).to eq result_hash.to_json
+ end
+ end
+
+ describe '#retrieve_check' do
+ it 'returns the hash' do
+ Gitlab::Redis::SharedState.with { |redis| redis.set(expected_key, result_hash.to_json) }
+
+ expect(redis_interface.retrieve_check(merge_check: merge_check)).to eq result_hash
+ end
+ end
+end
diff --git a/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb b/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb
new file mode 100644
index 00000000000..d376dcb5b18
--- /dev/null
+++ b/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::MergeRequests::Mergeability::ResultsStore do
+ subject(:results_store) { described_class.new(merge_request: merge_request, interface: interface) }
+
+ let(:merge_check) { double }
+ let(:interface) { double }
+ let(:merge_request) { double }
+
+ describe '#read' do
+ it 'calls #retrieve on the interface' do
+ expect(interface).to receive(:retrieve_check).with(merge_check: merge_check)
+
+ results_store.read(merge_check: merge_check)
+ end
+ end
+
+ describe '#write' do
+ let(:result_hash) { double }
+
+ it 'calls #save_check on the interface' do
+ expect(interface).to receive(:save_check).with(merge_check: merge_check, result_hash: result_hash)
+
+ results_store.write(merge_check: merge_check, result_hash: result_hash)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/exporter/web_exporter_spec.rb b/spec/lib/gitlab/metrics/exporter/web_exporter_spec.rb
index ce98c807e2e..9deaecbf41b 100644
--- a/spec/lib/gitlab/metrics/exporter/web_exporter_spec.rb
+++ b/spec/lib/gitlab/metrics/exporter/web_exporter_spec.rb
@@ -30,6 +30,15 @@ RSpec.describe Gitlab::Metrics::Exporter::WebExporter do
expect(readiness_probe.json).to include(status: 'ok')
expect(readiness_probe.json).to include('web_exporter' => [{ 'status': 'ok' }])
end
+
+ it 'initializes request metrics', :prometheus do
+ expect(Gitlab::Metrics::RailsSlis).to receive(:initialize_request_slis_if_needed!).and_call_original
+
+ http = Net::HTTP.new(exporter.server.config[:BindAddress], exporter.server.config[:Port])
+ response = http.request(Net::HTTP::Get.new('/metrics'))
+
+ expect(response.body).to include('gitlab_sli:rails_request_apdex')
+ end
end
describe '#mark_as_not_running!' do
diff --git a/spec/lib/gitlab/metrics/instrumentation_spec.rb b/spec/lib/gitlab/metrics/instrumentation_spec.rb
deleted file mode 100644
index b15e06a0861..00000000000
--- a/spec/lib/gitlab/metrics/instrumentation_spec.rb
+++ /dev/null
@@ -1,342 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Instrumentation do
- let(:env) { {} }
- let(:transaction) { Gitlab::Metrics::WebTransaction.new(env) }
-
- before do
- @dummy = Class.new do
- def self.foo(text = 'foo')
- text
- end
-
- def self.wat(text = 'wat')
- text
- end
- private_class_method :wat
-
- class << self
- def buzz(text = 'buzz')
- text
- end
- private :buzz
-
- def flaky(text = 'flaky')
- text
- end
- protected :flaky
- end
-
- def bar(text = 'bar')
- text
- end
-
- def wadus(text = 'wadus')
- text
- end
- private :wadus
-
- def chaf(text = 'chaf')
- text
- end
- protected :chaf
- end
-
- allow(@dummy).to receive(:name).and_return('Dummy')
- end
-
- describe '.series' do
- it 'returns a String' do
- expect(described_class.series).to be_an_instance_of(String)
- end
- end
-
- describe '.configure' do
- it 'yields self' do
- described_class.configure do |c|
- expect(c).to eq(described_class)
- end
- end
- end
-
- describe '.instrument_method' do
- describe 'with metrics enabled' do
- before do
- allow(Gitlab::Metrics).to receive(:enabled?).and_return(true)
-
- described_class.instrument_method(@dummy, :foo)
- end
-
- it 'instruments the Class' do
- target = @dummy.singleton_class
-
- expect(described_class.instrumented?(target)).to eq(true)
- end
-
- it 'defines a proxy method' do
- mod = described_class.proxy_module(@dummy.singleton_class)
-
- expect(mod.method_defined?(:foo)).to eq(true)
- end
-
- it 'calls the instrumented method with the correct arguments' do
- expect(@dummy.foo).to eq('foo')
- end
-
- it 'tracks the call duration upon calling the method' do
- allow(Gitlab::Metrics).to receive(:method_call_threshold)
- .and_return(0)
-
- allow(described_class).to receive(:transaction)
- .and_return(transaction)
-
- expect_next_instance_of(Gitlab::Metrics::MethodCall) do |instance|
- expect(instance).to receive(:measure)
- end
-
- @dummy.foo
- end
-
- it 'does not track method calls below a given duration threshold' do
- allow(Gitlab::Metrics).to receive(:method_call_threshold)
- .and_return(100)
-
- expect(transaction).not_to receive(:add_metric)
-
- @dummy.foo
- end
-
- it 'generates a method with the correct arity when using methods without arguments' do
- dummy = Class.new do
- def self.test; end
- end
-
- described_class.instrument_method(dummy, :test)
-
- expect(dummy.method(:test).arity).to eq(0)
- end
-
- describe 'when a module is instrumented multiple times' do
- it 'calls the instrumented method with the correct arguments' do
- described_class.instrument_method(@dummy, :foo)
-
- expect(@dummy.foo).to eq('foo')
- end
- end
- end
-
- describe 'with metrics disabled' do
- before do
- allow(Gitlab::Metrics).to receive(:enabled?).and_return(false)
- end
-
- it 'does not instrument the method' do
- described_class.instrument_method(@dummy, :foo)
-
- target = @dummy.singleton_class
-
- expect(described_class.instrumented?(target)).to eq(false)
- end
- end
- end
-
- describe '.instrument_instance_method' do
- describe 'with metrics enabled' do
- before do
- allow(Gitlab::Metrics).to receive(:enabled?).and_return(true)
-
- described_class
- .instrument_instance_method(@dummy, :bar)
- end
-
- it 'instruments instances of the Class' do
- expect(described_class.instrumented?(@dummy)).to eq(true)
- end
-
- it 'defines a proxy method' do
- mod = described_class.proxy_module(@dummy)
-
- expect(mod.method_defined?(:bar)).to eq(true)
- end
-
- it 'calls the instrumented method with the correct arguments' do
- expect(@dummy.new.bar).to eq('bar')
- end
-
- it 'tracks the call duration upon calling the method' do
- allow(Gitlab::Metrics).to receive(:method_call_threshold)
- .and_return(0)
-
- allow(described_class).to receive(:transaction)
- .and_return(transaction)
-
- expect_next_instance_of(Gitlab::Metrics::MethodCall) do |instance|
- expect(instance).to receive(:measure)
- end
-
- @dummy.new.bar
- end
-
- it 'does not track method calls below a given duration threshold' do
- allow(Gitlab::Metrics).to receive(:method_call_threshold)
- .and_return(100)
-
- expect(transaction).not_to receive(:add_metric)
-
- @dummy.new.bar
- end
- end
-
- describe 'with metrics disabled' do
- before do
- allow(Gitlab::Metrics).to receive(:enabled?).and_return(false)
- end
-
- it 'does not instrument the method' do
- described_class
- .instrument_instance_method(@dummy, :bar)
-
- expect(described_class.instrumented?(@dummy)).to eq(false)
- end
- end
- end
-
- describe '.instrument_class_hierarchy' do
- before do
- allow(Gitlab::Metrics).to receive(:enabled?).and_return(true)
-
- @child1 = Class.new(@dummy) do
- def self.child1_foo; end
-
- def child1_bar; end
- end
-
- @child2 = Class.new(@child1) do
- def self.child2_foo; end
-
- def child2_bar; end
- end
- end
-
- it 'recursively instruments a class hierarchy' do
- described_class.instrument_class_hierarchy(@dummy)
-
- expect(described_class.instrumented?(@child1.singleton_class)).to eq(true)
- expect(described_class.instrumented?(@child2.singleton_class)).to eq(true)
-
- expect(described_class.instrumented?(@child1)).to eq(true)
- expect(described_class.instrumented?(@child2)).to eq(true)
- end
-
- it 'does not instrument the root module' do
- described_class.instrument_class_hierarchy(@dummy)
-
- expect(described_class.instrumented?(@dummy)).to eq(false)
- end
- end
-
- describe '.instrument_methods' do
- before do
- allow(Gitlab::Metrics).to receive(:enabled?).and_return(true)
- end
-
- it 'instruments all public class methods' do
- described_class.instrument_methods(@dummy)
-
- expect(described_class.instrumented?(@dummy.singleton_class)).to eq(true)
- expect(@dummy.method(:foo).source_location.first).to match(/instrumentation\.rb/)
- expect(@dummy.public_methods).to include(:foo)
- end
-
- it 'instruments all protected class methods' do
- described_class.instrument_methods(@dummy)
-
- expect(described_class.instrumented?(@dummy.singleton_class)).to eq(true)
- expect(@dummy.method(:flaky).source_location.first).to match(/instrumentation\.rb/)
- expect(@dummy.protected_methods).to include(:flaky)
- end
-
- it 'instruments all private class methods' do
- described_class.instrument_methods(@dummy)
-
- expect(described_class.instrumented?(@dummy.singleton_class)).to eq(true)
- expect(@dummy.method(:buzz).source_location.first).to match(/instrumentation\.rb/)
- expect(@dummy.private_methods).to include(:buzz)
- expect(@dummy.private_methods).to include(:wat)
- end
-
- it 'only instruments methods directly defined in the module' do
- mod = Module.new do
- def kittens
- end
- end
-
- @dummy.extend(mod)
-
- described_class.instrument_methods(@dummy)
-
- expect(@dummy).not_to respond_to(:_original_kittens)
- end
-
- it 'can take a block to determine if a method should be instrumented' do
- described_class.instrument_methods(@dummy) do
- false
- end
-
- expect(@dummy).not_to respond_to(:_original_foo)
- end
- end
-
- describe '.instrument_instance_methods' do
- before do
- allow(Gitlab::Metrics).to receive(:enabled?).and_return(true)
- end
-
- it 'instruments all public instance methods' do
- described_class.instrument_instance_methods(@dummy)
-
- expect(described_class.instrumented?(@dummy)).to eq(true)
- expect(@dummy.new.method(:bar).source_location.first).to match(/instrumentation\.rb/)
- expect(@dummy.public_instance_methods).to include(:bar)
- end
-
- it 'instruments all protected instance methods' do
- described_class.instrument_instance_methods(@dummy)
-
- expect(described_class.instrumented?(@dummy)).to eq(true)
- expect(@dummy.new.method(:chaf).source_location.first).to match(/instrumentation\.rb/)
- expect(@dummy.protected_instance_methods).to include(:chaf)
- end
-
- it 'instruments all private instance methods' do
- described_class.instrument_instance_methods(@dummy)
-
- expect(described_class.instrumented?(@dummy)).to eq(true)
- expect(@dummy.new.method(:wadus).source_location.first).to match(/instrumentation\.rb/)
- expect(@dummy.private_instance_methods).to include(:wadus)
- end
-
- it 'only instruments methods directly defined in the module' do
- mod = Module.new do
- def kittens
- end
- end
-
- @dummy.include(mod)
-
- described_class.instrument_instance_methods(@dummy)
-
- expect(@dummy.new.method(:kittens).source_location.first).not_to match(/instrumentation\.rb/)
- end
-
- it 'can take a block to determine if a method should be instrumented' do
- described_class.instrument_instance_methods(@dummy) do
- false
- end
-
- expect(@dummy.new.method(:bar).source_location.first).not_to match(/instrumentation\.rb/)
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/rails_slis_spec.rb b/spec/lib/gitlab/metrics/rails_slis_spec.rb
new file mode 100644
index 00000000000..16fcb9d46a2
--- /dev/null
+++ b/spec/lib/gitlab/metrics/rails_slis_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::RailsSlis do
+ # Limit what routes we'll initialize so we don't have to load the entire thing
+ before do
+ api_route = API::API.routes.find do |route|
+ API::Base.endpoint_id_for_route(route) == "GET /api/:version/version"
+ end
+
+ allow(Gitlab::RequestEndpoints).to receive(:all_api_endpoints).and_return([api_route])
+ allow(Gitlab::RequestEndpoints).to receive(:all_controller_actions).and_return([[ProjectsController, 'show']])
+ end
+
+ describe '.initialize_request_slis_if_needed!' do
+ it "initializes the SLI for all possible endpoints if they weren't" do
+ possible_labels = [
+ {
+ endpoint_id: "GET /api/:version/version",
+ feature_category: :not_owned
+ },
+ {
+ endpoint_id: "ProjectsController#show",
+ feature_category: :projects
+ }
+ ]
+
+ expect(Gitlab::Metrics::Sli).to receive(:initialized?).with(:rails_request_apdex) { false }
+ expect(Gitlab::Metrics::Sli).to receive(:initialize_sli).with(:rails_request_apdex, array_including(*possible_labels)).and_call_original
+
+ described_class.initialize_request_slis_if_needed!
+ end
+
+ it 'does not initialize the SLI if they were initialized already' do
+ expect(Gitlab::Metrics::Sli).to receive(:initialized?).with(:rails_request_apdex) { true }
+ expect(Gitlab::Metrics::Sli).not_to receive(:initialize_sli)
+
+ described_class.initialize_request_slis_if_needed!
+ end
+
+ it 'does not initialize anything if the feature flag is disabled' do
+ stub_feature_flags(request_apdex_counters: false)
+
+ expect(Gitlab::Metrics::Sli).not_to receive(:initialize_sli)
+ expect(Gitlab::Metrics::Sli).not_to receive(:initialized?)
+
+ described_class.initialize_request_slis_if_needed!
+ end
+ end
+
+ describe '.request_apdex' do
+ it 'returns the initialized request apdex SLI object' do
+ described_class.initialize_request_slis_if_needed!
+
+ expect(described_class.request_apdex).to be_initialized
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
index 9d5c4bdf9e2..5870f9a8f68 100644
--- a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
+++ b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
@@ -36,6 +36,7 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do
it 'tracks request count and duration' do
expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '200', feature_category: 'unknown')
expect(described_class).to receive_message_chain(:http_request_duration_seconds, :observe).with({ method: 'get' }, a_positive_execution_time)
+ expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with(labels: { feature_category: 'unknown', endpoint_id: 'unknown' }, success: true)
subject.call(env)
end
@@ -70,7 +71,7 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do
expect(described_class).not_to receive(:http_health_requests_total)
expect(described_class)
.to receive_message_chain(:http_request_duration_seconds, :observe)
- .with({ method: 'get' }, a_positive_execution_time)
+ .with({ method: 'get' }, a_positive_execution_time)
subject.call(env)
end
@@ -82,9 +83,10 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do
context '@app.call returns an error code' do
let(:status) { '500' }
- it 'tracks count but not duration' do
+ it 'tracks count but not duration or apdex' do
expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '500', feature_category: 'unknown')
expect(described_class).not_to receive(:http_request_duration_seconds)
+ expect(Gitlab::Metrics::RailsSlis).not_to receive(:request_apdex)
subject.call(env)
end
@@ -104,20 +106,23 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do
expect(described_class).to receive_message_chain(:rack_uncaught_errors_count, :increment)
expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: 'undefined', feature_category: 'unknown')
expect(described_class.http_request_duration_seconds).not_to receive(:observe)
+ expect(Gitlab::Metrics::RailsSlis).not_to receive(:request_apdex)
expect { subject.call(env) }.to raise_error(StandardError)
end
end
- context 'feature category header' do
- context 'when a feature category context is present' do
+ context 'application context' do
+ context 'when a context is present' do
before do
- ::Gitlab::ApplicationContext.push(feature_category: 'issue_tracking')
+ ::Gitlab::ApplicationContext.push(feature_category: 'issue_tracking', caller_id: 'IssuesController#show')
end
- it 'adds the feature category to the labels for http_requests_total' do
+ it 'adds the feature category to the labels for required metrics' do
expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '200', feature_category: 'issue_tracking')
expect(described_class).not_to receive(:http_health_requests_total)
+ expect(Gitlab::Metrics::RailsSlis.request_apdex)
+ .to receive(:increment).with(labels: { feature_category: 'issue_tracking', endpoint_id: 'IssuesController#show' }, success: true)
subject.call(env)
end
@@ -127,6 +132,7 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do
expect(described_class).to receive_message_chain(:http_health_requests_total, :increment).with(method: 'get', status: '200')
expect(described_class).not_to receive(:http_requests_total)
+ expect(Gitlab::Metrics::RailsSlis).not_to receive(:request_apdex)
subject.call(env)
end
@@ -140,19 +146,180 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do
it 'adds the feature category to the labels for http_requests_total' do
expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: 'undefined', feature_category: 'issue_tracking')
+ expect(Gitlab::Metrics::RailsSlis).not_to receive(:request_apdex)
expect { subject.call(env) }.to raise_error(StandardError)
end
end
- context 'when the feature category context is not available' do
- it 'sets the feature category to unknown' do
+ context 'when the context is not available' do
+ it 'sets the required labels to unknown' do
expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '200', feature_category: 'unknown')
expect(described_class).not_to receive(:http_health_requests_total)
+ expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with(labels: { feature_category: 'unknown', endpoint_id: 'unknown' }, success: true)
subject.call(env)
end
end
+
+ context 'SLI satisfactory' do
+ where(:request_urgency_name, :duration, :success) do
+ [
+ [:high, 0.1, true],
+ [:high, 0.25, false],
+ [:high, 0.3, false],
+ [:medium, 0.3, true],
+ [:medium, 0.5, false],
+ [:medium, 0.6, false],
+ [:default, 0.6, true],
+ [:default, 1.0, false],
+ [:default, 1.2, false],
+ [:low, 4.5, true],
+ [:low, 5.0, false],
+ [:low, 6, false]
+ ]
+ end
+
+ with_them do
+ context 'Grape API handler having expected duration setup' do
+ let(:api_handler) do
+ request_urgency = request_urgency_name
+ Class.new(::API::Base) do
+ feature_category :hello_world, ['/projects/:id/archive']
+ urgency request_urgency, ['/projects/:id/archive']
+ end
+ end
+
+ let(:endpoint) do
+ route = double(:route, request_method: 'GET', path: '/:version/projects/:id/archive(.:format)')
+ double(:endpoint, route: route,
+ options: { for: api_handler, path: [":id/archive"] },
+ namespace: "/projects")
+ end
+
+ let(:env) { { 'api.endpoint' => endpoint, 'REQUEST_METHOD' => 'GET' } }
+
+ before do
+ ::Gitlab::ApplicationContext.push(feature_category: 'hello_world', caller_id: 'GET /projects/:id/archive')
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100, 100 + duration)
+ end
+
+ it "captures SLI metrics" do
+ expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with(
+ labels: { feature_category: 'hello_world', endpoint_id: 'GET /projects/:id/archive' },
+ success: success
+ )
+ subject.call(env)
+ end
+ end
+
+ context 'Rails controller having expected duration setup' do
+ let(:controller) do
+ request_urgency = request_urgency_name
+ Class.new(ApplicationController) do
+ feature_category :hello_world, [:index, :show]
+ urgency request_urgency, [:index, :show]
+ end
+ end
+
+ let(:env) do
+ controller_instance = controller.new
+ controller_instance.action_name = :index
+ { 'action_controller.instance' => controller_instance, 'REQUEST_METHOD' => 'GET' }
+ end
+
+ before do
+ ::Gitlab::ApplicationContext.push(feature_category: 'hello_world', caller_id: 'AnonymousController#index')
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100, 100 + duration)
+ end
+
+ it "captures SLI metrics" do
+ expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with(
+ labels: { feature_category: 'hello_world', endpoint_id: 'AnonymousController#index' },
+ success: success
+ )
+ subject.call(env)
+ end
+ end
+ end
+
+ context 'Grape API without expected duration' do
+ let(:endpoint) do
+ route = double(:route, request_method: 'GET', path: '/:version/projects/:id/archive(.:format)')
+ double(:endpoint, route: route,
+ options: { for: api_handler, path: [":id/archive"] },
+ namespace: "/projects")
+ end
+
+ let(:env) { { 'api.endpoint' => endpoint, 'REQUEST_METHOD' => 'GET' } }
+
+ let(:api_handler) { Class.new(::API::Base) }
+
+ it "falls back request's expectation to medium (1 second)" do
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100, 100.9)
+ expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with(
+ labels: { feature_category: 'unknown', endpoint_id: 'unknown' },
+ success: true
+ )
+ subject.call(env)
+
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100, 101)
+ expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with(
+ labels: { feature_category: 'unknown', endpoint_id: 'unknown' },
+ success: false
+ )
+ subject.call(env)
+ end
+ end
+
+ context 'Rails controller without expected duration' do
+ let(:controller) { Class.new(ApplicationController) }
+
+ let(:env) do
+ controller_instance = controller.new
+ controller_instance.action_name = :index
+ { 'action_controller.instance' => controller_instance, 'REQUEST_METHOD' => 'GET' }
+ end
+
+ it "falls back request's expectation to medium (1 second)" do
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100, 100.9)
+ expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with(
+ labels: { feature_category: 'unknown', endpoint_id: 'unknown' },
+ success: true
+ )
+ subject.call(env)
+
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100, 101)
+ expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with(
+ labels: { feature_category: 'unknown', endpoint_id: 'unknown' },
+ success: false
+ )
+ subject.call(env)
+ end
+ end
+
+ context 'An unknown request' do
+ let(:env) do
+ { 'REQUEST_METHOD' => 'GET' }
+ end
+
+ it "falls back request's expectation to medium (1 second)" do
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100, 100.9)
+ expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with(
+ labels: { feature_category: 'unknown', endpoint_id: 'unknown' },
+ success: true
+ )
+ subject.call(env)
+
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100, 101)
+ expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with(
+ labels: { feature_category: 'unknown', endpoint_id: 'unknown' },
+ success: false
+ )
+ subject.call(env)
+ end
+ end
+ end
end
describe '.initialize_metrics', :prometheus do
@@ -181,8 +348,8 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do
end
it 'has every label in config/feature_categories.yml' do
- defaults = [described_class::FEATURE_CATEGORY_DEFAULT, 'not_owned']
- feature_categories = YAML.load_file(Rails.root.join('config', 'feature_categories.yml')).map(&:strip) + defaults
+ defaults = [::Gitlab::FeatureCategories::FEATURE_CATEGORY_DEFAULT, 'not_owned']
+ feature_categories = Gitlab::FeatureCategories.default.categories + defaults
expect(described_class::FEATURE_CATEGORIES_TO_INITIALIZE).to all(be_in(feature_categories))
end
diff --git a/spec/lib/gitlab/metrics/sli_spec.rb b/spec/lib/gitlab/metrics/sli_spec.rb
new file mode 100644
index 00000000000..8ba4bf29568
--- /dev/null
+++ b/spec/lib/gitlab/metrics/sli_spec.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Metrics::Sli do
+ let(:prometheus) { double("prometheus") }
+
+ before do
+ stub_const("Gitlab::Metrics", prometheus)
+ end
+
+ describe 'Class methods' do
+ before do
+ described_class.instance_variable_set(:@known_slis, nil)
+ end
+
+ describe '.[]' do
+ it 'warns about an uninitialized SLI but returns and stores a new one' do
+ sli = described_class[:bar]
+
+ expect(described_class[:bar]).to be(sli)
+ end
+
+ it 'returns the same object for multiple accesses' do
+ sli = described_class.initialize_sli(:huzzah, [])
+
+ 2.times do
+ expect(described_class[:huzzah]).to be(sli)
+ end
+ end
+ end
+
+ describe '.initialized?' do
+ before do
+ fake_total_counter(:boom)
+ fake_success_counter(:boom)
+ end
+
+ it 'is true when an SLI was initialized with labels' do
+ expect { described_class.initialize_sli(:boom, [{ hello: :world }]) }
+ .to change { described_class.initialized?(:boom) }.from(false).to(true)
+ end
+
+ it 'is false when an SLI was not initialized with labels' do
+ expect { described_class.initialize_sli(:boom, []) }
+ .not_to change { described_class.initialized?(:boom) }.from(false)
+ end
+ end
+ end
+
+ describe '#initialize_counters' do
+ it 'initializes counters for the passed label combinations' do
+ counters = [fake_total_counter(:hey), fake_success_counter(:hey)]
+
+ described_class.new(:hey).initialize_counters([{ foo: 'bar' }, { foo: 'baz' }])
+
+ expect(counters).to all(have_received(:get).with({ foo: 'bar' }))
+ expect(counters).to all(have_received(:get).with({ foo: 'baz' }))
+ end
+ end
+
+ describe "#increment" do
+ let!(:sli) { described_class.new(:heyo) }
+ let!(:total_counter) { fake_total_counter(:heyo) }
+ let!(:success_counter) { fake_success_counter(:heyo) }
+
+ it 'increments both counters for labels successes' do
+ sli.increment(labels: { hello: "world" }, success: true)
+
+ expect(total_counter).to have_received(:increment).with({ hello: 'world' })
+ expect(success_counter).to have_received(:increment).with({ hello: 'world' })
+ end
+
+ it 'only increments the total counters for labels when not successful' do
+ sli.increment(labels: { hello: "world" }, success: false)
+
+ expect(total_counter).to have_received(:increment).with({ hello: 'world' })
+ expect(success_counter).not_to have_received(:increment).with({ hello: 'world' })
+ end
+ end
+
+ def fake_prometheus_counter(name)
+ fake_counter = double("prometheus counter: #{name}")
+
+ allow(fake_counter).to receive(:get)
+ allow(fake_counter).to receive(:increment)
+ allow(prometheus).to receive(:counter).with(name.to_sym, anything).and_return(fake_counter)
+
+ fake_counter
+ end
+
+ def fake_total_counter(name)
+ fake_prometheus_counter("gitlab_sli:#{name}:total")
+ end
+
+ def fake_success_counter(name)
+ fake_prometheus_counter("gitlab_sli:#{name}:success_total")
+ end
+end
diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
index 3ffbcbea03c..a8e4f039da4 100644
--- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
let(:env) { {} }
let(:subscriber) { described_class.new }
- let(:connection) { ActiveRecord::Base.connection }
+ let(:connection) { ActiveRecord::Base.retrieve_connection }
let(:db_config_name) { ::Gitlab::Database.db_config_name(connection) }
describe '#transaction' do
@@ -135,7 +135,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
end
it_behaves_like 'record ActiveRecord metrics'
- it_behaves_like 'store ActiveRecord info in RequestStore'
+ it_behaves_like 'store ActiveRecord info in RequestStore', :primary
end
end
@@ -195,10 +195,6 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
with_them do
let(:payload) { { name: name, sql: sql(sql_query, comments: comments), connection: connection } }
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
- end
-
context 'query using a connection to a replica' do
before do
allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).and_return(:replica)
diff --git a/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb b/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb
index 21a6573c6fd..bc6effd0438 100644
--- a/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb
@@ -5,10 +5,6 @@ require 'spec_helper'
RSpec.describe Gitlab::Metrics::Subscribers::LoadBalancing, :request_store do
let(:subscriber) { described_class.new }
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
- end
-
describe '#caught_up_replica_pick' do
shared_examples 'having payload result value' do |result, counter_name|
subject { subscriber.caught_up_replica_pick(event) }
diff --git a/spec/lib/gitlab/metrics/web_transaction_spec.rb b/spec/lib/gitlab/metrics/web_transaction_spec.rb
index 5261d04c879..9e22dccb2a2 100644
--- a/spec/lib/gitlab/metrics/web_transaction_spec.rb
+++ b/spec/lib/gitlab/metrics/web_transaction_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
it 'measures with correct labels and value' do
value = 1
- expect(prometheus_metric).to receive(metric_method).with({ controller: 'TestController', action: 'show', feature_category: '' }, value)
+ expect(prometheus_metric).to receive(metric_method).with({ controller: 'TestController', action: 'show', feature_category: ::Gitlab::FeatureCategories::FEATURE_CATEGORY_DEFAULT }, value)
transaction.send(metric_method, :bau, value)
end
@@ -105,6 +105,9 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
namespace: "/projects")
env['api.endpoint'] = endpoint
+
+ # This is needed since we're not actually making a request, which would trigger the controller pushing to the context
+ ::Gitlab::ApplicationContext.push(feature_category: 'projects')
end
it 'provides labels with the method and path of the route in the grape endpoint' do
@@ -129,7 +132,7 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
include_context 'ActionController request'
it 'tags a transaction with the name and action of a controller' do
- expect(transaction.labels).to eq({ controller: 'TestController', action: 'show', feature_category: '' })
+ expect(transaction.labels).to eq({ controller: 'TestController', action: 'show', feature_category: ::Gitlab::FeatureCategories::FEATURE_CATEGORY_DEFAULT })
end
it 'contains only the labels defined for transactions' do
@@ -140,7 +143,7 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
let(:request) { double(:request, format: double(:format, ref: :json)) }
it 'appends the mime type to the transaction action' do
- expect(transaction.labels).to eq({ controller: 'TestController', action: 'show.json', feature_category: '' })
+ expect(transaction.labels).to eq({ controller: 'TestController', action: 'show.json', feature_category: ::Gitlab::FeatureCategories::FEATURE_CATEGORY_DEFAULT })
end
end
@@ -148,13 +151,15 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
let(:request) { double(:request, format: double(:format, ref: 'http://example.com')) }
it 'does not append the MIME type to the transaction action' do
- expect(transaction.labels).to eq({ controller: 'TestController', action: 'show', feature_category: '' })
+ expect(transaction.labels).to eq({ controller: 'TestController', action: 'show', feature_category: ::Gitlab::FeatureCategories::FEATURE_CATEGORY_DEFAULT })
end
end
context 'when the feature category is known' do
it 'includes it in the feature category label' do
- expect(controller_class).to receive(:feature_category_for_action).with('show').and_return(:source_code_management)
+ # This is needed since we're not actually making a request, which would trigger the controller pushing to the context
+ ::Gitlab::ApplicationContext.push(feature_category: 'source_code_management')
+
expect(transaction.labels).to eq({ controller: 'TestController', action: 'show', feature_category: "source_code_management" })
end
end
diff --git a/spec/lib/gitlab/middleware/go_spec.rb b/spec/lib/gitlab/middleware/go_spec.rb
index 7bac041cd65..0ce95fdb5af 100644
--- a/spec/lib/gitlab/middleware/go_spec.rb
+++ b/spec/lib/gitlab/middleware/go_spec.rb
@@ -98,7 +98,7 @@ RSpec.describe Gitlab::Middleware::Go do
end
end
- context 'without access to the project' do
+ context 'without access to the project', :sidekiq_inline do
before do
project.team.find_member(current_user).destroy
end
diff --git a/spec/lib/gitlab/middleware/multipart/handler_spec.rb b/spec/lib/gitlab/middleware/multipart/handler_spec.rb
index aac3f00defe..53b59b042e2 100644
--- a/spec/lib/gitlab/middleware/multipart/handler_spec.rb
+++ b/spec/lib/gitlab/middleware/multipart/handler_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Gitlab::Middleware::Multipart::Handler do
::Gitlab.config.uploads.storage_path,
::JobArtifactUploader.workhorse_upload_path,
::LfsObjectUploader.workhorse_upload_path,
+ ::DependencyProxy::FileUploader.workhorse_upload_path,
File.join(Rails.root, 'public/uploads/tmp')
]
end
diff --git a/spec/lib/gitlab/middleware/speedscope_spec.rb b/spec/lib/gitlab/middleware/speedscope_spec.rb
index bb830a2fbda..c1d452f69f8 100644
--- a/spec/lib/gitlab/middleware/speedscope_spec.rb
+++ b/spec/lib/gitlab/middleware/speedscope_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe Gitlab::Middleware::Speedscope do
allow(env).to receive(:[]).with('warden').and_return(double('Warden', user: create(:admin)))
end
- it 'runs StackProf and returns a flamegraph' do
+ it 'returns a flamegraph' do
expect(StackProf).to receive(:run).and_call_original
status, headers, body = middleware.call(env)
@@ -55,6 +55,56 @@ RSpec.describe Gitlab::Middleware::Speedscope do
expect(headers).to eq({ 'Content-Type' => 'text/html' })
expect(body.first).to include('speedscope-iframe')
end
+
+ context 'when the stackprof_mode parameter is set and valid' do
+ let(:env) { Rack::MockRequest.env_for('/', params: { 'performance_bar' => 'flamegraph', 'stackprof_mode' => 'cpu' }) }
+
+ it 'runs StackProf in the mode specified in the stackprof_mode parameter' do
+ expect(StackProf).to receive(:run).with(hash_including(mode: :cpu))
+
+ middleware.call(env)
+ end
+ end
+
+ context 'when the stackprof_mode parameter is not set' do
+ let(:env) { Rack::MockRequest.env_for('/', params: { 'performance_bar' => 'flamegraph' }) }
+
+ it 'runs StackProf in wall mode' do
+ expect(StackProf).to receive(:run).with(hash_including(mode: :wall))
+
+ middleware.call(env)
+ end
+ end
+
+ context 'when the stackprof_mode parameter is invalid' do
+ let(:env) { Rack::MockRequest.env_for('/', params: { 'performance_bar' => 'flamegraph', 'stackprof_mode' => 'invalid' }) }
+
+ it 'runs StackProf in wall mode' do
+ expect(StackProf).to receive(:run).with(hash_including(mode: :wall))
+
+ middleware.call(env)
+ end
+ end
+
+ context 'when the stackprof_mode parameter is set to object mode' do
+ let(:env) { Rack::MockRequest.env_for('/', params: { 'performance_bar' => 'flamegraph', 'stackprof_mode' => 'object' }) }
+
+ it 'runs StackProf with an interval of 100' do
+ expect(StackProf).to receive(:run).with(hash_including(interval: 100))
+
+ middleware.call(env)
+ end
+ end
+
+ context 'when the stackprof_mode parameter is not set to object mode' do
+ let(:env) { Rack::MockRequest.env_for('/', params: { 'performance_bar' => 'flamegraph', 'stackprof_mode' => 'wall' }) }
+
+ it 'runs StackProf with an interval of 10_100' do
+ expect(StackProf).to receive(:run).with(hash_including(interval: 10_100))
+
+ middleware.call(env)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb
index 4ce51e37685..00beacd4b35 100644
--- a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb
@@ -41,14 +41,40 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
)
end
- it 'returns records in correct order' do
+ let(:all_records) do
all_records = []
iterator.each_batch(of: batch_size) do |records|
all_records.concat(records)
end
+ all_records
+ end
+ it 'returns records in correct order' do
expect(all_records).to eq(expected_order)
end
+
+ context 'when not passing the finder query' do
+ before do
+ in_operator_optimization_options.delete(:finder_query)
+ end
+
+ it 'returns records in correct order' do
+ expect(all_records).to eq(expected_order)
+ end
+
+ it 'loads only the order by column' do
+ order_by_attribute_names = iterator
+ .send(:order)
+ .column_definitions
+ .map(&:attribute_name)
+ .map(&:to_s)
+
+ record = all_records.first
+ loaded_attributes = record.attributes.keys - ['time_estimate'] # time_estimate is always present (has default value)
+
+ expect(loaded_attributes).to eq(order_by_attribute_names)
+ end
+ end
end
context 'when ordering by issues.id DESC' do
diff --git a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/order_values_loader_strategy_spec.rb b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/order_values_loader_strategy_spec.rb
new file mode 100644
index 00000000000..fe95d5406dd
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/order_values_loader_strategy_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::Strategies::OrderValuesLoaderStrategy do
+ let(:model) { Project }
+
+ let(:keyset_scope) do
+ scope, _ = Gitlab::Pagination::Keyset::SimpleOrderBuilder.build(
+ Project.order(:created_at, :id)
+ )
+
+ scope
+ end
+
+ let(:keyset_order) do
+ Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(keyset_scope)
+ end
+
+ let(:order_by_columns) do
+ Gitlab::Pagination::Keyset::InOperatorOptimization::OrderByColumns.new(keyset_order.column_definitions, model.arel_table)
+ end
+
+ subject(:strategy) { described_class.new(model, order_by_columns) }
+
+ describe '#initializer_columns' do
+ it 'returns NULLs for each ORDER BY columns' do
+ expect(strategy.initializer_columns).to eq([
+ 'NULL::timestamp without time zone AS created_at',
+ 'NULL::integer AS id'
+ ])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/record_loader_strategy_spec.rb b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/record_loader_strategy_spec.rb
new file mode 100644
index 00000000000..5180403b493
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/record_loader_strategy_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::Strategies::RecordLoaderStrategy do
+ let(:finder_query) { -> (created_at_value, id_value) { Project.where(Project.arel_table[:id].eq(id_value)) } }
+ let(:model) { Project }
+
+ let(:keyset_scope) do
+ scope, _ = Gitlab::Pagination::Keyset::SimpleOrderBuilder.build(
+ Project.order(:created_at, :id)
+ )
+
+ scope
+ end
+
+ let(:keyset_order) do
+ Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(keyset_scope)
+ end
+
+ let(:order_by_columns) do
+ Gitlab::Pagination::Keyset::InOperatorOptimization::OrderByColumns.new(keyset_order.column_definitions, model.arel_table)
+ end
+
+ subject(:strategy) { described_class.new(finder_query, model, order_by_columns) }
+
+ describe '#initializer_columns' do
+ # Explanation:
+ # > SELECT NULL::projects AS records
+ #
+ # The query returns one row and one column. The column may contain a full project row.
+ # In this particular case the row is NULL.
+ it 'returns a NULL table row as the result column' do
+ expect(strategy.initializer_columns).to eq(["NULL::projects AS records"])
+ end
+ end
+
+ describe '#columns' do
+ # Explanation:
+ # > SELECT (SELECT projects FROM projects limit 1)
+ #
+ # Selects one row from the database and collapses it into one column.
+ #
+ # Side note: Due to the type casts, columns and initializer_columns can be also UNION-ed:
+ # SELECT * FROM (
+ # (
+ # SELECT NULL::projects AS records
+ # UNION
+ # SELECT (SELECT projects FROM projects limit 1)
+ # )
+ # ) as records
+ it 'uses the finder query to load the row in the result column' do
+ expected_loader_query = <<~SQL
+ (SELECT projects FROM "projects" WHERE "projects"."id" = recursive_keyset_cte.projects_id_array[position] LIMIT 1)
+ SQL
+
+ expect(strategy.columns).to eq([expected_loader_query.chomp])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
index d8e79287745..09cbca2c1cb 100644
--- a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
@@ -32,8 +32,11 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do
])
end
+ let(:iterator_params) { nil }
let(:scope) { project.issues.reorder(custom_reorder) }
+ subject(:iterator) { described_class.new(**iterator_params) }
+
shared_examples 'iterator examples' do
describe '.each_batch' do
it 'yields an ActiveRecord::Relation when a block is given' do
@@ -56,6 +59,29 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do
expect(count).to eq(9)
end
+ it 'continues after the cursor' do
+ loaded_records = []
+ cursor = nil
+
+ # stopping the iterator after the first batch and storing the cursor
+ iterator.each_batch(of: 2) do |relation| # rubocop: disable Lint/UnreachableLoop
+ loaded_records.concat(relation.to_a)
+ record = loaded_records.last
+
+ cursor = custom_reorder.cursor_attributes_for_node(record)
+ break
+ end
+
+ expect(loaded_records).to eq(project.issues.order(custom_reorder).take(2))
+
+ new_iterator = described_class.new(**iterator_params.merge(cursor: cursor))
+ new_iterator.each_batch(of: 2) do |relation|
+ loaded_records.concat(relation.to_a)
+ end
+
+ expect(loaded_records).to eq(project.issues.order(custom_reorder))
+ end
+
it 'allows updating of the yielded relations' do
time = Time.current
@@ -73,7 +99,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do
iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
- expect(positions).to eq(project.issues.order_relative_position_asc.order(id: :asc).pluck(:relative_position, :id))
+ expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_last_order('relative_position', 'ASC')).order(id: :asc).pluck(:relative_position, :id))
end
end
@@ -85,7 +111,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do
iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
- expect(positions).to eq(project.issues.order_relative_position_desc.order(id: :desc).pluck(:relative_position, :id))
+ expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_first_order('relative_position', 'DESC')).order(id: :desc).pluck(:relative_position, :id))
end
end
@@ -131,13 +157,13 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do
end
context 'when use_union_optimization is used' do
- subject(:iterator) { described_class.new(scope: scope, use_union_optimization: true) }
+ let(:iterator_params) { { scope: scope, use_union_optimization: true } }
include_examples 'iterator examples'
end
context 'when use_union_optimization is not used' do
- subject(:iterator) { described_class.new(scope: scope, use_union_optimization: false) }
+ let(:iterator_params) { { scope: scope, use_union_optimization: false } }
include_examples 'iterator examples'
end
diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb
index aa13660deb4..2f38ed58727 100644
--- a/spec/lib/gitlab/path_regex_spec.rb
+++ b/spec/lib/gitlab/path_regex_spec.rb
@@ -561,4 +561,25 @@ RSpec.describe Gitlab::PathRegex do
expect(subject.match('sha256:asdf1234%2f')[0]).to eq('sha256:asdf1234')
end
end
+
+ describe '.dependency_proxy_route_regex' do
+ subject { described_class.dependency_proxy_route_regex }
+
+ it { is_expected.to match('/v2/group1/dependency_proxy/containers/alpine/manifests/latest') }
+ it { is_expected.to match('/v2/group1/dependency_proxy/containers/alpine/blobs/sha256:14119a10abf4669e8cdbdff324a9f9605d99697215a0d21c360fe8dfa8471bab') }
+
+ it { is_expected.not_to match('') }
+ it { is_expected.not_to match('/v3/group1/dependency_proxy/containers/alpine/manifests/latest') }
+ it { is_expected.not_to match('/v2/group1/dependency_proxy/container/alpine/manifests/latest') }
+ it { is_expected.not_to match('/v2/group1/dependency_prox/containers/alpine/manifests/latest') }
+ it { is_expected.not_to match('/v2/group1/dependency_proxy/containers/alpine/manifest/latest') }
+ it { is_expected.not_to match('/v2/group1/dependency_proxy/containers/alpine/manifest/la%2Ftest') }
+ it { is_expected.not_to match('/v2/group1/dependency_proxy/containers/alpine/manifest/latest/../one') }
+ it { is_expected.not_to match('/v3/group1/dependency_proxy/containers/alpine/blobs/sha256:14119a10abf4669e8cdbdff324a9f9605d99697215a0d21c360fe8dfa8471bab') }
+ it { is_expected.not_to match('/v2/group1/dependency_proxy/container/alpine/blobs/sha256:14119a10abf4669e8cdbdff324a9f9605d99697215a0d21c360fe8dfa8471bab') }
+ it { is_expected.not_to match('/v2/group1/dependency_prox/containers/alpine/blobs/sha256:14119a10abf4669e8cdbdff324a9f9605d99697215a0d21c360fe8dfa8471bab') }
+ it { is_expected.not_to match('/v2/group1/dependency_proxy/containers/alpine/blob/sha256:14119a10abf4669e8cdbdff324a9f9605d99697215a0d21c360fe8dfa8471bab') }
+ it { is_expected.not_to match('/v2/group1/dependency_proxy/containers/alpine/blob/sha256:F14119a10abf4669e8cdbdff324a9f9605d99697215a0d21c360fe8dfa8471bab/../latest') }
+ it { is_expected.not_to match('/v2/group1/dependency_proxy/containers/alpine/blob/sha256:F14119a10abf4669e8cdbdff324a9f9605d99697215a0d21c360fe8dfa8471bab/latest') }
+ end
end
diff --git a/spec/lib/gitlab/performance_bar/stats_spec.rb b/spec/lib/gitlab/performance_bar/stats_spec.rb
index ad11eca56d1..b4f90745ee7 100644
--- a/spec/lib/gitlab/performance_bar/stats_spec.rb
+++ b/spec/lib/gitlab/performance_bar/stats_spec.rb
@@ -23,11 +23,23 @@ RSpec.describe Gitlab::PerformanceBar::Stats do
expect(logger).to receive(:info)
.with({ duration_ms: 1.096, filename: 'lib/gitlab/pagination/offset_pagination.rb',
method_path: 'lib/gitlab/pagination/offset_pagination.rb:add_pagination_headers',
- count: 1, request_id: 'foo', type: :sql })
+ count: 1, request_id: 'foo', query_type: 'active-record' })
expect(logger).to receive(:info)
.with({ duration_ms: 1.634, filename: 'lib/api/helpers.rb',
method_path: 'lib/api/helpers.rb:find_project',
- count: 2, request_id: 'foo', type: :sql })
+ count: 2, request_id: 'foo', query_type: 'active-record' })
+ expect(logger).to receive(:info)
+ .with({ duration_ms: 23.709, filename: 'lib/gitlab/gitaly_client/commit_service.rb',
+ method_path: 'lib/gitlab/gitaly_client/commit_service.rb:each',
+ count: 1, request_id: 'foo', query_type: 'gitaly' })
+ expect(logger).to receive(:info)
+ .with({ duration_ms: 6.678, filename: 'lib/gitlab/gitaly_client/commit_service.rb',
+ method_path: 'lib/gitlab/gitaly_client/commit_service.rb:call_find_commit',
+ count: 1, request_id: 'foo', query_type: 'gitaly' })
+ expect(logger).to receive(:info)
+ .with({ duration_ms: 0.155, filename: 'lib/feature.rb',
+ method_path: 'lib/feature.rb:enabled?',
+ count: 1, request_id: 'foo', query_type: 'redis' })
subject
end
diff --git a/spec/lib/gitlab/project_authorizations_spec.rb b/spec/lib/gitlab/project_authorizations_spec.rb
index d2b41ee31d9..16066934194 100644
--- a/spec/lib/gitlab/project_authorizations_spec.rb
+++ b/spec/lib/gitlab/project_authorizations_spec.rb
@@ -204,6 +204,43 @@ RSpec.describe Gitlab::ProjectAuthorizations do
end
end
+ context 'with shared projects' do
+ let_it_be(:shared_with_group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, group: create(:group)) }
+
+ let(:mapping) { map_access_levels(authorizations) }
+
+ before do
+ create(:project_group_link, :developer, project: project, group: shared_with_group)
+ shared_with_group.add_maintainer(user)
+ end
+
+ it 'creates proper authorizations' do
+ expect(mapping[project.id]).to eq(Gitlab::Access::DEVELOPER)
+ end
+
+ context 'even when the `lock_memberships_to_ldap` setting has been turned ON' do
+ before do
+ stub_application_setting(lock_memberships_to_ldap: true)
+ end
+
+ it 'creates proper authorizations' do
+ expect(mapping[project.id]).to eq(Gitlab::Access::DEVELOPER)
+ end
+ end
+
+ context 'when the group containing the project has forbidden group shares for any of its projects' do
+ before do
+ project.namespace.update!(share_with_group_lock: true)
+ end
+
+ it 'does not create authorizations' do
+ expect(mapping[project.id]).to be_nil
+ end
+ end
+ end
+
context 'with shared groups' do
let(:parent_group_user) { create(:user) }
let(:group_user) { create(:user) }
diff --git a/spec/lib/gitlab/rack_attack/request_spec.rb b/spec/lib/gitlab/rack_attack/request_spec.rb
index 3be7ec17e45..ecdcc23e588 100644
--- a/spec/lib/gitlab/rack_attack/request_spec.rb
+++ b/spec/lib/gitlab/rack_attack/request_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::RackAttack::Request do
+ using RSpec::Parameterized::TableSyntax
+
describe 'FILES_PATH_REGEX' do
subject { described_class::FILES_PATH_REGEX }
@@ -13,4 +15,33 @@ RSpec.describe Gitlab::RackAttack::Request do
it { is_expected.to match('/api/v4/projects/some%2Fnested%2Frepo/repository/files/README') }
it { is_expected.not_to match('/api/v4/projects/some/nested/repo/repository/files/README') }
end
+
+ describe '#deprecated_api_request?' do
+ let(:env) { { 'REQUEST_METHOD' => 'GET', 'rack.input' => StringIO.new, 'PATH_INFO' => path, 'QUERY_STRING' => query } }
+ let(:request) { ::Rack::Attack::Request.new(env) }
+
+ subject { !!request.__send__(:deprecated_api_request?) }
+
+ where(:path, :query, :expected) do
+ '/' | '' | false
+
+ '/api/v4/groups/1/' | '' | true
+ '/api/v4/groups/1' | '' | true
+ '/api/v4/groups/foo/' | '' | true
+ '/api/v4/groups/foo' | '' | true
+
+ '/api/v4/groups/1' | 'with_projects=' | true
+ '/api/v4/groups/1' | 'with_projects=1' | true
+ '/api/v4/groups/1' | 'with_projects=0' | false
+
+ '/foo/api/v4/groups/1' | '' | false
+ '/api/v4/groups/1/foo' | '' | false
+
+ '/api/v4/groups/nested%2Fgroup' | '' | true
+ end
+
+ with_them do
+ it { is_expected.to eq(expected) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/rate_limit_helpers_spec.rb b/spec/lib/gitlab/rate_limit_helpers_spec.rb
index d583c8e58fb..ad0e2de1448 100644
--- a/spec/lib/gitlab/rate_limit_helpers_spec.rb
+++ b/spec/lib/gitlab/rate_limit_helpers_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::RateLimitHelpers, :clean_gitlab_redis_cache do
+RSpec.describe Gitlab::RateLimitHelpers, :clean_gitlab_redis_rate_limiting do
let(:limiter_class) do
Class.new do
include ::Gitlab::RateLimitHelpers
diff --git a/spec/lib/gitlab/redis/queues_spec.rb b/spec/lib/gitlab/redis/queues_spec.rb
index 2e396cde3bf..a0f73a654e7 100644
--- a/spec/lib/gitlab/redis/queues_spec.rb
+++ b/spec/lib/gitlab/redis/queues_spec.rb
@@ -9,10 +9,24 @@ RSpec.describe Gitlab::Redis::Queues do
include_examples "redis_shared_examples"
describe '#raw_config_hash' do
- it 'has a legacy default URL' do
- expect(subject).to receive(:fetch_config) { false }
+ before do
+ expect(subject).to receive(:fetch_config) { config }
+ end
+
+ context 'when the config url is blank' do
+ let(:config) { nil }
+
+ it 'has a legacy default URL' do
+ expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6381' )
+ end
+ end
+
+ context 'when the config url is present' do
+ let(:config) { { url: 'redis://localhost:1111' } }
- expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6381' )
+ it 'sets the configured url' do
+ expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:1111' )
+ end
end
end
end
diff --git a/spec/lib/gitlab/redis/rate_limiting_spec.rb b/spec/lib/gitlab/redis/rate_limiting_spec.rb
new file mode 100644
index 00000000000..e79c070df93
--- /dev/null
+++ b/spec/lib/gitlab/redis/rate_limiting_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::RateLimiting do
+ include_examples "redis_new_instance_shared_examples", 'rate_limiting', Gitlab::Redis::Cache
+end
diff --git a/spec/lib/gitlab/redis/sessions_spec.rb b/spec/lib/gitlab/redis/sessions_spec.rb
new file mode 100644
index 00000000000..7e239c08e9f
--- /dev/null
+++ b/spec/lib/gitlab/redis/sessions_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::Sessions do
+ include_examples "redis_new_instance_shared_examples", 'sessions', Gitlab::Redis::SharedState
+end
diff --git a/spec/lib/gitlab/redis/trace_chunks_spec.rb b/spec/lib/gitlab/redis/trace_chunks_spec.rb
index e974dc519d6..bb3c3089430 100644
--- a/spec/lib/gitlab/redis/trace_chunks_spec.rb
+++ b/spec/lib/gitlab/redis/trace_chunks_spec.rb
@@ -3,53 +3,5 @@
require 'spec_helper'
RSpec.describe Gitlab::Redis::TraceChunks do
- let(:instance_specific_config_file) { "config/redis.trace_chunks.yml" }
- let(:environment_config_file_name) { "GITLAB_REDIS_TRACE_CHUNKS_CONFIG_FILE" }
- let(:shared_state_config_file) { nil }
-
- before do
- allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(shared_state_config_file)
- end
-
- include_examples "redis_shared_examples"
-
- describe '.config_file_name' do
- subject { described_class.config_file_name }
-
- let(:rails_root) { Dir.mktmpdir('redis_shared_examples') }
-
- before do
- # Undo top-level stub of config_file_name because we are testing that method now.
- allow(described_class).to receive(:config_file_name).and_call_original
-
- allow(described_class).to receive(:rails_root).and_return(rails_root)
- FileUtils.mkdir_p(File.join(rails_root, 'config'))
- end
-
- after do
- FileUtils.rm_rf(rails_root)
- end
-
- context 'when there is only a resque.yml' do
- before do
- FileUtils.touch(File.join(rails_root, 'config/resque.yml'))
- end
-
- it { expect(subject).to eq("#{rails_root}/config/resque.yml") }
-
- context 'and there is a global env override' do
- before do
- stub_env('GITLAB_REDIS_CONFIG_FILE', 'global override')
- end
-
- it { expect(subject).to eq('global override') }
-
- context 'and SharedState has a different config file' do
- let(:shared_state_config_file) { 'shared state config file' }
-
- it { expect(subject).to eq('shared state config file') }
- end
- end
- end
- end
+ include_examples "redis_new_instance_shared_examples", 'trace_chunks', Gitlab::Redis::SharedState
end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index f1b4e50b1eb..9514654204b 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -12,22 +12,29 @@ RSpec.describe Gitlab::Regex do
it { is_expected.to match('Dash – is this') }
end
- shared_examples_for 'project/group name regex' do
+ shared_examples_for 'group name regex' do
it_behaves_like 'project/group name chars regex'
it { is_expected.not_to match('?gitlab') }
it { is_expected.not_to match("Users's something") }
end
+ shared_examples_for 'project name regex' do
+ it_behaves_like 'project/group name chars regex'
+ it { is_expected.to match("Gitlab++") }
+ it { is_expected.not_to match('?gitlab') }
+ it { is_expected.not_to match("Users's something") }
+ end
+
describe '.project_name_regex' do
subject { described_class.project_name_regex }
- it_behaves_like 'project/group name regex'
+ it_behaves_like 'project name regex'
end
describe '.group_name_regex' do
subject { described_class.group_name_regex }
- it_behaves_like 'project/group name regex'
+ it_behaves_like 'group name regex'
it 'allows parenthesis' do
is_expected.to match('Group One (Test)')
@@ -51,7 +58,7 @@ RSpec.describe Gitlab::Regex do
describe '.project_name_regex_message' do
subject { described_class.project_name_regex_message }
- it { is_expected.to eq("can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'.") }
+ it { is_expected.to eq("can contain only letters, digits, emojis, '_', '.', '+', dashes, or spaces. It must start with a letter, digit, emoji, or '_'.") }
end
describe '.group_name_regex_message' do
@@ -646,13 +653,24 @@ RSpec.describe Gitlab::Regex do
it { is_expected.to match('release') }
it { is_expected.to match('my-repo') }
- it { is_expected.to match('my-repo42') }
+ it { is_expected.to match('My-Re_po') }
+ it { is_expected.to match('my_repo42') }
+ it { is_expected.to match('1.2.3') }
+ it { is_expected.to match('v1.2.3-beta-12') }
+ it { is_expected.to match('renovate_https-github.com-operator-framework-operator-lifecycle-manager.git-0.x') }
# Do not allow empty
it { is_expected.not_to match('') }
# Do not allow Unicode
it { is_expected.not_to match('hé') }
+
+ it { is_expected.not_to match('.1.23') }
+ it { is_expected.not_to match('1..23') }
+ it { is_expected.not_to match('1.2.3.') }
+ it { is_expected.not_to match('1..2.3.') }
+ it { is_expected.not_to match('1/../2.3.') }
+ it { is_expected.not_to match('1/..%2F2.3.') }
end
describe '.helm_package_regex' do
diff --git a/spec/lib/gitlab/request_endpoints_spec.rb b/spec/lib/gitlab/request_endpoints_spec.rb
new file mode 100644
index 00000000000..0c939bfb0ee
--- /dev/null
+++ b/spec/lib/gitlab/request_endpoints_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::RequestEndpoints do
+ describe '.all_api_endpoints' do
+ it 'selects all feature API classes' do
+ api_classes = described_class.all_api_endpoints.map { |route| route.app.options[:for] }
+
+ expect(api_classes).to all(include(Gitlab::EndpointAttributes))
+ end
+ end
+
+ describe '.all_controller_actions' do
+ it 'selects all feature controllers and action names' do
+ all_controller_actions = described_class.all_controller_actions
+ controller_classes = all_controller_actions.map(&:first)
+ all_actions = all_controller_actions.map(&:last)
+
+ expect(controller_classes).to all(include(Gitlab::EndpointAttributes))
+ expect(controller_classes).not_to include(ApplicationController, Devise::UnlocksController)
+ expect(all_actions).to all(be_a(String))
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index a98038cd3f8..d801b84775b 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
it 'logs start and end of job' do
- Timecop.freeze(timestamp) do
+ travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload).ordered
expect(logger).to receive(:info).with(end_payload).ordered
expect(subject).to receive(:log_job_start).and_call_original
@@ -34,7 +34,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
"wrapped" => "TestWorker"
)
- Timecop.freeze(timestamp) do
+ travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload).ordered
expect(logger).to receive(:info).with(end_payload).ordered
expect(subject).to receive(:log_job_start).and_call_original
@@ -45,7 +45,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
it 'logs an exception in job' do
- Timecop.freeze(timestamp) do
+ travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload)
expect(logger).to receive(:warn).with(include(exception_payload))
expect(subject).to receive(:log_job_start).and_call_original
@@ -60,7 +60,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
it 'logs the root cause of an Sidekiq::JobRetry::Skip exception in the job' do
- Timecop.freeze(timestamp) do
+ travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload)
expect(logger).to receive(:warn).with(include(exception_payload))
expect(subject).to receive(:log_job_start).and_call_original
@@ -77,7 +77,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
it 'logs the root cause of an Sidekiq::JobRetry::Handled exception in the job' do
- Timecop.freeze(timestamp) do
+ travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload)
expect(logger).to receive(:warn).with(include(exception_payload))
expect(subject).to receive(:log_job_start).and_call_original
@@ -94,7 +94,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
it 'keeps Sidekiq::JobRetry::Handled exception if the cause does not exist' do
- Timecop.freeze(timestamp) do
+ travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload)
expect(logger).to receive(:warn).with(
include(
@@ -116,7 +116,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
it 'does not modify the job' do
- Timecop.freeze(timestamp) do
+ travel_to(timestamp) do
job_copy = job.deep_dup
allow(logger).to receive(:info)
@@ -130,7 +130,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
it 'does not modify the wrapped job' do
- Timecop.freeze(timestamp) do
+ travel_to(timestamp) do
wrapped_job = job.merge(
"class" => "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper",
"wrapped" => "TestWorker"
@@ -154,7 +154,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
it 'logs start and end of job without args' do
- Timecop.freeze(timestamp) do
+ travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload.except('args')).ordered
expect(logger).to receive(:info).with(end_payload.except('args')).ordered
expect(subject).to receive(:log_job_start).and_call_original
@@ -165,7 +165,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
it 'logs without created_at and enqueued_at fields' do
- Timecop.freeze(timestamp) do
+ travel_to(timestamp) do
excluded_fields = %w(created_at enqueued_at args scheduling_latency_s)
expect(logger).to receive(:info).with(start_payload.except(*excluded_fields)).ordered
@@ -183,7 +183,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
let(:scheduling_latency_s) { 7200.0 }
it 'logs with scheduling latency' do
- Timecop.freeze(timestamp) do
+ travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload).ordered
expect(logger).to receive(:info).with(end_payload).ordered
expect(subject).to receive(:log_job_start).and_call_original
@@ -194,6 +194,35 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
+ context 'with enqueue latency' do
+ let(:expected_start_payload) do
+ start_payload.merge(
+ 'scheduled_at' => job['scheduled_at'],
+ 'enqueue_latency_s' => 1.hour.to_f
+ )
+ end
+
+ let(:expected_end_payload) do
+ end_payload.merge('enqueue_latency_s' => 1.hour.to_f)
+ end
+
+ before do
+ # enqueued_at is set to created_at
+ job['scheduled_at'] = created_at - 1.hour
+ end
+
+ it 'logs with enqueue latency' do
+ travel_to(timestamp) do
+ expect(logger).to receive(:info).with(expected_start_payload).ordered
+ expect(logger).to receive(:info).with(expected_end_payload).ordered
+ expect(subject).to receive(:log_job_start).and_call_original
+ expect(subject).to receive(:log_job_done).and_call_original
+
+ call_subject(job, 'test_queue') { }
+ end
+ end
+ end
+
context 'with Gitaly, Rugged, and Redis calls' do
let(:timing_data) do
{
@@ -218,7 +247,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
it 'logs with Gitaly and Rugged timing data', :aggregate_failures do
- Timecop.freeze(timestamp) do
+ travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload).ordered
expect(logger).to receive(:info).with(expected_end_payload).ordered
@@ -243,8 +272,22 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expected_end_payload.merge(
'db_duration_s' => a_value >= 0.1,
'db_count' => a_value >= 1,
- 'db_cached_count' => 0,
- 'db_write_count' => 0
+ "db_replica_#{db_config_name}_count" => 0,
+ 'db_replica_duration_s' => a_value >= 0,
+ 'db_primary_count' => a_value >= 1,
+ "db_primary_#{db_config_name}_count" => a_value >= 1,
+ 'db_primary_duration_s' => a_value > 0,
+ "db_primary_#{db_config_name}_duration_s" => a_value > 0
+ )
+ end
+
+ let(:end_payload) do
+ start_payload.merge(db_payload_defaults).merge(
+ 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: done: 0.0 sec',
+ 'job_status' => 'done',
+ 'duration_s' => 0.0,
+ 'completed_at' => timestamp.to_f,
+ 'cpu_s' => 1.111112
)
end
@@ -274,59 +317,9 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
- context 'when load balancing is disabled' do
- before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(false)
- end
-
- let(:expected_end_payload_with_db) do
- expected_end_payload.merge(
- 'db_duration_s' => a_value >= 0.1,
- 'db_count' => a_value >= 1,
- 'db_cached_count' => 0,
- 'db_write_count' => 0
- )
- end
-
- include_examples 'performs database queries'
- end
-
- context 'when load balancing is enabled', :db_load_balancing do
- let(:db_config_name) { ::Gitlab::Database.db_config_name(ApplicationRecord.connection) }
-
- let(:expected_db_payload_defaults) do
- metrics =
- ::Gitlab::Metrics::Subscribers::ActiveRecord.load_balancing_metric_counter_keys +
- ::Gitlab::Metrics::Subscribers::ActiveRecord.load_balancing_metric_duration_keys +
- ::Gitlab::Metrics::Subscribers::ActiveRecord.db_counter_keys +
- [:db_duration_s]
-
- metrics.each_with_object({}) do |key, result|
- result[key.to_s] = 0
- end
- end
-
- let(:expected_end_payload_with_db) do
- expected_end_payload.merge(expected_db_payload_defaults).merge(
- 'db_duration_s' => a_value >= 0.1,
- 'db_count' => a_value >= 1,
- "db_replica_#{db_config_name}_count" => 0,
- 'db_replica_duration_s' => a_value >= 0,
- 'db_primary_count' => a_value >= 1,
- "db_primary_#{db_config_name}_count" => a_value >= 1,
- 'db_primary_duration_s' => a_value > 0,
- "db_primary_#{db_config_name}_duration_s" => a_value > 0
- )
- end
-
- let(:end_payload) do
- start_payload.merge(expected_db_payload_defaults).merge(
- 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: done: 0.0 sec',
- 'job_status' => 'done',
- 'duration_s' => 0.0,
- 'completed_at' => timestamp.to_f,
- 'cpu_s' => 1.111112
- )
+ context 'when load balancing is enabled' do
+ let(:db_config_name) do
+ ::Gitlab::Database.db_config_name(ApplicationRecord.retrieve_connection)
end
include_examples 'performs database queries'
@@ -359,7 +352,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
it 'logs it in the done log' do
- Timecop.freeze(timestamp) do
+ travel_to(timestamp) do
expect(logger).to receive(:info).with(expected_start_payload).ordered
expect(logger).to receive(:info).with(expected_end_payload).ordered
@@ -401,7 +394,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
it 'logs it in the done log' do
- Timecop.freeze(timestamp) do
+ travel_to(timestamp) do
expect(logger).to receive(:info).with(expected_start_payload).ordered
expect(logger).to receive(:info).with(expected_end_payload).ordered
@@ -426,13 +419,13 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
'message' => 'my-message',
'job_status' => 'my-job-status',
'duration_s' => 0.123123,
- 'completed_at' => current_utc_time.to_f }
+ 'completed_at' => current_utc_time.to_i }
end
subject { described_class.new }
it 'update payload correctly' do
- Timecop.freeze(current_utc_time) do
+ travel_to(current_utc_time) do
subject.send(:add_time_keys!, time, payload)
expect(payload).to eq(payload_with_time_keys)
diff --git a/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
index 698758a13fd..dca00c85e30 100644
--- a/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
@@ -62,6 +62,27 @@ RSpec.describe Gitlab::SidekiqMiddleware::ClientMetrics do
Sidekiq::Testing.inline! { TestWorker.perform_in(1.second) }
end
+
+ it 'sets the scheduled_at field' do
+ job = { 'at' => Time.current }
+
+ subject.call('TestWorker', job, 'queue', nil) do
+ expect(job[:scheduled_at]).to eq(job['at'])
+ end
+ end
+ end
+
+ context 'when the worker class cannot be found' do
+ it 'increments enqueued jobs metric with the worker labels set to NilClass' do
+ test_anonymous_worker = Class.new(TestWorker)
+
+ expect(enqueued_jobs_metric).to receive(:increment).with(a_hash_including(worker: 'NilClass'), 1)
+
+ # Sidekiq won't be able to create an instance of this class
+ expect do
+ Sidekiq::Testing.inline! { test_anonymous_worker.perform_async }
+ end.to raise_error(NameError)
+ end
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index cc69a11f7f8..5083ac514db 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -472,6 +472,26 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
expect(duplicate_job).to be_idempotent
end
end
+
+ context 'when worker class is utilizing load balancing capabilities' do
+ before do
+ allow(AuthorizedProjectsWorker).to receive(:utilizes_load_balancing_capabilities?).and_return(true)
+ end
+
+ it 'returns true' do
+ expect(duplicate_job).to be_idempotent
+ end
+
+ context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do
+ before do
+ stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false)
+ end
+
+ it 'returns false' do
+ expect(duplicate_job).not_to be_idempotent
+ end
+ end
+ end
end
def existing_wal_location_key(idempotency_key, config_name)
diff --git a/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb b/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb
index 0d1616c4aed..1667622ad8e 100644
--- a/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::MemoryKiller do
expect(subject).to receive(:sleep).with(30).ordered
expect(Process).to receive(:kill).with('SIGTERM', pid).ordered
- expect(subject).to receive(:sleep).with(10).ordered
+ expect(subject).to receive(:sleep).with(Sidekiq.options[:timeout] + 2).ordered
expect(Process).to receive(:kill).with('SIGKILL', pid).ordered
expect(Sidekiq.logger)
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index cae0bb6b167..914f5a30c3a 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -211,6 +211,9 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
end
end
+ include_context 'server metrics with mocked prometheus'
+ include_context 'server metrics call'
+
before do
stub_const('TestWorker', Class.new)
TestWorker.class_eval do
@@ -234,9 +237,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
end
end
- include_context 'server metrics with mocked prometheus'
- include_context 'server metrics call'
-
shared_context 'worker declaring data consistency' do
let(:worker_class) { LBTestWorker }
@@ -250,61 +250,93 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
end
end
- context 'when load_balancing is enabled' do
- before do
- allow(::Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
- end
+ describe '#call' do
+ context 'when worker declares data consistency' do
+ include_context 'worker declaring data consistency'
- describe '#call' do
- context 'when worker declares data consistency' do
- include_context 'worker declaring data consistency'
+ it 'increments load balancing counter with defined data consistency' do
+ process_job
+
+ expect(load_balancing_metric).to have_received(:increment).with(
+ a_hash_including(
+ data_consistency: :delayed,
+ load_balancing_strategy: 'replica'
+ ), 1)
+ end
+ end
- it 'increments load balancing counter with defined data consistency' do
- process_job
+ context 'when worker does not declare data consistency' do
+ it 'increments load balancing counter with default data consistency' do
+ process_job
- expect(load_balancing_metric).to have_received(:increment).with(
- a_hash_including(
- data_consistency: :delayed,
- load_balancing_strategy: 'replica'
- ), 1)
- end
+ expect(load_balancing_metric).to have_received(:increment).with(
+ a_hash_including(
+ data_consistency: :always,
+ load_balancing_strategy: 'primary'
+ ), 1)
end
+ end
+ end
+ end
- context 'when worker does not declare data consistency' do
- it 'increments load balancing counter with default data consistency' do
- process_job
+ context 'feature attribution' do
+ let(:test_worker) do
+ category = worker_category
- expect(load_balancing_metric).to have_received(:increment).with(
- a_hash_including(
- data_consistency: :always,
- load_balancing_strategy: 'primary'
- ), 1)
- end
+ Class.new do
+ include Sidekiq::Worker
+ include WorkerAttributes
+
+ if category
+ feature_category category
+ else
+ feature_category_not_owned!
+ end
+
+ def perform
end
end
end
- context 'when load_balancing is disabled' do
- include_context 'worker declaring data consistency'
+ let(:context_category) { 'continuous_integration' }
+ let(:job) { { 'meta.feature_category' => 'continuous_integration' } }
- before do
- allow(::Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(false)
+ before do
+ stub_const('TestWorker', test_worker)
+ end
+
+ around do |example|
+ with_sidekiq_server_middleware do |chain|
+ Gitlab::SidekiqMiddleware.server_configurator(
+ metrics: true,
+ arguments_logger: false,
+ memory_killer: false
+ ).call(chain)
+
+ Sidekiq::Testing.inline! { example.run }
end
+ end
- describe '#initialize' do
- it 'does not set load_balancing metrics' do
- expect(Gitlab::Metrics).not_to receive(:counter).with(:sidekiq_load_balancing_count, anything)
+ include_context 'server metrics with mocked prometheus'
+ include_context 'server metrics call'
- subject
- end
+ context 'when a worker has a feature category' do
+ let(:worker_category) { 'authentication_and_authorization' }
+
+ it 'uses that category for metrics' do
+ expect(completion_seconds_metric).to receive(:observe).with(a_hash_including(feature_category: worker_category), anything)
+
+ TestWorker.process_job(job)
end
+ end
- describe '#call' do
- it 'does not increment load balancing counter' do
- process_job
+ context 'when a worker does not have a feature category' do
+ let(:worker_category) { nil }
- expect(load_balancing_metric).not_to have_received(:increment)
- end
+ it 'uses the category from the context for metrics' do
+ expect(completion_seconds_metric).to receive(:observe).with(a_hash_including(feature_category: context_category), anything)
+
+ TestWorker.process_job(job)
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
index d6cc787f53d..92a11c83a4a 100644
--- a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
@@ -22,8 +22,31 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Client do
end
end
+ let(:not_owned_worker_class) do
+ Class.new(worker_class) do
+ def self.name
+ 'TestNotOwnedWithContextWorker'
+ end
+
+ feature_category_not_owned!
+ end
+ end
+
+ let(:mailer_class) do
+ Class.new(ApplicationMailer) do
+ def self.name
+ 'TestMailer'
+ end
+
+ def test_mail
+ end
+ end
+ end
+
before do
- stub_const('TestWithContextWorker', worker_class)
+ stub_const(worker_class.name, worker_class)
+ stub_const(not_owned_worker_class.name, not_owned_worker_class)
+ stub_const(mailer_class.name, mailer_class)
end
describe "#call" do
@@ -58,6 +81,26 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Client do
expect(job1['meta.feature_category']).to eq('issue_tracking')
expect(job2['meta.feature_category']).to eq('issue_tracking')
end
+
+ it 'takes the feature category from the caller if the worker is not owned' do
+ TestNotOwnedWithContextWorker.bulk_perform_async_with_contexts(
+ %w(job1 job2),
+ arguments_proc: -> (name) { [name, 1, 2, 3] },
+ context_proc: -> (_) { { feature_category: 'code_review' } }
+ )
+
+ job1 = TestNotOwnedWithContextWorker.job_for_args(['job1', 1, 2, 3])
+ job2 = TestNotOwnedWithContextWorker.job_for_args(['job2', 1, 2, 3])
+
+ expect(job1['meta.feature_category']).to eq('code_review')
+ expect(job2['meta.feature_category']).to eq('code_review')
+ end
+
+ it 'does not set any explicit feature category for mailers', :sidekiq_mailers do
+ expect(Gitlab::ApplicationContext).not_to receive(:with_context)
+
+ TestMailer.test_mail.deliver_later
+ end
end
context 'when the feature category is already set in the surrounding block' do
@@ -76,6 +119,22 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Client do
expect(job1['meta.feature_category']).to eq('issue_tracking')
expect(job2['meta.feature_category']).to eq('issue_tracking')
end
+
+ it 'takes the feature category from the caller if the worker is not owned' do
+ Gitlab::ApplicationContext.with_context(feature_category: 'authentication_and_authorization') do
+ TestNotOwnedWithContextWorker.bulk_perform_async_with_contexts(
+ %w(job1 job2),
+ arguments_proc: -> (name) { [name, 1, 2, 3] },
+ context_proc: -> (_) { {} }
+ )
+ end
+
+ job1 = TestNotOwnedWithContextWorker.job_for_args(['job1', 1, 2, 3])
+ job2 = TestNotOwnedWithContextWorker.job_for_args(['job2', 1, 2, 3])
+
+ expect(job1['meta.feature_category']).to eq('authentication_and_authorization')
+ expect(job2['meta.feature_category']).to eq('authentication_and_authorization')
+ end
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
index f736a7db774..377ff6fd166 100644
--- a/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Server do
- let(:worker_class) do
+ let(:test_worker) do
Class.new do
def self.name
"TestWorker"
@@ -23,6 +23,16 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Server do
end
end
+ let(:not_owned_worker) do
+ Class.new(test_worker) do
+ def self.name
+ "NotOwnedWorker"
+ end
+
+ feature_category_not_owned!
+ end
+ end
+
let(:other_worker) do
Class.new do
def self.name
@@ -37,7 +47,8 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Server do
end
before do
- stub_const("TestWorker", worker_class)
+ stub_const("TestWorker", test_worker)
+ stub_const("NotOwnedWorker", not_owned_worker)
stub_const("OtherWorker", other_worker)
end
@@ -57,10 +68,24 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Server do
expect(TestWorker.contexts['identifier'].keys).not_to include('meta.user')
end
- it 'takes the feature category from the worker' do
- TestWorker.perform_async('identifier', 1)
+ context 'feature category' do
+ it 'takes the feature category from the worker' do
+ Gitlab::ApplicationContext.with_context(feature_category: 'authentication_and_authorization') do
+ TestWorker.perform_async('identifier', 1)
+ end
+
+ expect(TestWorker.contexts['identifier']).to include('meta.feature_category' => 'foo')
+ end
- expect(TestWorker.contexts['identifier']).to include('meta.feature_category' => 'foo')
+ context 'when the worker is not owned' do
+ it 'takes the feature category from the surrounding context' do
+ Gitlab::ApplicationContext.with_context(feature_category: 'authentication_and_authorization') do
+ NotOwnedWorker.perform_async('identifier', 1)
+ end
+
+ expect(NotOwnedWorker.contexts['identifier']).to include('meta.feature_category' => 'authentication_and_authorization')
+ end
+ end
end
it "doesn't fail for unknown workers" do
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index 8285cf960d2..e687c8e8cf7 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -28,9 +28,8 @@ RSpec.describe Gitlab::SidekiqMiddleware do
stub_const('TestWorker', worker_class)
end
- shared_examples "a middleware chain" do |load_balancing_enabled|
+ shared_examples "a middleware chain" do
before do
- allow(::Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(load_balancing_enabled)
configurator.call(chain)
end
@@ -45,10 +44,10 @@ RSpec.describe Gitlab::SidekiqMiddleware do
end
end
- shared_examples "a middleware chain for mailer" do |load_balancing_enabled|
+ shared_examples "a middleware chain for mailer" do
let(:worker_class) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper }
- it_behaves_like "a middleware chain", load_balancing_enabled
+ it_behaves_like "a middleware chain"
end
describe '.server_configurator' do
@@ -58,13 +57,13 @@ RSpec.describe Gitlab::SidekiqMiddleware do
let(:all_sidekiq_middlewares) do
[
::Gitlab::SidekiqMiddleware::Monitor,
+ ::Labkit::Middleware::Sidekiq::Server,
::Gitlab::SidekiqMiddleware::ServerMetrics,
::Gitlab::SidekiqMiddleware::ArgumentsLogger,
::Gitlab::SidekiqMiddleware::MemoryKiller,
::Gitlab::SidekiqMiddleware::RequestStoreMiddleware,
::Gitlab::SidekiqMiddleware::ExtraDoneLogMetadata,
::Gitlab::SidekiqMiddleware::BatchLoader,
- ::Labkit::Middleware::Sidekiq::Server,
::Gitlab::SidekiqMiddleware::InstrumentationLogger,
::Gitlab::SidekiqMiddleware::AdminMode::Server,
::Gitlab::SidekiqVersioning::Middleware,
@@ -105,25 +104,8 @@ RSpec.describe Gitlab::SidekiqMiddleware do
end
context "all optional middlewares on" do
- context "when load balancing is enabled" do
- before do
- allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :release_host)
- end
-
- it_behaves_like "a middleware chain", true
- it_behaves_like "a middleware chain for mailer", true
- end
-
- context "when load balancing is disabled" do
- let(:disabled_sidekiq_middlewares) do
- [
- Gitlab::Database::LoadBalancing::SidekiqServerMiddleware
- ]
- end
-
- it_behaves_like "a middleware chain", false
- it_behaves_like "a middleware chain for mailer", false
- end
+ it_behaves_like "a middleware chain"
+ it_behaves_like "a middleware chain for mailer"
end
context "all optional middlewares off" do
@@ -135,36 +117,16 @@ RSpec.describe Gitlab::SidekiqMiddleware do
)
end
- context "when load balancing is enabled" do
- let(:disabled_sidekiq_middlewares) do
- [
- Gitlab::SidekiqMiddleware::ServerMetrics,
- Gitlab::SidekiqMiddleware::ArgumentsLogger,
- Gitlab::SidekiqMiddleware::MemoryKiller
- ]
- end
-
- before do
- allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :release_host)
- end
-
- it_behaves_like "a middleware chain", true
- it_behaves_like "a middleware chain for mailer", true
+ let(:disabled_sidekiq_middlewares) do
+ [
+ Gitlab::SidekiqMiddleware::ServerMetrics,
+ Gitlab::SidekiqMiddleware::ArgumentsLogger,
+ Gitlab::SidekiqMiddleware::MemoryKiller
+ ]
end
- context "when load balancing is disabled" do
- let(:disabled_sidekiq_middlewares) do
- [
- Gitlab::SidekiqMiddleware::ServerMetrics,
- Gitlab::SidekiqMiddleware::ArgumentsLogger,
- Gitlab::SidekiqMiddleware::MemoryKiller,
- Gitlab::Database::LoadBalancing::SidekiqServerMiddleware
- ]
- end
-
- it_behaves_like "a middleware chain", false
- it_behaves_like "a middleware chain for mailer", false
- end
+ it_behaves_like "a middleware chain"
+ it_behaves_like "a middleware chain for mailer"
end
end
@@ -186,30 +148,7 @@ RSpec.describe Gitlab::SidekiqMiddleware do
]
end
- context "when load balancing is disabled" do
- let(:disabled_sidekiq_middlewares) do
- [
- Gitlab::Database::LoadBalancing::SidekiqClientMiddleware
- ]
- end
-
- it_behaves_like "a middleware chain", false
- it_behaves_like "a middleware chain for mailer", false
-
- # Sidekiq documentation states that the worker class could be a string
- # or a class reference. We should test for both
- context "worker_class as string value" do
- let(:worker_args) { [worker_class.to_s, { 'args' => job_args }, queue, redis_pool] }
- let(:middleware_expected_args) { [worker_class.to_s, hash_including({ 'args' => job_args }), queue, redis_pool] }
-
- it_behaves_like "a middleware chain", false
- it_behaves_like "a middleware chain for mailer", false
- end
- end
-
- context "when load balancing is enabled" do
- it_behaves_like "a middleware chain", true
- it_behaves_like "a middleware chain for mailer", true
- end
+ it_behaves_like "a middleware chain"
+ it_behaves_like "a middleware chain for mailer"
end
end
diff --git a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
index b30143ed196..d4391d3023a 100644
--- a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
+++ b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
expect(item).to include('queue' => 'post_receive', 'args' => [i])
end
- expect(score).to eq(i.succ.hours.from_now.to_i)
+ expect(score).to be_within(schedule_jitter).of(i.succ.hours.from_now.to_i)
end
end
end
@@ -84,7 +84,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
expect(item).to include('queue' => 'another_queue', 'args' => [i])
end
- expect(score).to eq(i.succ.hours.from_now.to_i)
+ expect(score).to be_within(schedule_jitter).of(i.succ.hours.from_now.to_i)
end
end
end
@@ -98,7 +98,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
set_after.each.with_index do |(item, score), i|
expect(item).to include('queue' => 'new_queue', 'args' => [i])
- expect(score).to eq(i.succ.hours.from_now.to_i)
+ expect(score).to be_within(schedule_jitter).of(i.succ.hours.from_now.to_i)
end
end
end
@@ -173,6 +173,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
context 'scheduled jobs' do
let(:set_name) { 'schedule' }
+ let(:schedule_jitter) { 0 }
def create_jobs(include_post_receive: true)
AuthorizedProjectsWorker.perform_in(1.hour, 0)
@@ -186,12 +187,14 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
context 'retried jobs' do
let(:set_name) { 'retry' }
+ # Account for Sidekiq retry jitter
+ # https://github.com/mperham/sidekiq/blob/3575ccb44c688dd08bfbfd937696260b12c622fb/lib/sidekiq/job_retry.rb#L217
+ let(:schedule_jitter) { 10 }
# Try to mimic as closely as possible what Sidekiq will actually
# do to retry a job.
def retry_in(klass, time, args)
- # In Sidekiq 6, this argument will become a JSON string
- message = { 'class' => klass, 'args' => [args], 'retry' => true }
+ message = { 'class' => klass.name, 'args' => [args], 'retry' => true }.to_json
allow(klass).to receive(:sidekiq_retry_in_block).and_return(proc { time })
diff --git a/spec/lib/gitlab/sidekiq_versioning/manager_spec.rb b/spec/lib/gitlab/sidekiq_versioning/manager_spec.rb
deleted file mode 100644
index 84161d9236e..00000000000
--- a/spec/lib/gitlab/sidekiq_versioning/manager_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::SidekiqVersioning::Manager do
- before do
- Sidekiq::Manager.prepend described_class
- end
-
- describe '#initialize' do
- it 'listens on all expanded queues' do
- manager = Sidekiq::Manager.new(queues: %w[post_receive repository_fork cronjob unknown])
-
- queues = manager.options[:queues]
-
- expect(queues).to include('post_receive')
- expect(queues).to include('repository_fork')
- expect(queues).to include('cronjob')
- expect(queues).to include('cronjob:import_stuck_project_import_jobs')
- expect(queues).to include('cronjob:jira_import_stuck_jira_import_jobs')
- expect(queues).to include('cronjob:stuck_merge_jobs')
- expect(queues).to include('unknown')
- end
- end
-end
diff --git a/spec/lib/gitlab/sidekiq_versioning_spec.rb b/spec/lib/gitlab/sidekiq_versioning_spec.rb
index ed9650fc166..afafd04d87d 100644
--- a/spec/lib/gitlab/sidekiq_versioning_spec.rb
+++ b/spec/lib/gitlab/sidekiq_versioning_spec.rb
@@ -29,12 +29,6 @@ RSpec.describe Gitlab::SidekiqVersioning, :redis do
end
describe '.install!' do
- it 'prepends SidekiqVersioning::Manager into Sidekiq::Manager' do
- described_class.install!
-
- expect(Sidekiq::Manager).to include(Gitlab::SidekiqVersioning::Manager)
- end
-
it 'registers all versionless and versioned queues with Redis' do
described_class.install!
diff --git a/spec/lib/gitlab/slash_commands/issue_move_spec.rb b/spec/lib/gitlab/slash_commands/issue_move_spec.rb
index 5fffbb2d4cc..aa1341b4148 100644
--- a/spec/lib/gitlab/slash_commands/issue_move_spec.rb
+++ b/spec/lib/gitlab/slash_commands/issue_move_spec.rb
@@ -95,7 +95,7 @@ RSpec.describe Gitlab::SlashCommands::IssueMove, service: true do
end
end
- context 'when the user cannot see the target project' do
+ context 'when the user cannot see the target project', :sidekiq_inline do
it 'returns not found' do
message = "issue move #{issue.iid} #{other_project.full_path}"
other_project.team.truncate
diff --git a/spec/lib/gitlab/subscription_portal_spec.rb b/spec/lib/gitlab/subscription_portal_spec.rb
index 628eb380396..a3808b0f0e2 100644
--- a/spec/lib/gitlab/subscription_portal_spec.rb
+++ b/spec/lib/gitlab/subscription_portal_spec.rb
@@ -5,23 +5,96 @@ require 'spec_helper'
RSpec.describe ::Gitlab::SubscriptionPortal do
using RSpec::Parameterized::TableSyntax
- where(:method_name, :test, :development, :result) do
- :default_subscriptions_url | false | false | 'https://customers.gitlab.com'
- :default_subscriptions_url | false | true | 'https://customers.stg.gitlab.com'
- :default_subscriptions_url | true | false | 'https://customers.stg.gitlab.com'
- :payment_form_url | false | false | 'https://customers.gitlab.com/payment_forms/cc_validation'
- :payment_form_url | false | true | 'https://customers.stg.gitlab.com/payment_forms/cc_validation'
- :payment_form_url | true | false | 'https://customers.stg.gitlab.com/payment_forms/cc_validation'
+ let(:env_value) { nil }
+
+ before do
+ stub_env('CUSTOMER_PORTAL_URL', env_value)
+ stub_feature_flags(new_customersdot_staging_url: false)
end
- with_them do
- subject { described_class.method(method_name).call }
+ describe '.default_subscriptions_url' do
+ where(:test, :development, :result) do
+ false | false | 'https://customers.gitlab.com'
+ false | true | 'https://customers.stg.gitlab.com'
+ true | false | 'https://customers.stg.gitlab.com'
+ end
before do
allow(Rails).to receive_message_chain(:env, :test?).and_return(test)
allow(Rails).to receive_message_chain(:env, :development?).and_return(development)
end
- it { is_expected.to eq(result) }
+ with_them do
+ subject { described_class.default_subscriptions_url }
+
+ it { is_expected.to eq(result) }
+ end
+ end
+
+ describe '.subscriptions_url' do
+ subject { described_class.subscriptions_url }
+
+ context 'when CUSTOMER_PORTAL_URL ENV is unset' do
+ it { is_expected.to eq('https://customers.stg.gitlab.com') }
+ end
+
+ context 'when CUSTOMER_PORTAL_URL ENV is set' do
+ let(:env_value) { 'https://customers.example.com' }
+
+ it { is_expected.to eq(env_value) }
+ end
+ end
+
+ describe '.subscriptions_comparison_url' do
+ subject { described_class.subscriptions_comparison_url }
+
+ link_match = %r{\Ahttps://about\.gitlab\.((cn/pricing/saas)|(com/pricing/gitlab-com))/feature-comparison\z}
+
+ it { is_expected.to match(link_match) }
+ end
+
+ context 'url methods' do
+ where(:method_name, :result) do
+ :default_subscriptions_url | 'https://customers.stg.gitlab.com'
+ :payment_form_url | 'https://customers.stg.gitlab.com/payment_forms/cc_validation'
+ :subscriptions_graphql_url | 'https://customers.stg.gitlab.com/graphql'
+ :subscriptions_more_minutes_url | 'https://customers.stg.gitlab.com/buy_pipeline_minutes'
+ :subscriptions_more_storage_url | 'https://customers.stg.gitlab.com/buy_storage'
+ :subscriptions_manage_url | 'https://customers.stg.gitlab.com/subscriptions'
+ :subscriptions_plans_url | 'https://customers.stg.gitlab.com/plans'
+ :subscriptions_instance_review_url | 'https://customers.stg.gitlab.com/instance_review'
+ :subscriptions_gitlab_plans_url | 'https://customers.stg.gitlab.com/gitlab_plans'
+ end
+
+ with_them do
+ subject { described_class.send(method_name) }
+
+ it { is_expected.to eq(result) }
+ end
+ end
+
+ describe '.add_extra_seats_url' do
+ subject { described_class.add_extra_seats_url(group_id) }
+
+ let(:group_id) { 153 }
+
+ it { is_expected.to eq("https://customers.stg.gitlab.com/gitlab/namespaces/#{group_id}/extra_seats") }
+ end
+
+ describe '.upgrade_subscription_url' do
+ subject { described_class.upgrade_subscription_url(group_id, plan_id) }
+
+ let(:group_id) { 153 }
+ let(:plan_id) { 5 }
+
+ it { is_expected.to eq("https://customers.stg.gitlab.com/gitlab/namespaces/#{group_id}/upgrade/#{plan_id}") }
+ end
+
+ describe '.renew_subscription_url' do
+ subject { described_class.renew_subscription_url(group_id) }
+
+ let(:group_id) { 153 }
+
+ it { is_expected.to eq("https://customers.stg.gitlab.com/gitlab/namespaces/#{group_id}/renew") }
end
end
diff --git a/spec/lib/gitlab/tracking/docs/helper_spec.rb b/spec/lib/gitlab/tracking/docs/helper_spec.rb
deleted file mode 100644
index 5f7965502f1..00000000000
--- a/spec/lib/gitlab/tracking/docs/helper_spec.rb
+++ /dev/null
@@ -1,91 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Tracking::Docs::Helper do
- let_it_be(:klass) do
- Class.new do
- include Gitlab::Tracking::Docs::Helper
- end
- end
-
- describe '#auto_generated_comment' do
- it 'renders information about missing description' do
- expect(klass.new.auto_generated_comment).to match /This documentation is auto generated by a script/
- end
- end
-
- describe '#render_description' do
- context 'description is empty' do
- it 'renders information about missing description' do
- object = double(description: '')
-
- expect(klass.new.render_description(object)).to eq('Missing description')
- end
- end
-
- context 'description is present' do
- it 'render description' do
- object = double(description: 'some description')
-
- expect(klass.new.render_description(object)).to eq('some description')
- end
- end
- end
-
- describe '#render_event_taxonomy' do
- it 'render table with event taxonomy' do
- attributes = {
- category: 'epics',
- action: 'promote',
- label: nil,
- property_description: 'String with issue id',
- value_description: 'Integer issue id'
- }
- object = double(attributes: attributes)
- event_taxonomy = <<~MD.chomp
- | category | action | label | property | value |
- |---|---|---|---|---|
- | `epics` | `promote` | `` | `String with issue id` | `Integer issue id` |
- MD
-
- expect(klass.new.render_event_taxonomy(object)).to eq(event_taxonomy)
- end
- end
-
- describe '#md_link_to' do
- it 'render link in md format' do
- expect(klass.new.md_link_to('zelda', 'link')).to eq('[zelda](link)')
- end
- end
-
- describe '#render_owner' do
- it 'render information about group owning event' do
- object = double(product_group: "group::product intelligence")
-
- expect(klass.new.render_owner(object)).to eq("Owner: `group::product intelligence`")
- end
- end
-
- describe '#render_tiers' do
- it 'render information about tiers' do
- object = double(tiers: %w[bronze silver gold])
-
- expect(klass.new.render_tiers(object)).to eq("Tiers: `bronze`, `silver`, `gold`")
- end
- end
-
- describe '#render_yaml_definition_path' do
- it 'render relative location of yaml definition' do
- object = double(yaml_path: 'config/events/button_click.yaml')
-
- expect(klass.new.render_yaml_definition_path(object)).to eq("YAML definition: `config/events/button_click.yaml`")
- end
- end
-
- describe '#backtick' do
- it 'wraps string in backticks chars' do
- expect(klass.new.backtick('test')).to eql("`test`")
- end
- end
-end
diff --git a/spec/lib/gitlab/tracking/docs/renderer_spec.rb b/spec/lib/gitlab/tracking/docs/renderer_spec.rb
deleted file mode 100644
index 386aea6c23a..00000000000
--- a/spec/lib/gitlab/tracking/docs/renderer_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Tracking::Docs::Renderer do
- describe 'contents' do
- let(:dictionary_path) { described_class::DICTIONARY_PATH }
- let(:items) { Gitlab::Tracking::EventDefinition.definitions.first(10).to_h }
-
- it 'generates dictionary for given items' do
- generated_dictionary = described_class.new(items).contents
- table_of_contents_items = items.values.map { |item| "#{item.category} #{item.action}"}
-
- generated_dictionary_keys = RDoc::Markdown
- .parse(generated_dictionary)
- .table_of_contents
- .select { |metric_doc| metric_doc.level == 3 }
- .map { |item| item.text.match(%r{<code>(.*)</code>})&.captures&.first }
-
- expect(generated_dictionary_keys).to match_array(table_of_contents_items)
- end
- end
-end
diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb
index ca7a6b6b1c3..8ded80dd191 100644
--- a/spec/lib/gitlab/tracking/standard_context_spec.rb
+++ b/spec/lib/gitlab/tracking/standard_context_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Tracking::StandardContext do
let_it_be(:project) { create(:project) }
let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:user) { create(:user) }
let(:snowplow_context) { subject.to_context }
@@ -87,8 +88,8 @@ RSpec.describe Gitlab::Tracking::StandardContext do
end
end
- it 'does not contain user id' do
- expect(snowplow_context.to_json[:data].keys).not_to include(:user_id)
+ it 'contains user id' do
+ expect(snowplow_context.to_json[:data].keys).to include(:user_id)
end
it 'contains namespace and project ids' do
@@ -104,8 +105,18 @@ RSpec.describe Gitlab::Tracking::StandardContext do
stub_feature_flags(add_namespace_and_project_to_snowplow_tracking: false)
end
- it 'does not contain any ids' do
- expect(snowplow_context.to_json[:data].keys).not_to include(:user_id, :project_id, :namespace_id)
+ it 'does not contain project or namespace ids' do
+ expect(snowplow_context.to_json[:data].keys).not_to include(:project_id, :namespace_id)
+ end
+ end
+
+ context 'without add_actor_based_user_to_snowplow_tracking feature' do
+ before do
+ stub_feature_flags(add_actor_based_user_to_snowplow_tracking: false)
+ end
+
+ it 'does not contain user_id' do
+ expect(snowplow_context.to_json[:data].keys).not_to include(:user_id)
end
end
end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index 02e66458f46..dacaae55676 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe Gitlab::Tracking do
other_context = double(:context)
project = build_stubbed(:project)
- user = double(:user)
+ user = build_stubbed(:user)
expect(Gitlab::Tracking::StandardContext)
.to receive(:new)
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index 6406c0b5458..522f69062fb 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -49,6 +49,37 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
expect { described_class.definitions }.not_to raise_error
end
+ describe '#with_instrumentation_class' do
+ let(:metric_status) { 'active' }
+ let(:all_definitions) do
+ metrics_definitions = [
+ { key_path: 'metric1', instrumentation_class: 'RedisHLLMetric', status: 'data_available' },
+ { key_path: 'metric2', instrumentation_class: 'RedisHLLMetric', status: 'implemented' },
+ { key_path: 'metric3', instrumentation_class: 'RedisHLLMetric', status: 'deprecated' },
+ { key_path: 'metric4', instrumentation_class: 'RedisHLLMetric', status: metric_status },
+ { key_path: 'metric5', status: 'active' },
+ { key_path: 'metric_missing_status' }
+ ]
+ metrics_definitions.map { |definition| described_class.new(definition[:key_path], definition.symbolize_keys) }
+ end
+
+ before do
+ allow(described_class).to receive(:all).and_return(all_definitions)
+ end
+
+ it 'includes definitions with instrumentation_class' do
+ expect(described_class.with_instrumentation_class.count).to eq(4)
+ end
+
+ context 'with removed metric' do
+ let(:metric_status) { 'removed' }
+
+ it 'excludes removed definitions' do
+ expect(described_class.with_instrumentation_class.count).to eq(3)
+ end
+ end
+ end
+
describe '#key' do
subject { definition.key }
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/active_user_count_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/active_user_count_metric_spec.rb
new file mode 100644
index 00000000000..f0ee6c38f2e
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/active_user_count_metric_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::ActiveUserCountMetric do
+ before do
+ create(:user)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none', data_source: 'ruby' } do
+ let(:expected_value) { ::User.active.count }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_users_associating_milestones_to_releases_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_users_associating_milestones_to_releases_metric_spec.rb
new file mode 100644
index 00000000000..e2bb99c832a
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_users_associating_milestones_to_releases_metric_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountUsersAssociatingMilestonesToReleasesMetric do
+ let_it_be(:release) { create(:release, created_at: 3.days.ago) }
+ let_it_be(:release_with_milestone) { create(:release, :with_milestones, created_at: 3.days.ago) }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', data_source: 'database' } do
+ let(:expected_value) { 1 }
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
index 4996b0a0089..222198a58ac 100644
--- a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
@@ -6,97 +6,62 @@ RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter do
describe '.track_unique_project_event' do
using RSpec::Parameterized::TableSyntax
- where(:template, :config_source, :expected_event) do
- # Implicit Auto DevOps usage
- 'Auto-DevOps.gitlab-ci.yml' | :auto_devops_source | 'p_ci_templates_implicit_auto_devops'
- 'Jobs/Build.gitlab-ci.yml' | :auto_devops_source | 'p_ci_templates_implicit_auto_devops_build'
- 'Jobs/Deploy.gitlab-ci.yml' | :auto_devops_source | 'p_ci_templates_implicit_auto_devops_deploy'
- 'Security/SAST.gitlab-ci.yml' | :auto_devops_source | 'p_ci_templates_implicit_security_sast'
- 'Security/Secret-Detection.gitlab-ci.yml' | :auto_devops_source | 'p_ci_templates_implicit_security_secret_detection'
- # Explicit include:template usage
- '5-Minute-Production-App.gitlab-ci.yml' | :repository_source | 'p_ci_templates_5_min_production_app'
- 'Auto-DevOps.gitlab-ci.yml' | :repository_source | 'p_ci_templates_auto_devops'
- 'AWS/CF-Provision-and-Deploy-EC2.gitlab-ci.yml' | :repository_source | 'p_ci_templates_aws_cf_deploy_ec2'
- 'AWS/Deploy-ECS.gitlab-ci.yml' | :repository_source | 'p_ci_templates_aws_deploy_ecs'
- 'Jobs/Build.gitlab-ci.yml' | :repository_source | 'p_ci_templates_auto_devops_build'
- 'Jobs/Deploy.gitlab-ci.yml' | :repository_source | 'p_ci_templates_auto_devops_deploy'
- 'Jobs/Deploy.latest.gitlab-ci.yml' | :repository_source | 'p_ci_templates_auto_devops_deploy_latest'
- 'Security/SAST.gitlab-ci.yml' | :repository_source | 'p_ci_templates_security_sast'
- 'Security/Secret-Detection.gitlab-ci.yml' | :repository_source | 'p_ci_templates_security_secret_detection'
- 'Terraform/Base.latest.gitlab-ci.yml' | :repository_source | 'p_ci_templates_terraform_base_latest'
- end
-
- with_them do
- it_behaves_like 'tracking unique hll events' do
- subject(:request) { described_class.track_unique_project_event(project_id: project_id, template: template, config_source: config_source) }
+ let(:project_id) { 1 }
- let(:project_id) { 1 }
- let(:target_id) { expected_event }
- let(:expected_type) { instance_of(Integer) }
+ shared_examples 'tracks template' do
+ it "has an event defined for template" do
+ expect do
+ described_class.track_unique_project_event(
+ project_id: project_id,
+ template: template_path,
+ config_source: config_source
+ )
+ end.not_to raise_error
end
- end
- context 'known_events coverage tests' do
- let(:project_id) { 1 }
- let(:config_source) { :repository_source }
+ it "tracks template" do
+ expanded_template_name = described_class.expand_template_name(template_path)
+ expected_template_event_name = described_class.ci_template_event_name(expanded_template_name, config_source)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to(receive(:track_event)).with(expected_template_event_name, values: project_id)
- # These tests help guard against missing "explicit" events in known_events/ci_templates.yml
- context 'explicit include:template events' do
- described_class::TEMPLATE_TO_EVENT.keys.each do |template|
- it "does not raise error for #{template}" do
- expect do
- described_class.track_unique_project_event(project_id: project_id, template: template, config_source: config_source)
- end.not_to raise_error
- end
- end
+ described_class.track_unique_project_event(project_id: project_id, template: template_path, config_source: config_source)
end
+ end
- # This test is to help guard against missing "implicit" events in known_events/ci_templates.yml
- it 'does not raise error for any template in an implicit Auto DevOps pipeline' do
- project = create(:project, :auto_devops)
- pipeline = double(project: project)
- command = double
- result = Gitlab::Ci::YamlProcessor.new(
- Gitlab::Ci::Pipeline::Chain::Config::Content::AutoDevops.new(pipeline, command).content,
- project: project,
- user: double,
- sha: 'd310cc759caaa20cd05a9e0983d6017896d9c34c'
- ).execute
+ context 'with explicit includes' do
+ let(:config_source) { :repository_source }
- config_source = :auto_devops_source
+ (described_class.ci_templates - ['Verify/Browser-Performance.latest.gitlab-ci.yml', 'Verify/Browser-Performance.gitlab-ci.yml']).each do |template|
+ context "for #{template}" do
+ let(:template_path) { template }
- result.included_templates.each do |template|
- expect do
- described_class.track_unique_project_event(project_id: project.id, template: template, config_source: config_source)
- end.not_to raise_error
+ include_examples 'tracks template'
end
end
end
- context 'templates outside of TEMPLATE_TO_EVENT' do
- let(:project_id) { 1 }
- let(:config_source) { :repository_source }
-
- described_class.ci_templates.each do |template|
- next if described_class::TEMPLATE_TO_EVENT.key?(template)
-
- it "has an event defined for #{template}" do
- expect do
- described_class.track_unique_project_event(
- project_id: project_id,
- template: template,
- config_source: config_source
- )
- end.not_to raise_error
- end
+ context 'with implicit includes' do
+ let(:config_source) { :auto_devops_source }
- it "tracks #{template}" do
- expected_template_event_name = described_class.ci_template_event_name(template, :repository_source)
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).to(receive(:track_event)).with(expected_template_event_name, values: project_id)
+ [
+ ['', ['Auto-DevOps.gitlab-ci.yml']],
+ ['Jobs', described_class.ci_templates('lib/gitlab/ci/templates/Jobs')],
+ ['Security', described_class.ci_templates('lib/gitlab/ci/templates/Security')]
+ ].each do |directory, templates|
+ templates.each do |template|
+ context "for #{template}" do
+ let(:template_path) { File.join(directory, template) }
- described_class.track_unique_project_event(project_id: project_id, template: template, config_source: config_source)
+ include_examples 'tracks template'
+ end
end
end
end
+
+ it 'expands short template names' do
+ expect do
+ described_class.track_unique_project_event(project_id: 1, template: 'Dependency-Scanning.gitlab-ci.yml', config_source: :repository_source)
+ end.not_to raise_error
+ end
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index 427dd4a205e..0ec805714e3 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -47,6 +47,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'epics_usage',
'epic_boards_usage',
'secure',
+ 'importer',
'network_policies'
)
end
diff --git a/spec/lib/gitlab/usage_data_metrics_spec.rb b/spec/lib/gitlab/usage_data_metrics_spec.rb
index e0063194f9b..ee0cfb1407e 100644
--- a/spec/lib/gitlab/usage_data_metrics_spec.rb
+++ b/spec/lib/gitlab/usage_data_metrics_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe Gitlab::UsageDataMetrics do
let(:metric_files_key_paths) do
Gitlab::Usage::MetricDefinition
.definitions
- .select { |k, v| v.attributes[:data_source] == 'redis_hll' && v.key_path.starts_with?('redis_hll_counters') }
+ .select { |k, v| v.attributes[:data_source] == 'redis_hll' && v.key_path.starts_with?('redis_hll_counters') && v.available? }
.keys
.sort
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index a70b68a181f..833bf260019 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -101,11 +101,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
- cluster = create(:cluster, user: user)
- create(:clusters_applications_cert_manager, :installed, cluster: cluster)
- create(:clusters_applications_helm, :installed, cluster: cluster)
- create(:clusters_applications_ingress, :installed, cluster: cluster)
- create(:clusters_applications_knative, :installed, cluster: cluster)
+ create(:cluster, user: user)
create(:cluster, :disabled, user: user)
create(:cluster_provider_gcp, :created)
create(:cluster_provider_aws, :created)
@@ -118,10 +114,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
expect(described_class.usage_activity_by_stage_configure({})).to include(
- clusters_applications_cert_managers: 2,
- clusters_applications_helm: 2,
- clusters_applications_ingress: 2,
- clusters_applications_knative: 2,
clusters_management_project: 2,
clusters_disabled: 4,
clusters_enabled: 12,
@@ -136,10 +128,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
project_clusters_enabled: 10
)
expect(described_class.usage_activity_by_stage_configure(described_class.monthly_time_range_db_params)).to include(
- clusters_applications_cert_managers: 1,
- clusters_applications_helm: 1,
- clusters_applications_ingress: 1,
- clusters_applications_knative: 1,
clusters_management_project: 1,
clusters_disabled: 2,
clusters_enabled: 6,
@@ -392,7 +380,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
user = create(:user, dashboard: 'operations')
cluster = create(:cluster, user: user)
project = create(:project, creator: user)
- create(:clusters_applications_prometheus, :installed, cluster: cluster)
+ create(:clusters_integrations_prometheus, cluster: cluster)
create(:project_tracing_setting)
create(:project_error_tracking_setting)
create(:incident)
@@ -402,7 +390,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(described_class.usage_activity_by_stage_monitor({})).to include(
clusters: 2,
- clusters_applications_prometheus: 2,
+ clusters_integrations_prometheus: 2,
operations_dashboard_default_dashboard: 2,
projects_with_tracing_enabled: 2,
projects_with_error_tracking_enabled: 2,
@@ -414,7 +402,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
data_28_days = described_class.usage_activity_by_stage_monitor(described_class.monthly_time_range_db_params)
expect(data_28_days).to include(
clusters: 1,
- clusters_applications_prometheus: 1,
+ clusters_integrations_prometheus: 1,
operations_dashboard_default_dashboard: 1,
projects_with_tracing_enabled: 1,
projects_with_error_tracking_enabled: 1,
@@ -469,7 +457,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
for_defined_days_back do
user = create(:user)
create(:deployment, :failed, user: user)
- create(:release, author: user)
+ release = create(:release, author: user)
+ create(:milestone, project: release.project, releases: [release])
create(:deployment, :success, user: user)
end
@@ -477,13 +466,15 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
deployments: 2,
failed_deployments: 2,
releases: 2,
- successful_deployments: 2
+ successful_deployments: 2,
+ releases_with_milestones: 2
)
expect(described_class.usage_activity_by_stage_release(described_class.monthly_time_range_db_params)).to include(
deployments: 1,
failed_deployments: 1,
releases: 1,
- successful_deployments: 1
+ successful_deployments: 1,
+ releases_with_milestones: 1
)
end
end
@@ -499,7 +490,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
create(:ci_pipeline, :repository_source, user: user)
create(:ci_pipeline_schedule, owner: user)
create(:ci_trigger, owner: user)
- create(:clusters_applications_runner, :installed)
end
expect(described_class.usage_activity_by_stage_verify({})).to include(
@@ -510,8 +500,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
ci_pipeline_config_repository: 2,
ci_pipeline_schedules: 2,
ci_pipelines: 2,
- ci_triggers: 2,
- clusters_applications_runner: 2
+ ci_triggers: 2
)
expect(described_class.usage_activity_by_stage_verify(described_class.monthly_time_range_db_params)).to include(
ci_builds: 1,
@@ -521,8 +510,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
ci_pipeline_config_repository: 1,
ci_pipeline_schedules: 1,
ci_pipelines: 1,
- ci_triggers: 1,
- clusters_applications_runner: 1
+ ci_triggers: 1
)
end
end
@@ -604,17 +592,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:clusters_platforms_eks]).to eq(1)
expect(count_data[:clusters_platforms_gke]).to eq(1)
expect(count_data[:clusters_platforms_user]).to eq(1)
- expect(count_data[:clusters_applications_helm]).to eq(1)
- expect(count_data[:clusters_applications_ingress]).to eq(1)
- expect(count_data[:clusters_applications_cert_managers]).to eq(1)
- expect(count_data[:clusters_applications_crossplane]).to eq(1)
- expect(count_data[:clusters_applications_prometheus]).to eq(1)
- expect(count_data[:clusters_applications_runner]).to eq(1)
- expect(count_data[:clusters_applications_knative]).to eq(1)
- expect(count_data[:clusters_applications_elastic_stack]).to eq(1)
+ expect(count_data[:clusters_integrations_elastic_stack]).to eq(1)
+ expect(count_data[:clusters_integrations_prometheus]).to eq(1)
expect(count_data[:grafana_integrated_projects]).to eq(2)
- expect(count_data[:clusters_applications_jupyter]).to eq(1)
- expect(count_data[:clusters_applications_cilium]).to eq(1)
expect(count_data[:clusters_management_project]).to eq(1)
expect(count_data[:kubernetes_agents]).to eq(2)
expect(count_data[:kubernetes_agents_with_token]).to eq(1)
@@ -662,13 +642,13 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.data[:counts] }
it 'gathers usage data' do
- expect(subject[:projects_with_expiration_policy_enabled]).to eq 18
+ expect(subject[:projects_with_expiration_policy_enabled]).to eq 19
expect(subject[:projects_with_expiration_policy_disabled]).to eq 5
expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_unset]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_1]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_5]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_10]).to eq 12
+ expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_10]).to eq 13
expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_25]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_50]).to eq 1
@@ -676,9 +656,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_7d]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_14d]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_30d]).to eq 1
+ expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_60d]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_90d]).to eq 14
- expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1d]).to eq 14
+ expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1d]).to eq 15
expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_7d]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_14d]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1month]).to eq 1
@@ -743,7 +724,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(counts_monthly[:projects_with_alerts_created]).to eq(1)
expect(counts_monthly[:projects]).to eq(1)
expect(counts_monthly[:packages]).to eq(1)
- expect(counts_monthly[:promoted_issues]).to eq(1)
+ expect(counts_monthly[:promoted_issues]).to eq(Gitlab::UsageData::DEPRECATED_VALUE)
end
end
@@ -1093,6 +1074,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
it 'gathers service_ping_features_enabled' do
expect(subject[:settings][:service_ping_features_enabled]).to eq(Gitlab::CurrentSettings.usage_ping_features_enabled)
end
+
+ it 'gathers user_cap_feature_enabled' do
+ expect(subject[:settings][:user_cap_feature_enabled]).to eq(Gitlab::CurrentSettings.new_user_signups_cap)
+ end
end
end
@@ -1438,48 +1423,4 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
end
-
- describe '.snowplow_event_counts' do
- let_it_be(:time_period) { { collector_tstamp: 8.days.ago..1.day.ago } }
-
- context 'when self-monitoring project exists' do
- let_it_be(:project) { create(:project) }
-
- before do
- stub_application_setting(self_monitoring_project: project)
- end
-
- context 'and product_analytics FF is enabled for it' do
- before do
- stub_feature_flags(product_analytics_tracking: true)
-
- create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote')
- create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote', collector_tstamp: 2.days.ago)
- create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote', collector_tstamp: 9.days.ago)
-
- create(:product_analytics_event, project: project, se_category: 'foo', se_action: 'bar', collector_tstamp: 2.days.ago)
- end
-
- it 'returns promoted_issues for the time period' do
- expect(described_class.snowplow_event_counts(time_period)[:promoted_issues]).to eq(1)
- end
- end
-
- context 'and product_analytics FF is disabled' do
- before do
- stub_feature_flags(product_analytics_tracking: false)
- end
-
- it 'returns an empty hash' do
- expect(described_class.snowplow_event_counts(time_period)).to eq({})
- end
- end
- end
-
- context 'when self-monitoring project does not exist' do
- it 'returns an empty hash' do
- expect(described_class.snowplow_event_counts(time_period)).to eq({})
- end
- end
- end
end
diff --git a/spec/lib/gitlab/utils/delegator_override/error_spec.rb b/spec/lib/gitlab/utils/delegator_override/error_spec.rb
new file mode 100644
index 00000000000..59b67676eff
--- /dev/null
+++ b/spec/lib/gitlab/utils/delegator_override/error_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Utils::DelegatorOverride::Error do
+ let(:error) { described_class.new('foo', 'Target', '/path/to/target', 'Delegator', '/path/to/delegator') }
+
+ describe '#to_s' do
+ subject { error.to_s }
+
+ it { is_expected.to eq("Delegator#foo is overriding Target#foo. delegator_location: /path/to/delegator target_location: /path/to/target") }
+ end
+end
diff --git a/spec/lib/gitlab/utils/delegator_override/validator_spec.rb b/spec/lib/gitlab/utils/delegator_override/validator_spec.rb
new file mode 100644
index 00000000000..4cd1b18de82
--- /dev/null
+++ b/spec/lib/gitlab/utils/delegator_override/validator_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Utils::DelegatorOverride::Validator do
+ let(:delegator_class) do
+ Class.new(::SimpleDelegator) do
+ extend(::Gitlab::Utils::DelegatorOverride)
+
+ def foo
+ end
+ end.prepend(ee_delegator_extension)
+ end
+
+ let(:ee_delegator_extension) do
+ Module.new do
+ extend(::Gitlab::Utils::DelegatorOverride)
+
+ def bar
+ end
+ end
+ end
+
+ let(:target_class) do
+ Class.new do
+ def foo
+ end
+
+ def bar
+ end
+ end
+ end
+
+ let(:validator) { described_class.new(delegator_class) }
+
+ describe '#add_allowlist' do
+ it 'adds a method name to the allowlist' do
+ validator.add_allowlist([:foo])
+
+ expect(validator.allowed_method_names).to contain_exactly(:foo)
+ end
+ end
+
+ describe '#add_target' do
+ it 'adds the target class' do
+ validator.add_target(target_class)
+
+ expect(validator.target_classes).to contain_exactly(target_class)
+ end
+ end
+
+ describe '#expand_on_ancestors' do
+ it 'adds the allowlist in the ancestors' do
+ ancestor_validator = described_class.new(ee_delegator_extension)
+ ancestor_validator.add_allowlist([:bar])
+ validator.expand_on_ancestors({ ee_delegator_extension => ancestor_validator })
+
+ expect(validator.allowed_method_names).to contain_exactly(:bar)
+ end
+ end
+
+ describe '#validate_overrides!' do
+ before do
+ validator.add_target(target_class)
+ end
+
+ it 'does not raise an error when the overrides are allowed' do
+ validator.add_allowlist([:foo])
+ ancestor_validator = described_class.new(ee_delegator_extension)
+ ancestor_validator.add_allowlist([:bar])
+ validator.expand_on_ancestors({ ee_delegator_extension => ancestor_validator })
+
+ expect { validator.validate_overrides! }.not_to raise_error
+ end
+
+ it 'raises an error when there is an override' do
+ expect { validator.validate_overrides! }
+ .to raise_error(described_class::UnexpectedDelegatorOverrideError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/utils/delegator_override_spec.rb b/spec/lib/gitlab/utils/delegator_override_spec.rb
new file mode 100644
index 00000000000..af4c7fa5d8e
--- /dev/null
+++ b/spec/lib/gitlab/utils/delegator_override_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Utils::DelegatorOverride do
+ let(:delegator_class) do
+ Class.new(::SimpleDelegator) do
+ extend(::Gitlab::Utils::DelegatorOverride)
+
+ def foo
+ end
+ end
+ end
+
+ let(:target_class) do
+ Class.new do
+ def foo
+ end
+
+ def bar
+ end
+ end
+ end
+
+ let(:dummy_module) do
+ Module.new do
+ def foobar
+ end
+ end
+ end
+
+ before do
+ stub_env('STATIC_VERIFICATION', 'true')
+ end
+
+ describe '.delegator_target' do
+ subject { delegator_class.delegator_target(target_class) }
+
+ it 'sets the delegator target to the validator' do
+ expect(described_class.validator(delegator_class))
+ .to receive(:add_target).with(target_class)
+
+ subject
+ end
+
+ context 'when the class does not inherit SimpleDelegator' do
+ let(:delegator_class) do
+ Class.new do
+ extend(::Gitlab::Utils::DelegatorOverride)
+ end
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError, /not a subclass of 'SimpleDelegator' class/)
+ end
+ end
+ end
+
+ describe '.delegator_override' do
+ subject { delegator_class.delegator_override(:foo) }
+
+ it 'adds the method name to the allowlist' do
+ expect(described_class.validator(delegator_class))
+ .to receive(:add_allowlist).with([:foo])
+
+ subject
+ end
+ end
+
+ describe '.delegator_override_with' do
+ subject { delegator_class.delegator_override_with(dummy_module) }
+
+ it 'adds the method names of the module to the allowlist' do
+ expect(described_class.validator(delegator_class))
+ .to receive(:add_allowlist).with([:foobar])
+
+ subject
+ end
+ end
+
+ describe '.verify!' do
+ subject { described_class.verify! }
+
+ it 'does not raise an error when an override is in allowlist' do
+ delegator_class.delegator_target(target_class)
+ delegator_class.delegator_override(:foo)
+
+ expect { subject }.not_to raise_error
+ end
+
+ it 'raises an error when there is an override' do
+ delegator_class.delegator_target(target_class)
+
+ expect { subject }.to raise_error(Gitlab::Utils::DelegatorOverride::Validator::UnexpectedDelegatorOverrideError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/view/presenter/base_spec.rb b/spec/lib/gitlab/view/presenter/base_spec.rb
index 97d5e2b280d..a7083bd2722 100644
--- a/spec/lib/gitlab/view/presenter/base_spec.rb
+++ b/spec/lib/gitlab/view/presenter/base_spec.rb
@@ -18,11 +18,43 @@ RSpec.describe Gitlab::View::Presenter::Base do
describe '.presents' do
it 'exposes #subject with the given keyword' do
- presenter_class.presents(:foo)
+ presenter_class.presents(Object, as: :foo)
presenter = presenter_class.new(project)
expect(presenter.foo).to eq(project)
end
+
+ it 'raises an error when symbol is passed' do
+ expect { presenter_class.presents(:foo) }.to raise_error(ArgumentError)
+ end
+
+ context 'when the presenter class inherits Presenter::Delegated' do
+ let(:presenter_class) do
+ Class.new(::Gitlab::View::Presenter::Delegated) do
+ include(::Gitlab::View::Presenter::Base)
+ end
+ end
+
+ it 'sets the delegator target' do
+ expect(presenter_class).to receive(:delegator_target).with(Object)
+
+ presenter_class.presents(Object, as: :foo)
+ end
+ end
+
+ context 'when the presenter class inherits Presenter::Simple' do
+ let(:presenter_class) do
+ Class.new(::Gitlab::View::Presenter::Simple) do
+ include(::Gitlab::View::Presenter::Base)
+ end
+ end
+
+ it 'does not set the delegator target' do
+ expect(presenter_class).not_to receive(:delegator_target).with(Object)
+
+ presenter_class.presents(Object, as: :foo)
+ end
+ end
end
describe '#can?' do
diff --git a/spec/lib/gitlab/with_feature_category_spec.rb b/spec/lib/gitlab/with_feature_category_spec.rb
deleted file mode 100644
index b6fe1c84b26..00000000000
--- a/spec/lib/gitlab/with_feature_category_spec.rb
+++ /dev/null
@@ -1,69 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-require_relative "../../../lib/gitlab/with_feature_category"
-
-RSpec.describe Gitlab::WithFeatureCategory do
- describe ".feature_category_for_action" do
- let(:base_controller) do
- Class.new do
- include ::Gitlab::WithFeatureCategory
- end
- end
-
- let(:controller) do
- Class.new(base_controller) do
- feature_category :foo, %w(update edit)
- feature_category :bar, %w(index show)
- feature_category :quux, %w(destroy)
- end
- end
-
- let(:subclass) do
- Class.new(controller) do
- feature_category :baz, %w(subclass_index)
- end
- end
-
- it "is nil when nothing was defined" do
- expect(base_controller.feature_category_for_action("hello")).to be_nil
- end
-
- it "returns the expected category", :aggregate_failures do
- expect(controller.feature_category_for_action("update")).to eq(:foo)
- expect(controller.feature_category_for_action("index")).to eq(:bar)
- expect(controller.feature_category_for_action("destroy")).to eq(:quux)
- end
-
- it "returns the expected category for categories defined in subclasses" do
- expect(subclass.feature_category_for_action("subclass_index")).to eq(:baz)
- end
-
- it "raises an error when defining for the controller and for individual actions" do
- expect do
- Class.new(base_controller) do
- feature_category :hello
- feature_category :goodbye, [:world]
- end
- end.to raise_error(ArgumentError, "hello is defined for all actions, but other categories are set")
- end
-
- it "raises an error when multiple calls define the same action" do
- expect do
- Class.new(base_controller) do
- feature_category :hello, [:world]
- feature_category :goodbye, ["world"]
- end
- end.to raise_error(ArgumentError, "Actions have multiple feature categories: world")
- end
-
- it "does not raise an error when multiple calls define the same action and feature category" do
- expect do
- Class.new(base_controller) do
- feature_category :hello, [:world]
- feature_category :hello, ["world"]
- end
- end.not_to raise_error
- end
- end
-end
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index 09f90a3e5b6..8ba56af561d 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -244,13 +244,15 @@ RSpec.describe Gitlab::Workhorse do
GitalyServer: {
features: { 'gitaly-feature-enforce-requests-limits' => 'true' },
address: Gitlab::GitalyClient.address('default'),
- token: Gitlab::GitalyClient.token('default')
+ token: Gitlab::GitalyClient.token('default'),
+ sidechannel: false
}
}
end
before do
allow(Gitlab.config.gitaly).to receive(:enabled).and_return(true)
+ stub_feature_flags(workhorse_use_sidechannel: false)
end
it 'includes a Repository param' do
@@ -332,6 +334,46 @@ RSpec.describe Gitlab::Workhorse do
it { expect { subject }.to raise_exception('Unsupported action: download') }
end
+
+ context 'when workhorse_use_sidechannel flag is set' do
+ context 'when a feature flag is set globally' do
+ before do
+ stub_feature_flags(workhorse_use_sidechannel: true)
+ end
+
+ it 'sets the flag to true' do
+ response = described_class.git_http_ok(repository, Gitlab::GlRepository::PROJECT, user, action)
+
+ expect(response.dig(:GitalyServer, :sidechannel)).to eq(true)
+ end
+ end
+
+ context 'when a feature flag is set for a single project' do
+ before do
+ stub_feature_flags(workhorse_use_sidechannel: project)
+ end
+
+ it 'sets the flag to true for that project' do
+ response = described_class.git_http_ok(repository, Gitlab::GlRepository::PROJECT, user, action)
+
+ expect(response.dig(:GitalyServer, :sidechannel)).to eq(true)
+ end
+
+ it 'sets the flag to false for other projects' do
+ other_project = create(:project, :public, :repository)
+ response = described_class.git_http_ok(other_project.repository, Gitlab::GlRepository::PROJECT, user, action)
+
+ expect(response.dig(:GitalyServer, :sidechannel)).to eq(false)
+ end
+
+ it 'sets the flag to false when there is no project' do
+ snippet = create(:personal_snippet, :repository)
+ response = described_class.git_http_ok(snippet.repository, Gitlab::GlRepository::SNIPPET, user, action)
+
+ expect(response.dig(:GitalyServer, :sidechannel)).to eq(false)
+ end
+ end
+ end
end
context 'when receive_max_input_size has been updated' do
diff --git a/spec/lib/gitlab/email/smime/certificate_spec.rb b/spec/lib/gitlab/x509/certificate_spec.rb
index f7bb933e348..a5b192dd051 100644
--- a/spec/lib/gitlab/email/smime/certificate_spec.rb
+++ b/spec/lib/gitlab/x509/certificate_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Email::Smime::Certificate do
+RSpec.describe Gitlab::X509::Certificate do
include SmimeHelper
# cert generation is an expensive operation and they are used read-only,
diff --git a/spec/lib/peek/views/active_record_spec.rb b/spec/lib/peek/views/active_record_spec.rb
index 6d50922904e..c89f6a21b35 100644
--- a/spec/lib/peek/views/active_record_spec.rb
+++ b/spec/lib/peek/views/active_record_spec.rb
@@ -53,154 +53,82 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
allow(connection_primary_2).to receive(:transaction_open?).and_return(true)
allow(connection_unknown).to receive(:transaction_open?).and_return(false)
allow(::Gitlab::Database).to receive(:db_config_name).and_return('the_db_config_name')
+
+ allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).with(connection_replica).and_return(:replica)
+ allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).with(connection_primary_1).and_return(:primary)
+ allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).with(connection_primary_2).and_return(:primary)
+ allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).with(connection_unknown).and_return(nil)
end
- context 'when database load balancing is not enabled' do
- it 'subscribes and store data into peek views' do
- Timecop.freeze(2021, 2, 23, 10, 0) do
- ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1)
- ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 2.seconds, '2', event_2)
- ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 3.seconds, '3', event_3)
- ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 4.seconds, '4', event_4)
- end
+ it 'includes db role data and db_config_name name' do
+ Timecop.freeze(2021, 2, 23, 10, 0) do
+ ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1)
+ ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 2.seconds, '2', event_2)
+ ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 3.seconds, '3', event_3)
+ ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 4.seconds, '4', event_4)
+ end
- expect(subject.results).to match(
- calls: 4,
- summary: {
- "Cached" => 1,
- "In a transaction" => 1
- },
- duration: '10000.00ms',
- warnings: ["active-record duration: 10000.0 over 3000"],
- details: contain_exactly(
- a_hash_including(
- start: be_a(Time),
- cached: '',
- transaction: '',
- duration: 1000.0,
- sql: 'SELECT * FROM users WHERE id = 10',
- db_config_name: "Config name: the_db_config_name"
- ),
- a_hash_including(
- start: be_a(Time),
- cached: 'Cached',
- transaction: '',
- duration: 2000.0,
- sql: 'SELECT * FROM users WHERE id = 10',
- db_config_name: "Config name: the_db_config_name"
- ),
- a_hash_including(
- start: be_a(Time),
- cached: '',
- transaction: 'In a transaction',
- duration: 3000.0,
- sql: 'UPDATE users SET admin = true WHERE id = 10',
- db_config_name: "Config name: the_db_config_name"
- ),
- a_hash_including(
- start: be_a(Time),
- cached: '',
- transaction: '',
- duration: 4000.0,
- sql: 'SELECT VERSION()',
- db_config_name: "Config name: the_db_config_name"
- )
+ expect(subject.results).to match(
+ calls: 4,
+ summary: {
+ "Cached" => 1,
+ "In a transaction" => 1,
+ "Role: Primary" => 2,
+ "Role: Replica" => 1,
+ "Role: Unknown" => 1
+ },
+ duration: '10000.00ms',
+ warnings: ["active-record duration: 10000.0 over 3000"],
+ details: contain_exactly(
+ a_hash_including(
+ start: be_a(Time),
+ cached: '',
+ transaction: '',
+ duration: 1000.0,
+ sql: 'SELECT * FROM users WHERE id = 10',
+ db_role: 'Role: Primary',
+ db_config_name: "Config name: the_db_config_name"
+ ),
+ a_hash_including(
+ start: be_a(Time),
+ cached: 'Cached',
+ transaction: '',
+ duration: 2000.0,
+ sql: 'SELECT * FROM users WHERE id = 10',
+ db_role: 'Role: Replica',
+ db_config_name: "Config name: the_db_config_name"
+ ),
+ a_hash_including(
+ start: be_a(Time),
+ cached: '',
+ transaction: 'In a transaction',
+ duration: 3000.0,
+ sql: 'UPDATE users SET admin = true WHERE id = 10',
+ db_role: 'Role: Primary',
+ db_config_name: "Config name: the_db_config_name"
+ ),
+ a_hash_including(
+ start: be_a(Time),
+ cached: '',
+ transaction: '',
+ duration: 4000.0,
+ sql: 'SELECT VERSION()',
+ db_role: 'Role: Unknown',
+ db_config_name: "Config name: the_db_config_name"
)
)
- end
-
- context 'when the GITLAB_MULTIPLE_DATABASE_METRICS env var is disabled' do
- before do
- stub_env('GITLAB_MULTIPLE_DATABASE_METRICS', nil)
- end
-
- it 'does not include db_config_name field' do
- ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1)
-
- expect(subject.results[:details][0][:db_config_name]).to be_nil
- end
- end
+ )
end
- context 'when database load balancing is enabled' do
+ context 'when the GITLAB_MULTIPLE_DATABASE_METRICS env var is disabled' do
before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
- allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).with(connection_replica).and_return(:replica)
- allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).with(connection_primary_1).and_return(:primary)
- allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).with(connection_primary_2).and_return(:primary)
- allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).with(connection_unknown).and_return(nil)
+ stub_env('GITLAB_MULTIPLE_DATABASE_METRICS', nil)
end
- it 'includes db role data and db_config_name name' do
- Timecop.freeze(2021, 2, 23, 10, 0) do
- ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1)
- ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 2.seconds, '2', event_2)
- ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 3.seconds, '3', event_3)
- ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 4.seconds, '4', event_4)
- end
-
- expect(subject.results).to match(
- calls: 4,
- summary: {
- "Cached" => 1,
- "In a transaction" => 1,
- "Role: Primary" => 2,
- "Role: Replica" => 1,
- "Role: Unknown" => 1
- },
- duration: '10000.00ms',
- warnings: ["active-record duration: 10000.0 over 3000"],
- details: contain_exactly(
- a_hash_including(
- start: be_a(Time),
- cached: '',
- transaction: '',
- duration: 1000.0,
- sql: 'SELECT * FROM users WHERE id = 10',
- db_role: 'Role: Primary',
- db_config_name: "Config name: the_db_config_name"
- ),
- a_hash_including(
- start: be_a(Time),
- cached: 'Cached',
- transaction: '',
- duration: 2000.0,
- sql: 'SELECT * FROM users WHERE id = 10',
- db_role: 'Role: Replica',
- db_config_name: "Config name: the_db_config_name"
- ),
- a_hash_including(
- start: be_a(Time),
- cached: '',
- transaction: 'In a transaction',
- duration: 3000.0,
- sql: 'UPDATE users SET admin = true WHERE id = 10',
- db_role: 'Role: Primary',
- db_config_name: "Config name: the_db_config_name"
- ),
- a_hash_including(
- start: be_a(Time),
- cached: '',
- transaction: '',
- duration: 4000.0,
- sql: 'SELECT VERSION()',
- db_role: 'Role: Unknown',
- db_config_name: "Config name: the_db_config_name"
- )
- )
- )
- end
-
- context 'when the GITLAB_MULTIPLE_DATABASE_METRICS env var is disabled' do
- before do
- stub_env('GITLAB_MULTIPLE_DATABASE_METRICS', nil)
- end
-
- it 'does not include db_config_name field' do
- ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1)
+ it 'does not include db_config_name field' do
+ ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1)
- expect(subject.results[:details][0][:db_config_name]).to be_nil
- end
+ expect(subject.results[:details][0][:db_config_name]).to be_nil
end
end
end
diff --git a/spec/lib/rouge/formatters/html_gitlab_spec.rb b/spec/lib/rouge/formatters/html_gitlab_spec.rb
index d45c8c2a8c5..4bc9b256dce 100644
--- a/spec/lib/rouge/formatters/html_gitlab_spec.rb
+++ b/spec/lib/rouge/formatters/html_gitlab_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Rouge::Formatters::HTMLGitlab do
describe '#format' do
- subject { described_class.format(tokens, options) }
+ subject { described_class.format(tokens, **options) }
let(:lang) { 'ruby' }
let(:lexer) { Rouge::Lexer.find_fancy(lang) }
diff --git a/spec/lib/sidebars/groups/menus/scope_menu_spec.rb b/spec/lib/sidebars/groups/menus/scope_menu_spec.rb
new file mode 100644
index 00000000000..4b77a09117a
--- /dev/null
+++ b/spec/lib/sidebars/groups/menus/scope_menu_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Groups::Menus::ScopeMenu do
+ let(:group) { build(:group) }
+ let(:user) { group.owner }
+ let(:context) { Sidebars::Groups::Context.new(current_user: user, container: group) }
+
+ describe '#extra_nav_link_html_options' do
+ subject { described_class.new(context).extra_nav_link_html_options }
+
+ specify { is_expected.to match(hash_including(class: 'context-header has-tooltip', title: context.group.name)) }
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/scope_menu_spec.rb b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
index 5040ef9b0ff..980ab2f7c71 100644
--- a/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
@@ -12,4 +12,10 @@ RSpec.describe Sidebars::Projects::Menus::ScopeMenu do
specify { is_expected.to match(hash_including(class: 'shortcuts-project rspec-project-link')) }
end
+
+ describe '#extra_nav_link_html_options' do
+ subject { described_class.new(context).extra_nav_link_html_options }
+
+ specify { is_expected.to match(hash_including(class: 'context-header has-tooltip', title: context.project.name)) }
+ end
end
diff --git a/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb b/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
deleted file mode 100644
index dad95760306..00000000000
--- a/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
+++ /dev/null
@@ -1,64 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!('migrate_issue_trackers_data')
-
-RSpec.describe MigrateIssueTrackersData do
- let(:services) { table(:services) }
- let(:migration_class) { Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData }
- let(:migration_name) { migration_class.to_s.demodulize }
-
- let(:properties) do
- {
- 'url' => 'http://example.com'
- }
- end
-
- let!(:jira_integration) do
- services.create!(type: 'JiraService', properties: properties, category: 'issue_tracker')
- end
-
- let!(:jira_integration_nil) do
- services.create!(type: 'JiraService', properties: nil, category: 'issue_tracker')
- end
-
- let!(:bugzilla_integration) do
- services.create!(type: 'BugzillaService', properties: properties, category: 'issue_tracker')
- end
-
- let!(:youtrack_integration) do
- services.create!(type: 'YoutrackService', properties: properties, category: 'issue_tracker')
- end
-
- let!(:youtrack_integration_empty) do
- services.create!(type: 'YoutrackService', properties: '', category: 'issue_tracker')
- end
-
- let!(:gitlab_service) do
- services.create!(type: 'GitlabIssueTrackerService', properties: properties, category: 'issue_tracker')
- end
-
- let!(:gitlab_service_empty) do
- services.create!(type: 'GitlabIssueTrackerService', properties: {}, category: 'issue_tracker')
- end
-
- let!(:other_service) do
- services.create!(type: 'OtherService', properties: properties, category: 'other_category')
- end
-
- before do
- stub_const("#{described_class}::BATCH_SIZE", 2)
- end
-
- it 'schedules background migrations at correct time' do
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(migration_name).to be_scheduled_delayed_migration(3.minutes, jira_integration.id, bugzilla_integration.id)
- expect(migration_name).to be_scheduled_delayed_migration(6.minutes, youtrack_integration.id, gitlab_service.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/20191015154408_drop_merge_requests_require_code_owner_approval_from_projects_spec.rb b/spec/migrations/20191015154408_drop_merge_requests_require_code_owner_approval_from_projects_spec.rb
deleted file mode 100644
index 731bc923910..00000000000
--- a/spec/migrations/20191015154408_drop_merge_requests_require_code_owner_approval_from_projects_spec.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!('drop_merge_requests_require_code_owner_approval_from_projects')
-
-RSpec.describe DropMergeRequestsRequireCodeOwnerApprovalFromProjects do
- let(:projects_table) { table(:projects) }
-
- subject(:migration) { described_class.new }
-
- describe "without running the migration" do
- it "project_table has a :merge_requests_require_code_owner_approval column" do
- expect(projects_table.column_names)
- .to include("merge_requests_require_code_owner_approval")
- end
-
- it "project_table has a :projects_requiring_code_owner_approval index" do
- expect(ActiveRecord::Base.connection.indexes(:projects).collect(&:name))
- .to include("projects_requiring_code_owner_approval")
- end
- end
-
- describe '#up' do
- context "without running "
- before do
- migrate!
- end
-
- it "drops the :merge_requests_require_code_owner_approval column" do
- expect(projects_table.column_names)
- .not_to include("merge_requests_require_code_owner_approval")
- end
-
- it "drops the :projects_requiring_code_owner_approval index" do
- expect(ActiveRecord::Base.connection.indexes(:projects).collect(&:name))
- .not_to include("projects_requiring_code_owner_approval")
- end
- end
-
- describe "#down" do
- before do
- migration.up
- migration.down
- end
-
- it "project_table has a :merge_requests_require_code_owner_approval column" do
- expect(projects_table.column_names)
- .to include("merge_requests_require_code_owner_approval")
- end
-
- it "project_table has a :projects_requiring_code_owner_approval index" do
- expect(ActiveRecord::Base.connection.indexes(:projects).collect(&:name))
- .to include("projects_requiring_code_owner_approval")
- end
- end
-end
diff --git a/spec/migrations/20191125114345_add_admin_mode_protected_path_spec.rb b/spec/migrations/20191125114345_add_admin_mode_protected_path_spec.rb
deleted file mode 100644
index 222a000c134..00000000000
--- a/spec/migrations/20191125114345_add_admin_mode_protected_path_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!('add_admin_mode_protected_path')
-
-RSpec.describe AddAdminModeProtectedPath do
- subject(:migration) { described_class.new }
-
- let(:admin_mode_endpoint) { '/admin/session' }
- let(:application_settings) { table(:application_settings) }
-
- context 'no settings available' do
- it 'makes no changes' do
- expect { migrate! }.not_to change { application_settings.count }
- end
- end
-
- context 'protected_paths is null' do
- before do
- application_settings.create!(protected_paths: nil)
- end
-
- it 'makes no changes' do
- expect { migrate! }.not_to change { application_settings.first.protected_paths }
- end
- end
-
- it 'appends admin mode endpoint' do
- application_settings.create!(protected_paths: '{a,b,c}')
-
- protected_paths_before = %w[a b c]
- protected_paths_after = protected_paths_before.dup << admin_mode_endpoint
-
- expect { migrate! }.to change { application_settings.first.protected_paths }.from(protected_paths_before).to(protected_paths_after)
- end
-
- it 'new default includes admin mode endpoint' do
- settings_before = application_settings.create!
-
- expect(settings_before.protected_paths).not_to include(admin_mode_endpoint)
-
- migrate!
-
- application_settings.reset_column_information
- settings_after = application_settings.create!
-
- expect(settings_after.protected_paths).to include(admin_mode_endpoint)
- end
-end
diff --git a/spec/migrations/20191204114127_delete_legacy_triggers_spec.rb b/spec/migrations/20191204114127_delete_legacy_triggers_spec.rb
deleted file mode 100644
index aba3a902888..00000000000
--- a/spec/migrations/20191204114127_delete_legacy_triggers_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!('delete_legacy_triggers')
-
-RSpec.describe DeleteLegacyTriggers, schema: 2019_11_25_140458 do
- let(:ci_trigger_table) { table(:ci_triggers) }
- let(:user) { table(:users).create!(name: 'test', email: 'test@example.com', projects_limit: 1) }
-
- before do
- @trigger_with_user = ci_trigger_table.create!(owner_id: user.id)
- ci_trigger_table.create!(owner_id: nil)
- ci_trigger_table.create!(owner_id: nil)
- end
-
- it 'removes legacy triggers which has null owner_id' do
- expect do
- migrate!
- end.to change(ci_trigger_table, :count).by(-2)
-
- expect(ci_trigger_table.all).to eq([@trigger_with_user])
- end
-end
diff --git a/spec/migrations/20210906130643_drop_temporary_columns_and_triggers_for_taggings_spec.rb b/spec/migrations/20210906130643_drop_temporary_columns_and_triggers_for_taggings_spec.rb
new file mode 100644
index 00000000000..2e7ce733373
--- /dev/null
+++ b/spec/migrations/20210906130643_drop_temporary_columns_and_triggers_for_taggings_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('drop_temporary_columns_and_triggers_for_taggings')
+
+RSpec.describe DropTemporaryColumnsAndTriggersForTaggings do
+ let(:taggings_table) { table(:taggings) }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(taggings_table.column_names).to include('id_convert_to_bigint')
+ expect(taggings_table.column_names).to include('taggable_id_convert_to_bigint')
+ }
+
+ migration.after -> {
+ taggings_table.reset_column_information
+ expect(taggings_table.column_names).not_to include('id_convert_to_bigint')
+ expect(taggings_table.column_names).not_to include('taggable_id_convert_to_bigint')
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20210907013944_cleanup_bigint_conversion_for_ci_builds_metadata_spec.rb b/spec/migrations/20210907013944_cleanup_bigint_conversion_for_ci_builds_metadata_spec.rb
new file mode 100644
index 00000000000..ece5ed8251d
--- /dev/null
+++ b/spec/migrations/20210907013944_cleanup_bigint_conversion_for_ci_builds_metadata_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('cleanup_bigint_conversion_for_ci_builds_metadata')
+
+RSpec.describe CleanupBigintConversionForCiBuildsMetadata do
+ let(:ci_builds_metadata) { table(:ci_builds_metadata) }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(ci_builds_metadata.column_names).to include('id_convert_to_bigint')
+ expect(ci_builds_metadata.column_names).to include('build_id_convert_to_bigint')
+ }
+
+ migration.after -> {
+ ci_builds_metadata.reset_column_information
+ expect(ci_builds_metadata.column_names).not_to include('id_convert_to_bigint')
+ expect(ci_builds_metadata.column_names).not_to include('build_id_convert_to_bigint')
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20210910194952_update_report_type_for_existing_approval_project_rules_spec.rb b/spec/migrations/20210910194952_update_report_type_for_existing_approval_project_rules_spec.rb
new file mode 100644
index 00000000000..46a6d8d92ec
--- /dev/null
+++ b/spec/migrations/20210910194952_update_report_type_for_existing_approval_project_rules_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('update_report_type_for_existing_approval_project_rules')
+
+RSpec.describe UpdateReportTypeForExistingApprovalProjectRules, :migration do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:group) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let(:project) { table(:projects).create!(namespace_id: group.id) }
+ let(:approval_project_rule) { table(:approval_project_rules).create!(name: rule_name, rule_type: rule_type, project_id: project.id) }
+ let(:rule_type) { 2 }
+ let(:rule_name) { 'Vulnerability-Check' }
+
+ context 'with rule_type set to :report_approver' do
+ where(:rule_name, :report_type) do
+ [
+ ['Vulnerability-Check', 1],
+ ['License-Check', 2],
+ ['Coverage-Check', 3]
+ ]
+ end
+
+ with_them do
+ context "with names associated with report type" do
+ it 'updates report_type' do
+ expect { migrate! }.to change { approval_project_rule.reload.report_type }.from(nil).to(report_type)
+ end
+ end
+ end
+ end
+
+ context 'with rule_type set to another value (e.g., :regular)' do
+ let(:rule_type) { 0 }
+
+ it 'does not update report_type' do
+ expect { migrate! }.not_to change { approval_project_rule.reload.report_type }
+ end
+ end
+
+ context 'with the rule name set to another value (e.g., Test Rule)' do
+ let(:rule_name) { 'Test Rule'}
+
+ it 'does not update report_type' do
+ expect { migrate! }.not_to change { approval_project_rule.reload.report_type }
+ end
+ end
+end
diff --git a/spec/migrations/20210915022415_cleanup_bigint_conversion_for_ci_builds_spec.rb b/spec/migrations/20210915022415_cleanup_bigint_conversion_for_ci_builds_spec.rb
new file mode 100644
index 00000000000..ee71322433d
--- /dev/null
+++ b/spec/migrations/20210915022415_cleanup_bigint_conversion_for_ci_builds_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('cleanup_bigint_conversion_for_ci_builds')
+
+RSpec.describe CleanupBigintConversionForCiBuilds do
+ let(:ci_builds) { table(:ci_builds) }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(ci_builds.column_names).to include('id_convert_to_bigint')
+ expect(ci_builds.column_names).to include('stage_id_convert_to_bigint')
+ }
+
+ migration.after -> {
+ ci_builds.reset_column_information
+ expect(ci_builds.column_names).not_to include('id_convert_to_bigint')
+ expect(ci_builds.column_names).not_to include('stage_id_convert_to_bigint')
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20210918201050_remove_old_pending_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb b/spec/migrations/20210918201050_remove_old_pending_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb
new file mode 100644
index 00000000000..9addaaf2551
--- /dev/null
+++ b/spec/migrations/20210918201050_remove_old_pending_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20210918201050_remove_old_pending_jobs_for_recalculate_vulnerabilities_occurrences_uuid.rb')
+
+def create_background_migration_jobs(ids, status, created_at)
+ proper_status = case status
+ when :pending
+ Gitlab::Database::BackgroundMigrationJob.statuses['pending']
+ when :succeeded
+ Gitlab::Database::BackgroundMigrationJob.statuses['succeeded']
+ else
+ raise ArgumentError
+ end
+
+ background_migration_jobs.create!(
+ class_name: 'RecalculateVulnerabilitiesOccurrencesUuid',
+ arguments: Array(ids),
+ status: proper_status,
+ created_at: created_at
+ )
+end
+
+RSpec.describe RemoveOldPendingJobsForRecalculateVulnerabilitiesOccurrencesUuid, :migration do
+ let_it_be(:background_migration_jobs) { table(:background_migration_jobs) }
+ let_it_be(:before_target_date) { -Float::INFINITY..(DateTime.new(2021, 8, 17, 23, 59, 59)) }
+ let_it_be(:after_target_date) { (DateTime.new(2021, 8, 18, 0, 0, 0))..Float::INFINITY }
+
+ context 'when old RecalculateVulnerabilitiesOccurrencesUuid jobs are pending' do
+ before do
+ create_background_migration_jobs([1, 2, 3], :succeeded, DateTime.new(2021, 5, 5, 0, 2))
+ create_background_migration_jobs([4, 5, 6], :pending, DateTime.new(2021, 5, 5, 0, 4))
+
+ create_background_migration_jobs([1, 2, 3], :succeeded, DateTime.new(2021, 8, 18, 0, 0))
+ create_background_migration_jobs([4, 5, 6], :pending, DateTime.new(2021, 8, 18, 0, 2))
+ create_background_migration_jobs([7, 8, 9], :pending, DateTime.new(2021, 8, 18, 0, 4))
+ end
+
+ it 'removes old, pending jobs' do
+ migrate!
+
+ expect(background_migration_jobs.where(created_at: before_target_date).count).to eq(1)
+ expect(background_migration_jobs.where(created_at: after_target_date).count).to eq(3)
+ end
+ end
+end
diff --git a/spec/migrations/20210918202855_reschedule_pending_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb b/spec/migrations/20210918202855_reschedule_pending_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb
new file mode 100644
index 00000000000..5a2531bb63f
--- /dev/null
+++ b/spec/migrations/20210918202855_reschedule_pending_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20210918202855_reschedule_pending_jobs_for_recalculate_vulnerabilities_occurrences_uuid.rb')
+
+RSpec.describe ReschedulePendingJobsForRecalculateVulnerabilitiesOccurrencesUuid, :migration do
+ let_it_be(:background_migration_jobs) { table(:background_migration_jobs) }
+
+ context 'when RecalculateVulnerabilitiesOccurrencesUuid jobs are pending' do
+ before do
+ background_migration_jobs.create!(
+ class_name: 'RecalculateVulnerabilitiesOccurrencesUuid',
+ arguments: [1, 2, 3],
+ status: Gitlab::Database::BackgroundMigrationJob.statuses['pending']
+ )
+ background_migration_jobs.create!(
+ class_name: 'RecalculateVulnerabilitiesOccurrencesUuid',
+ arguments: [4, 5, 6],
+ status: Gitlab::Database::BackgroundMigrationJob.statuses['succeeded']
+ )
+ end
+
+ it 'queues pending jobs' do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.length).to eq(1)
+ expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['RecalculateVulnerabilitiesOccurrencesUuid', [1, 2, 3]])
+ expect(BackgroundMigrationWorker.jobs[0]['at']).to be_nil
+ end
+ end
+end
diff --git a/spec/migrations/20210922021816_drop_int4_columns_for_ci_job_artifacts_spec.rb b/spec/migrations/20210922021816_drop_int4_columns_for_ci_job_artifacts_spec.rb
new file mode 100644
index 00000000000..cf326cf0c0a
--- /dev/null
+++ b/spec/migrations/20210922021816_drop_int4_columns_for_ci_job_artifacts_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('drop_int4_columns_for_ci_job_artifacts')
+
+RSpec.describe DropInt4ColumnsForCiJobArtifacts do
+ let(:ci_job_artifacts) { table(:ci_job_artifacts) }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(ci_job_artifacts.column_names).to include('id_convert_to_bigint')
+ expect(ci_job_artifacts.column_names).to include('job_id_convert_to_bigint')
+ }
+
+ migration.after -> {
+ ci_job_artifacts.reset_column_information
+ expect(ci_job_artifacts.column_names).not_to include('id_convert_to_bigint')
+ expect(ci_job_artifacts.column_names).not_to include('job_id_convert_to_bigint')
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20210922025631_drop_int4_column_for_ci_sources_pipelines_spec.rb b/spec/migrations/20210922025631_drop_int4_column_for_ci_sources_pipelines_spec.rb
new file mode 100644
index 00000000000..00b922ee4f8
--- /dev/null
+++ b/spec/migrations/20210922025631_drop_int4_column_for_ci_sources_pipelines_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('drop_int4_column_for_ci_sources_pipelines')
+
+RSpec.describe DropInt4ColumnForCiSourcesPipelines do
+ let(:ci_sources_pipelines) { table(:ci_sources_pipelines) }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(ci_sources_pipelines.column_names).to include('source_job_id_convert_to_bigint')
+ }
+
+ migration.after -> {
+ ci_sources_pipelines.reset_column_information
+ expect(ci_sources_pipelines.column_names).not_to include('source_job_id_convert_to_bigint')
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20210922082019_drop_int4_column_for_events_spec.rb b/spec/migrations/20210922082019_drop_int4_column_for_events_spec.rb
new file mode 100644
index 00000000000..412556fc283
--- /dev/null
+++ b/spec/migrations/20210922082019_drop_int4_column_for_events_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('drop_int4_column_for_events')
+
+RSpec.describe DropInt4ColumnForEvents do
+ let(:events) { table(:events) }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(events.column_names).to include('id_convert_to_bigint')
+ }
+
+ migration.after -> {
+ events.reset_column_information
+ expect(events.column_names).not_to include('id_convert_to_bigint')
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20210922091402_drop_int4_column_for_push_event_payloads_spec.rb b/spec/migrations/20210922091402_drop_int4_column_for_push_event_payloads_spec.rb
new file mode 100644
index 00000000000..2b286e3e5e0
--- /dev/null
+++ b/spec/migrations/20210922091402_drop_int4_column_for_push_event_payloads_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('drop_int4_column_for_push_event_payloads')
+
+RSpec.describe DropInt4ColumnForPushEventPayloads do
+ let(:push_event_payloads) { table(:push_event_payloads) }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(push_event_payloads.column_names).to include('event_id_convert_to_bigint')
+ }
+
+ migration.after -> {
+ push_event_payloads.reset_column_information
+ expect(push_event_payloads.column_names).not_to include('event_id_convert_to_bigint')
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20211006060436_schedule_populate_topics_total_projects_count_cache_spec.rb b/spec/migrations/20211006060436_schedule_populate_topics_total_projects_count_cache_spec.rb
new file mode 100644
index 00000000000..d07d9a71b06
--- /dev/null
+++ b/spec/migrations/20211006060436_schedule_populate_topics_total_projects_count_cache_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('schedule_populate_topics_total_projects_count_cache')
+
+RSpec.describe SchedulePopulateTopicsTotalProjectsCountCache do
+ let(:topics) { table(:topics) }
+ let!(:topic_1) { topics.create!(name: 'Topic1') }
+ let!(:topic_2) { topics.create!(name: 'Topic2') }
+ let!(:topic_3) { topics.create!(name: 'Topic3') }
+
+ describe '#up' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+ end
+
+    it 'schedules PopulateTopicsTotalProjectsCountCache background jobs', :aggregate_failures do
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, topic_1.id, topic_2.id)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, topic_3.id, topic_3.id)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/add_default_and_free_plans_spec.rb b/spec/migrations/add_default_and_free_plans_spec.rb
deleted file mode 100644
index 7256e4928af..00000000000
--- a/spec/migrations/add_default_and_free_plans_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!('add_default_and_free_plans')
-
-RSpec.describe AddDefaultAndFreePlans do
- describe 'migrate' do
- let(:plans) { table(:plans) }
-
- context 'when on Gitlab.com' do
- before do
- expect(Gitlab).to receive(:com?) { true }
- end
-
- it 'creates free and default plans' do
- expect { migrate! }.to change { plans.count }.by 2
-
- expect(plans.last(2).pluck(:name)).to eq %w[free default]
- end
- end
-
- context 'when on self-hosted' do
- before do
- expect(Gitlab).to receive(:com?) { false }
- end
-
- it 'creates only a default plan' do
- expect { migrate! }.to change { plans.count }.by 1
-
- expect(plans.last.name).to eq 'default'
- end
- end
- end
-end
diff --git a/spec/migrations/add_unique_constraint_to_approvals_user_id_and_merge_request_id_spec.rb b/spec/migrations/add_unique_constraint_to_approvals_user_id_and_merge_request_id_spec.rb
deleted file mode 100644
index 795de51d387..00000000000
--- a/spec/migrations/add_unique_constraint_to_approvals_user_id_and_merge_request_id_spec.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe AddUniqueConstraintToApprovalsUserIdAndMergeRequestId do
- let(:migration) { described_class.new }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:merge_requests) { table(:merge_requests) }
- let(:approvals) { table(:approvals) }
-
- describe '#up' do
- before do
- namespaces.create!(id: 1, name: 'ns', path: 'ns')
- projects.create!(id: 1, namespace_id: 1)
- merge_requests.create!(id: 1, target_branch: 'master', source_branch: 'feature-1', target_project_id: 1)
- merge_requests.create!(id: 2, target_branch: 'master', source_branch: 'feature-2', target_project_id: 1)
- end
-
- it 'deletes duplicate records and keeps the first one' do
- first_approval = approvals.create!(id: 1, merge_request_id: 1, user_id: 1)
- approvals.create!(id: 2, merge_request_id: 1, user_id: 1)
-
- migration.up
-
- expect(approvals.all.to_a).to contain_exactly(first_approval)
- end
-
- it 'does not delete unique records' do
- unique_approvals = [
- approvals.create(id: 1, merge_request_id: 1, user_id: 1),
- approvals.create(id: 2, merge_request_id: 1, user_id: 2),
- approvals.create(id: 3, merge_request_id: 2, user_id: 1)
- ]
-
- migration.up
-
- expect(approvals.all.to_a).to contain_exactly(*unique_approvals)
- end
-
- it 'creates unique index' do
- migration.up
-
- expect(migration.index_exists?(:approvals, [:user_id, :merge_request_id], unique: true)).to be_truthy
- end
- end
-
- describe '#down' do
- it 'removes unique index' do
- migration.up
- migration.down
-
- expect(migration.index_exists?(:approvals, [:user_id, :merge_request_id], unique: true)).to be_falsey
- end
- end
-end
diff --git a/spec/migrations/backfill_and_add_not_null_constraint_to_released_at_column_on_releases_table_spec.rb b/spec/migrations/backfill_and_add_not_null_constraint_to_released_at_column_on_releases_table_spec.rb
deleted file mode 100644
index ea6599fc122..00000000000
--- a/spec/migrations/backfill_and_add_not_null_constraint_to_released_at_column_on_releases_table_spec.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillAndAddNotNullConstraintToReleasedAtColumnOnReleasesTable do
- let(:releases) { table(:releases) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- subject(:migration) { described_class.new }
-
- it 'fills released_at with the value of created_at' do
- created_at_a = Time.zone.parse('2019-02-10T08:00:00Z')
- created_at_b = Time.zone.parse('2019-03-10T18:00:00Z')
- namespace = namespaces.create!(name: 'foo', path: 'foo')
- project = projects.create!(namespace_id: namespace.id)
- release_a = releases.create!(project_id: project.id, created_at: created_at_a)
- release_b = releases.create!(project_id: project.id, created_at: created_at_b)
-
- disable_migrations_output { migration.up }
-
- release_a.reload
- release_b.reload
- expect(release_a.released_at).to eq(created_at_a)
- expect(release_b.released_at).to eq(created_at_b)
- end
-end
diff --git a/spec/migrations/backfill_operations_feature_flags_active_spec.rb b/spec/migrations/backfill_operations_feature_flags_active_spec.rb
deleted file mode 100644
index a28f648c75a..00000000000
--- a/spec/migrations/backfill_operations_feature_flags_active_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillOperationsFeatureFlagsActive do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:flags) { table(:operations_feature_flags) }
-
- def setup
- namespace = namespaces.create!(name: 'foo', path: 'foo')
- projects.create!(namespace_id: namespace.id)
- end
-
- it 'executes successfully when there are no flags in the table' do
- setup
-
- disable_migrations_output { migrate! }
-
- expect(flags.count).to eq(0)
- end
-
- it 'updates active to true' do
- project = setup
- flag = flags.create!(project_id: project.id, name: 'test_flag', active: false)
-
- disable_migrations_output { migrate! }
-
- expect(flag.reload.active).to eq(true)
- end
-
- it 'updates active to true for multiple flags' do
- project = setup
- flag_a = flags.create!(project_id: project.id, name: 'test_flag', active: false)
- flag_b = flags.create!(project_id: project.id, name: 'other_flag', active: false)
-
- disable_migrations_output { migrate! }
-
- expect(flag_a.reload.active).to eq(true)
- expect(flag_b.reload.active).to eq(true)
- end
-
- it 'leaves active true if it is already true' do
- project = setup
- flag = flags.create!(project_id: project.id, name: 'test_flag', active: true)
-
- disable_migrations_output { migrate! }
-
- expect(flag.reload.active).to eq(true)
- end
-end
diff --git a/spec/migrations/backfill_releases_table_updated_at_and_add_not_null_constraints_to_timestamps_spec.rb b/spec/migrations/backfill_releases_table_updated_at_and_add_not_null_constraints_to_timestamps_spec.rb
deleted file mode 100644
index 6e8bcfc050d..00000000000
--- a/spec/migrations/backfill_releases_table_updated_at_and_add_not_null_constraints_to_timestamps_spec.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillReleasesTableUpdatedAtAndAddNotNullConstraintsToTimestamps do
- let(:releases) { table(:releases) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- subject(:migration) { described_class.new }
-
- it 'fills null updated_at rows with the value of created_at' do
- created_at_a = Time.zone.parse('2014-03-11T04:30:00Z')
- created_at_b = Time.zone.parse('2019-09-10T12:00:00Z')
- namespace = namespaces.create!(name: 'foo', path: 'foo')
- project = projects.create!(namespace_id: namespace.id)
- release_a = releases.create!(project_id: project.id,
- released_at: Time.zone.parse('2014-12-10T06:00:00Z'),
- created_at: created_at_a)
- release_b = releases.create!(project_id: project.id,
- released_at: Time.zone.parse('2019-09-11T06:00:00Z'),
- created_at: created_at_b)
- release_a.update!(updated_at: nil)
- release_b.update!(updated_at: nil)
-
- disable_migrations_output { migrate! }
-
- release_a.reload
- release_b.reload
- expect(release_a.updated_at).to eq(created_at_a)
- expect(release_b.updated_at).to eq(created_at_b)
- end
-
- it 'does not change updated_at columns with a value' do
- created_at_a = Time.zone.parse('2014-03-11T04:30:00Z')
- updated_at_a = Time.zone.parse('2015-01-16T10:00:00Z')
- created_at_b = Time.zone.parse('2019-09-10T12:00:00Z')
- namespace = namespaces.create!(name: 'foo', path: 'foo')
- project = projects.create!(namespace_id: namespace.id)
- release_a = releases.create!(project_id: project.id,
- released_at: Time.zone.parse('2014-12-10T06:00:00Z'),
- created_at: created_at_a,
- updated_at: updated_at_a)
- release_b = releases.create!(project_id: project.id,
- released_at: Time.zone.parse('2019-09-11T06:00:00Z'),
- created_at: created_at_b)
- release_b.update!(updated_at: nil)
-
- disable_migrations_output { migrate! }
-
- release_a.reload
- release_b.reload
- expect(release_a.updated_at).to eq(updated_at_a)
- expect(release_b.updated_at).to eq(created_at_b)
- end
-end
diff --git a/spec/migrations/backport_enterprise_schema_spec.rb b/spec/migrations/backport_enterprise_schema_spec.rb
deleted file mode 100644
index de6821001b4..00000000000
--- a/spec/migrations/backport_enterprise_schema_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe BackportEnterpriseSchema, schema: 20190329085614 do
- include MigrationsHelpers
-
- def drop_if_exists(table)
- active_record_base.connection.drop_table(table) if active_record_base.connection.table_exists?(table)
- end
-
- describe '#up' do
- it 'creates new EE tables' do
- migrate!
-
- expect(active_record_base.connection.table_exists?(:epics)).to be true
- expect(active_record_base.connection.table_exists?(:geo_nodes)).to be true
- end
-
- context 'missing EE columns' do
- before do
- drop_if_exists(:epics)
-
- active_record_base.connection.create_table "epics" do |t|
- t.integer :group_id, null: false, index: true
- t.integer :author_id, null: false, index: true
- end
- end
-
- after do
- drop_if_exists(:epics)
- end
-
- it 'flags an error' do
- expect { migrate! }.to raise_error(/Your database is missing.*that is present for GitLab EE/)
- end
- end
- end
-end
diff --git a/spec/migrations/change_outbound_local_requests_whitelist_default_spec.rb b/spec/migrations/change_outbound_local_requests_whitelist_default_spec.rb
deleted file mode 100644
index 24e6f3480f9..00000000000
--- a/spec/migrations/change_outbound_local_requests_whitelist_default_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ChangeOutboundLocalRequestsWhitelistDefault do
- let(:application_settings) { table(:application_settings) }
-
- it 'defaults to empty array' do
- setting = application_settings.create!
- setting_with_value = application_settings.create!(outbound_local_requests_whitelist: '{a,b}')
-
- expect(application_settings.where(outbound_local_requests_whitelist: nil).count).to eq(1)
-
- migrate!
-
- expect(application_settings.where(outbound_local_requests_whitelist: nil).count).to eq(0)
- expect(setting.reload.outbound_local_requests_whitelist).to eq([])
- expect(setting_with_value.reload.outbound_local_requests_whitelist).to eq(%w[a b])
- end
-end
diff --git a/spec/migrations/change_packages_size_defaults_in_project_statistics_spec.rb b/spec/migrations/change_packages_size_defaults_in_project_statistics_spec.rb
deleted file mode 100644
index 5e3118b0dea..00000000000
--- a/spec/migrations/change_packages_size_defaults_in_project_statistics_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ChangePackagesSizeDefaultsInProjectStatistics do
- let(:project_statistics) { table(:project_statistics) }
- let(:projects) { table(:projects) }
-
- it 'removes null packages_size' do
- stats_to_migrate = 10
-
- stats_to_migrate.times do |i|
- p = projects.create!(name: "project #{i}", namespace_id: 1)
- project_statistics.create!(project_id: p.id, namespace_id: p.namespace_id)
- end
-
- expect { migrate! }
- .to change { ProjectStatistics.where(packages_size: nil).count }
- .from(stats_to_migrate)
- .to(0)
- end
-
- it 'defaults packages_size to 0' do
- project = projects.create!(name: 'a new project', namespace_id: 2)
- stat = project_statistics.create!(project_id: project.id, namespace_id: project.namespace_id)
-
- expect(stat.packages_size).to be_nil
-
- migrate!
-
- stat.reload
- expect(stat.packages_size).to eq(0)
- end
-end
diff --git a/spec/migrations/clean_up_noteable_id_for_notes_on_commits_spec.rb b/spec/migrations/clean_up_noteable_id_for_notes_on_commits_spec.rb
deleted file mode 100644
index 8de30af13fd..00000000000
--- a/spec/migrations/clean_up_noteable_id_for_notes_on_commits_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CleanUpNoteableIdForNotesOnCommits do
- let(:notes) { table(:notes) }
-
- before do
- notes.create!(noteable_type: 'Commit', commit_id: '3d0a182204cece4857f81c6462720e0ad1af39c9', noteable_id: 3, note: 'Test')
- notes.create!(noteable_type: 'Commit', commit_id: '3d0a182204cece4857f81c6462720e0ad1af39c9', noteable_id: 3, note: 'Test')
- notes.create!(noteable_type: 'Commit', commit_id: '3d0a182204cece4857f81c6462720e0ad1af39c9', noteable_id: 3, note: 'Test')
-
- notes.create!(noteable_type: 'Issue', noteable_id: 1, note: 'Test')
- notes.create!(noteable_type: 'MergeRequest', noteable_id: 1, note: 'Test')
- notes.create!(noteable_type: 'Snippet', noteable_id: 1, note: 'Test')
- end
-
- it 'clears noteable_id for notes on commits' do
- expect { migrate! }.to change { dirty_notes_on_commits.count }.from(3).to(0)
- end
-
- it 'does not clear noteable_id for other notes' do
- expect { migrate! }.not_to change { other_notes.count }
- end
-
- def dirty_notes_on_commits
- notes.where(noteable_type: 'Commit').where.not(noteable_id: nil)
- end
-
- def other_notes
- notes.where("noteable_type != 'Commit' AND noteable_id IS NOT NULL")
- end
-end
diff --git a/spec/migrations/cleanup_legacy_artifact_migration_spec.rb b/spec/migrations/cleanup_legacy_artifact_migration_spec.rb
deleted file mode 100644
index 6362965cc31..00000000000
--- a/spec/migrations/cleanup_legacy_artifact_migration_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!('cleanup_legacy_artifact_migration')
-
-RSpec.describe CleanupLegacyArtifactMigration, :redis do
- let(:migration) { spy('migration') }
-
- context 'when still legacy artifacts exist' do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:pipelines) { table(:ci_pipelines) }
- let(:jobs) { table(:ci_builds) }
- let(:job_artifacts) { table(:ci_job_artifacts) }
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let(:project) { projects.create!(name: 'gitlab', path: 'gitlab-ce', namespace_id: namespace.id) }
- let(:pipeline) { pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a') }
- let(:archive_file_type) { Gitlab::BackgroundMigration::MigrateLegacyArtifacts::ARCHIVE_FILE_TYPE }
- let(:metadata_file_type) { Gitlab::BackgroundMigration::MigrateLegacyArtifacts::METADATA_FILE_TYPE }
- let(:local_store) { ::ObjectStorage::Store::LOCAL }
- let(:remote_store) { ::ObjectStorage::Store::REMOTE }
- let(:legacy_location) { Gitlab::BackgroundMigration::MigrateLegacyArtifacts::LEGACY_PATH_FILE_LOCATION }
-
- before do
- jobs.create!(id: 1, commit_id: pipeline.id, project_id: project.id, status: :success, artifacts_file: 'archive.zip')
- jobs.create!(id: 2, commit_id: pipeline.id, project_id: project.id, status: :failed, artifacts_metadata: 'metadata.gz')
- jobs.create!(id: 3, commit_id: pipeline.id, project_id: project.id, status: :failed, artifacts_file: 'archive.zip', artifacts_metadata: 'metadata.gz')
- jobs.create!(id: 4, commit_id: pipeline.id, project_id: project.id, status: :running)
- jobs.create!(id: 5, commit_id: pipeline.id, project_id: project.id, status: :success, artifacts_file: 'archive.zip', artifacts_file_store: remote_store, artifacts_metadata: 'metadata.gz')
- jobs.create!(id: 6, commit_id: pipeline.id, project_id: project.id, status: :failed, artifacts_file: 'archive.zip', artifacts_metadata: 'metadata.gz')
- end
-
- it 'steals sidekiq jobs from MigrateLegacyArtifacts background migration' do
- expect(Gitlab::BackgroundMigration).to receive(:steal).with('MigrateLegacyArtifacts')
-
- migrate!
- end
-
- it 'migrates legacy artifacts to ci_job_artifacts table' do
- migrate!
-
- expect(job_artifacts.order(:job_id, :file_type).pluck('project_id, job_id, file_type, file_store, size, expire_at, file, file_sha256, file_location'))
- .to eq([[project.id, 1, archive_file_type, local_store, nil, nil, 'archive.zip', nil, legacy_location],
- [project.id, 3, archive_file_type, local_store, nil, nil, 'archive.zip', nil, legacy_location],
- [project.id, 3, metadata_file_type, local_store, nil, nil, 'metadata.gz', nil, legacy_location],
- [project.id, 5, archive_file_type, remote_store, nil, nil, 'archive.zip', nil, legacy_location],
- [project.id, 5, metadata_file_type, local_store, nil, nil, 'metadata.gz', nil, legacy_location],
- [project.id, 6, archive_file_type, local_store, nil, nil, 'archive.zip', nil, legacy_location],
- [project.id, 6, metadata_file_type, local_store, nil, nil, 'metadata.gz', nil, legacy_location]])
- end
- end
-end
diff --git a/spec/migrations/drop_project_ci_cd_settings_merge_trains_enabled_spec.rb b/spec/migrations/drop_project_ci_cd_settings_merge_trains_enabled_spec.rb
deleted file mode 100644
index 3093cd85ced..00000000000
--- a/spec/migrations/drop_project_ci_cd_settings_merge_trains_enabled_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe DropProjectCiCdSettingsMergeTrainsEnabled do
- let!(:project_ci_cd_setting) { table(:project_ci_cd_settings) }
-
- it 'correctly migrates up and down' do
- reversible_migration do |migration|
- migration.before -> {
- expect(project_ci_cd_setting.column_names).to include("merge_trains_enabled")
- }
-
- migration.after -> {
- project_ci_cd_setting.reset_column_information
- expect(project_ci_cd_setting.column_names).not_to include("merge_trains_enabled")
- }
- end
- end
-end
diff --git a/spec/migrations/encrypt_feature_flags_clients_tokens_spec.rb b/spec/migrations/encrypt_feature_flags_clients_tokens_spec.rb
deleted file mode 100644
index 62bd0dafb8e..00000000000
--- a/spec/migrations/encrypt_feature_flags_clients_tokens_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe EncryptFeatureFlagsClientsTokens do
- let(:migration) { described_class.new }
- let(:feature_flags_clients) { table(:operations_feature_flags_clients) }
- let(:projects) { table(:projects) }
- let(:plaintext) { "secret-token" }
- let(:ciphertext) { Gitlab::CryptoHelper.aes256_gcm_encrypt(plaintext, nonce: Gitlab::CryptoHelper::AES256_GCM_IV_STATIC) }
-
- describe '#up' do
- it 'keeps plaintext token the same and populates token_encrypted if not present' do
- project = projects.create!(id: 123, name: 'gitlab1', path: 'gitlab1', namespace_id: 123)
- feature_flags_client = feature_flags_clients.create!(project_id: project.id, token: plaintext)
-
- migration.up
-
- expect(feature_flags_client.reload.token).to eq(plaintext)
- expect(feature_flags_client.reload.token_encrypted).to eq(ciphertext)
- end
- end
-
- describe '#down' do
- it 'decrypts encrypted token and saves it' do
- project = projects.create!(id: 123, name: 'gitlab1', path: 'gitlab1', namespace_id: 123)
- feature_flags_client = feature_flags_clients.create!(project_id: project.id, token_encrypted: ciphertext)
-
- migration.down
-
- expect(feature_flags_client.reload.token).to eq(plaintext)
- expect(feature_flags_client.reload.token_encrypted).to eq(ciphertext)
- end
- end
-end
diff --git a/spec/migrations/encrypt_plaintext_attributes_on_application_settings_spec.rb b/spec/migrations/encrypt_plaintext_attributes_on_application_settings_spec.rb
deleted file mode 100644
index 2e233816b8b..00000000000
--- a/spec/migrations/encrypt_plaintext_attributes_on_application_settings_spec.rb
+++ /dev/null
@@ -1,58 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe EncryptPlaintextAttributesOnApplicationSettings do
- let(:migration) { described_class.new }
- let(:application_settings) { table(:application_settings) }
- let(:plaintext) { 'secret-token' }
-
- plaintext_attributes = %w[
- akismet_api_key
- elasticsearch_aws_secret_access_key
- recaptcha_private_key
- recaptcha_site_key
- slack_app_secret
- slack_app_verification_token
- ].freeze
-
- describe '#up' do
- it 'encrypts token and saves it' do
- application_setting = application_settings.create!
- application_setting.update_columns(
- plaintext_attributes.each_with_object({}) do |plaintext_attribute, attributes|
- attributes[plaintext_attribute] = plaintext
- end
- )
-
- migration.up
-
- application_setting.reload
- plaintext_attributes.each do |plaintext_attribute|
- expect(application_setting[plaintext_attribute]).not_to be_nil
- expect(application_setting["encrypted_#{plaintext_attribute}"]).not_to be_nil
- expect(application_setting["encrypted_#{plaintext_attribute}_iv"]).not_to be_nil
- end
- end
- end
-
- describe '#down' do
- it 'decrypts encrypted token and saves it' do
- application_setting = application_settings.create!(
- plaintext_attributes.each_with_object({}) do |plaintext_attribute, attributes|
- attributes[plaintext_attribute] = plaintext
- end
- )
-
- migration.down
-
- application_setting.reload
- plaintext_attributes.each do |plaintext_attribute|
- expect(application_setting[plaintext_attribute]).to eq(plaintext)
- expect(application_setting["encrypted_#{plaintext_attribute}"]).to be_nil
- expect(application_setting["encrypted_#{plaintext_attribute}_iv"]).to be_nil
- end
- end
- end
-end
diff --git a/spec/migrations/enqueue_reset_merge_status_second_run_spec.rb b/spec/migrations/enqueue_reset_merge_status_second_run_spec.rb
deleted file mode 100644
index 49698f60964..00000000000
--- a/spec/migrations/enqueue_reset_merge_status_second_run_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe EnqueueResetMergeStatusSecondRun do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
- let(:merge_requests) { table(:merge_requests) }
-
- def create_merge_request(id, extra_params = {})
- params = {
- id: id,
- target_project_id: project.id,
- target_branch: 'master',
- source_project_id: project.id,
- source_branch: 'mr name',
- title: "mr name#{id}"
- }.merge(extra_params)
-
- merge_requests.create!(params)
- end
-
- it 'correctly schedules background migrations' do
- create_merge_request(1, state: 'opened', merge_status: 'can_be_merged')
- create_merge_request(2, state: 'opened', merge_status: 'can_be_merged')
- create_merge_request(3, state: 'opened', merge_status: 'can_be_merged')
- create_merge_request(4, state: 'merged', merge_status: 'can_be_merged')
- create_merge_request(5, state: 'opened', merge_status: 'unchecked')
-
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(5.minutes, 1, 2)
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(10.minutes, 3, 4)
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(15.minutes, 5, 5)
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(3)
- end
- end
- end
-end
diff --git a/spec/migrations/enqueue_reset_merge_status_spec.rb b/spec/migrations/enqueue_reset_merge_status_spec.rb
deleted file mode 100644
index d62c99b80bc..00000000000
--- a/spec/migrations/enqueue_reset_merge_status_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe EnqueueResetMergeStatus do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
- let(:merge_requests) { table(:merge_requests) }
-
- def create_merge_request(id, extra_params = {})
- params = {
- id: id,
- target_project_id: project.id,
- target_branch: 'master',
- source_project_id: project.id,
- source_branch: 'mr name',
- title: "mr name#{id}"
- }.merge(extra_params)
-
- merge_requests.create!(params)
- end
-
- it 'correctly schedules background migrations' do
- create_merge_request(1, state: 'opened', merge_status: 'can_be_merged')
- create_merge_request(2, state: 'opened', merge_status: 'can_be_merged')
- create_merge_request(3, state: 'opened', merge_status: 'can_be_merged')
- create_merge_request(4, state: 'merged', merge_status: 'can_be_merged')
- create_merge_request(5, state: 'opened', merge_status: 'unchecked')
-
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(5.minutes, 1, 2)
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(10.minutes, 3, 4)
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(15.minutes, 5, 5)
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(3)
- end
- end
- end
-end
diff --git a/spec/migrations/fill_productivity_analytics_start_date_spec.rb b/spec/migrations/fill_productivity_analytics_start_date_spec.rb
deleted file mode 100644
index b348067a752..00000000000
--- a/spec/migrations/fill_productivity_analytics_start_date_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FillProductivityAnalyticsStartDate do
- let(:settings_table) { table('application_settings') }
- let(:metrics_table) { table('merge_request_metrics') }
-
- before do
- settings_table.create!
- end
-
- context 'with NO productivity analytics data available' do
- it 'sets start_date to NOW' do
- expect { migrate! }.to change {
- settings_table.first&.productivity_analytics_start_date
- }.to(be_like_time(Time.now))
- end
- end
-
- context 'with productivity analytics data available' do
- before do
- ActiveRecord::Base.transaction do
- ActiveRecord::Base.connection.execute('ALTER TABLE merge_request_metrics DISABLE TRIGGER ALL')
- metrics_table.create!(merged_at: Time.parse('2019-09-09'), commits_count: nil, merge_request_id: 3)
- metrics_table.create!(merged_at: Time.parse('2019-10-10'), commits_count: 5, merge_request_id: 1)
- metrics_table.create!(merged_at: Time.parse('2019-11-11'), commits_count: 10, merge_request_id: 2)
- ActiveRecord::Base.connection.execute('ALTER TABLE merge_request_metrics ENABLE TRIGGER ALL')
- end
- end
-
- it 'set start_date to earliest merged_at value with PA data available' do
- expect { migrate! }.to change {
- settings_table.first&.productivity_analytics_start_date
- }.to(be_like_time(Time.parse('2019-10-10')))
- end
- end
-end
diff --git a/spec/migrations/fix_max_pages_size_spec.rb b/spec/migrations/fix_max_pages_size_spec.rb
deleted file mode 100644
index 97cf026df5c..00000000000
--- a/spec/migrations/fix_max_pages_size_spec.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixMaxPagesSize do
- let(:application_settings) { table(:application_settings) }
- let!(:default_setting) { application_settings.create! }
- let!(:max_possible_setting) { application_settings.create!(max_pages_size: described_class::MAX_SIZE) }
- let!(:higher_than_maximum_setting) { application_settings.create!(max_pages_size: described_class::MAX_SIZE + 1) }
-
- it 'correctly updates settings only if needed' do
- migrate!
-
- expect(default_setting.reload.max_pages_size).to eq(100)
- expect(max_possible_setting.reload.max_pages_size).to eq(described_class::MAX_SIZE)
- expect(higher_than_maximum_setting.reload.max_pages_size).to eq(described_class::MAX_SIZE)
- end
-end
diff --git a/spec/migrations/fix_null_type_labels_spec.rb b/spec/migrations/fix_null_type_labels_spec.rb
deleted file mode 100644
index 4f902b92393..00000000000
--- a/spec/migrations/fix_null_type_labels_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixNullTypeLabels do
- let(:migration) { described_class.new }
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
- let(:labels) { table(:labels) }
-
- before do
- group = namespaces.create!(name: 'labels-test-project', path: 'labels-test-project', type: 'Group')
- project = projects.create!(namespace_id: group.id, name: 'labels-test-group', path: 'labels-test-group')
-
- @template_label = labels.create!(title: 'template', template: true)
- @project_label = labels.create!(title: 'project label', project_id: project.id, type: 'ProjectLabel')
- @group_label = labels.create!(title: 'group_label', group_id: group.id, type: 'GroupLabel')
- @broken_label_1 = labels.create!(title: 'broken 1', project_id: project.id)
- @broken_label_2 = labels.create!(title: 'broken 2', project_id: project.id)
- end
-
- describe '#up' do
- it 'fix labels with type missing' do
- migration.up
-
- # Labels that requires type change
- expect(@broken_label_1.reload.type).to eq('ProjectLabel')
- expect(@broken_label_2.reload.type).to eq('ProjectLabel')
- # Labels out of scope
- expect(@template_label.reload.type).to be_nil
- expect(@project_label.reload.type).to eq('ProjectLabel')
- expect(@group_label.reload.type).to eq('GroupLabel')
- end
- end
-end
diff --git a/spec/migrations/fix_pool_repository_source_project_id_spec.rb b/spec/migrations/fix_pool_repository_source_project_id_spec.rb
deleted file mode 100644
index 2ee4c458c3c..00000000000
--- a/spec/migrations/fix_pool_repository_source_project_id_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixPoolRepositorySourceProjectId do
- let(:projects) { table(:projects) }
- let(:pool_repositories) { table(:pool_repositories) }
- let(:shards) { table(:shards) }
-
- it 'fills in source_project_ids' do
- shard = shards.create!(name: 'default')
-
- # gitaly is a project with a pool repository that has a source_project_id
- gitaly = projects.create!(name: 'gitaly', path: 'gitlab-org/gitaly', namespace_id: 1)
- pool_repository = pool_repositories.create!(shard_id: shard.id, source_project_id: gitaly.id)
- gitaly.update_column(:pool_repository_id, pool_repository.id)
-
- # gitlab is a project with a pool repository that's missing a source_project_id
- pool_repository_without_source_project = pool_repositories.create!(shard_id: shard.id, source_project_id: nil)
- gitlab = projects.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: 1, pool_repository_id: pool_repository_without_source_project.id)
- projects.create!(name: 'gitlab-fork-1', path: 'my-org-1/gitlab-ce', namespace_id: 1, pool_repository_id: pool_repository_without_source_project.id)
-
- migrate!
-
- expect(pool_repositories.find(pool_repository_without_source_project.id).source_project_id).to eq(gitlab.id)
- expect(pool_repositories.find(pool_repository.id).source_project_id).to eq(gitaly.id)
- end
-end
diff --git a/spec/migrations/fix_wrong_pages_access_level_spec.rb b/spec/migrations/fix_wrong_pages_access_level_spec.rb
deleted file mode 100644
index 00a620b4426..00000000000
--- a/spec/migrations/fix_wrong_pages_access_level_spec.rb
+++ /dev/null
@@ -1,99 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixWrongPagesAccessLevel, :sidekiq_might_not_need_inline, schema: 20190628185004 do
- using RSpec::Parameterized::TableSyntax
-
- let(:migration_class) { described_class::MIGRATION }
- let(:migration_name) { migration_class.to_s.demodulize }
-
- project_class = ::Gitlab::BackgroundMigration::FixPagesAccessLevel::Project
- feature_class = ::Gitlab::BackgroundMigration::FixPagesAccessLevel::ProjectFeature
-
- let(:namespaces_table) { table(:namespaces) }
- let(:projects_table) { table(:projects) }
- let(:features_table) { table(:project_features) }
-
- let(:subgroup) do
- root_group = namespaces_table.create!(path: "group", name: "group")
- namespaces_table.create!(path: "subgroup", name: "group", parent_id: root_group.id)
- end
-
- def create_project_feature(path, project_visibility, pages_access_level)
- project = projects_table.create!(path: path, visibility_level: project_visibility,
- namespace_id: subgroup.id)
- features_table.create!(project_id: project.id, pages_access_level: pages_access_level)
- end
-
- it 'correctly schedules background migrations' do
- Sidekiq::Testing.fake! do
- freeze_time do
- first_id = create_project_feature("project1", project_class::PRIVATE, feature_class::PRIVATE).id
- last_id = create_project_feature("project2", project_class::PRIVATE, feature_class::PUBLIC).id
-
- migrate!
-
- expect(migration_name).to be_scheduled_delayed_migration(2.minutes, first_id, last_id)
- expect(BackgroundMigrationWorker.jobs.size).to eq(1)
- end
- end
- end
-
- def expect_migration
- expect do
- perform_enqueued_jobs do
- migrate!
- end
- end
- end
-
- where(:project_visibility, :pages_access_level, :access_control_is_enabled,
- :pages_deployed, :resulting_pages_access_level) do
- # update settings for public projects regardless of access_control being enabled
- project_class::PUBLIC | feature_class::PUBLIC | true | true | feature_class::ENABLED
- project_class::PUBLIC | feature_class::PUBLIC | false | true | feature_class::ENABLED
- # don't update public level for private and internal projects
- project_class::PRIVATE | feature_class::PUBLIC | true | true | feature_class::PUBLIC
- project_class::INTERNAL | feature_class::PUBLIC | true | true | feature_class::PUBLIC
-
- # if access control is disabled but pages are deployed we make them public
- project_class::INTERNAL | feature_class::ENABLED | false | true | feature_class::PUBLIC
- # don't change anything if one of the conditions is not satisfied
- project_class::INTERNAL | feature_class::ENABLED | true | true | feature_class::ENABLED
- project_class::INTERNAL | feature_class::ENABLED | true | false | feature_class::ENABLED
-
- # private projects
- # if access control is enabled update pages_access_level to private regardless of deployment
- project_class::PRIVATE | feature_class::ENABLED | true | true | feature_class::PRIVATE
- project_class::PRIVATE | feature_class::ENABLED | true | false | feature_class::PRIVATE
- # if access control is disabled and pages are deployed update pages_access_level to public
- project_class::PRIVATE | feature_class::ENABLED | false | true | feature_class::PUBLIC
- # if access control is disabled but pages aren't deployed update pages_access_level to private
- project_class::PRIVATE | feature_class::ENABLED | false | false | feature_class::PRIVATE
- end
-
- with_them do
- let!(:project_feature) do
- create_project_feature("projectpath", project_visibility, pages_access_level)
- end
-
- before do
- tested_path = File.join(Settings.pages.path, "group/subgroup/projectpath", "public")
- allow(Dir).to receive(:exist?).with(tested_path).and_return(pages_deployed)
-
- stub_pages_setting(access_control: access_control_is_enabled)
- end
-
- it "sets proper pages_access_level" do
- expect(project_feature.reload.pages_access_level).to eq(pages_access_level)
-
- perform_enqueued_jobs do
- migrate!
- end
-
- expect(project_feature.reload.pages_access_level).to eq(resulting_pages_access_level)
- end
- end
-end
diff --git a/spec/migrations/generate_lets_encrypt_private_key_spec.rb b/spec/migrations/generate_lets_encrypt_private_key_spec.rb
deleted file mode 100644
index 8525a7bbd1c..00000000000
--- a/spec/migrations/generate_lets_encrypt_private_key_spec.rb
+++ /dev/null
@@ -1,14 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe GenerateLetsEncryptPrivateKey do
- describe '#up' do
- it 'does not fail' do
- expect do
- described_class.new.up
- end.not_to raise_error
- end
- end
-end
diff --git a/spec/migrations/insert_project_hooks_plan_limits_spec.rb b/spec/migrations/insert_project_hooks_plan_limits_spec.rb
deleted file mode 100644
index 365dd679d76..00000000000
--- a/spec/migrations/insert_project_hooks_plan_limits_spec.rb
+++ /dev/null
@@ -1,67 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe InsertProjectHooksPlanLimits do
- let(:migration) { described_class.new }
- let(:plans) { table(:plans) }
- let(:plan_limits) { table(:plan_limits) }
-
- before do
- plans.create!(id: 34, name: 'free')
- plans.create!(id: 2, name: 'bronze')
- plans.create!(id: 3, name: 'silver')
- plans.create!(id: 4, name: 'gold')
- plan_limits.create!(plan_id: 34, ci_active_jobs: 5)
- end
-
- context 'when on Gitlab.com' do
- before do
- expect(Gitlab).to receive(:com?).at_most(:twice).and_return(true)
- end
-
- describe '#up' do
- it 'updates the project_hooks plan limits' do
- migration.up
-
- expect(plan_limits.pluck(:plan_id, :project_hooks, :ci_active_jobs))
- .to match_array([[34, 10, 5], [2, 20, 0], [3, 30, 0], [4, 100, 0]])
- end
- end
-
- describe '#down' do
- it 'updates the project_hooks plan limits to 0' do
- migration.up
- migration.down
-
- expect(plan_limits.pluck(:plan_id, :project_hooks, :ci_active_jobs))
- .to match_array([[34, 0, 5], [2, 0, 0], [3, 0, 0], [4, 0, 0]])
- end
- end
- end
-
- context 'when on self-hosted' do
- before do
- expect(Gitlab).to receive(:com?).and_return(false)
- end
-
- describe '#up' do
- it 'does not update the plan limits' do
- migration.up
-
- expect(plan_limits.pluck(:plan_id, :project_hooks, :ci_active_jobs))
- .to match_array([[34, 0, 5]])
- end
- end
-
- describe '#down' do
- it 'does not update the plan limits' do
- migration.down
-
- expect(plan_limits.pluck(:plan_id, :project_hooks, :ci_active_jobs))
- .to match_array([[34, 0, 5]])
- end
- end
- end
-end
diff --git a/spec/migrations/migrate_auto_dev_ops_domain_to_cluster_domain_spec.rb b/spec/migrations/migrate_auto_dev_ops_domain_to_cluster_domain_spec.rb
deleted file mode 100644
index a836fb4bfb9..00000000000
--- a/spec/migrations/migrate_auto_dev_ops_domain_to_cluster_domain_spec.rb
+++ /dev/null
@@ -1,114 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe MigrateAutoDevOpsDomainToClusterDomain do
- include MigrationHelpers::ClusterHelpers
-
- let(:migration) { described_class.new }
- let(:project_auto_devops_table) { table(:project_auto_devops) }
- let(:clusters_table) { table(:clusters) }
- let(:cluster_projects_table) { table(:cluster_projects) }
-
- # Following lets are needed by MigrationHelpers::ClusterHelpers
- let(:cluster_kubernetes_namespaces_table) { table(:clusters_kubernetes_namespaces) }
- let(:projects_table) { table(:projects) }
- let(:namespaces_table) { table(:namespaces) }
- let(:provider_gcp_table) { table(:cluster_providers_gcp) }
- let(:platform_kubernetes_table) { table(:cluster_platforms_kubernetes) }
-
- before do
- setup_cluster_projects_with_domain(quantity: 20, domain: domain)
- end
-
- context 'with ProjectAutoDevOps with no domain' do
- let(:domain) { nil }
-
- it 'does not update cluster project' do
- migrate!
-
- expect(clusters_without_domain.count).to eq(clusters_table.count)
- end
- end
-
- context 'with ProjectAutoDevOps with domain' do
- let(:domain) { 'example-domain.com' }
-
- it 'updates all cluster projects' do
- migrate!
-
- expect(clusters_with_domain.count).to eq(clusters_table.count)
- end
- end
-
- context 'when only some ProjectAutoDevOps have domain set' do
- let(:domain) { 'example-domain.com' }
-
- before do
- setup_cluster_projects_with_domain(quantity: 25, domain: nil)
- end
-
- it 'only updates specific cluster projects' do
- migrate!
-
- expect(clusters_with_domain.count).to eq(20)
-
- project_auto_devops_with_domain.each do |project_auto_devops|
- cluster_project = find_cluster_project(project_auto_devops.project_id)
- cluster = find_cluster(cluster_project.cluster_id)
-
- expect(cluster.domain).to be_present
- end
-
- expect(clusters_without_domain.count).to eq(25)
-
- project_auto_devops_without_domain.each do |project_auto_devops|
- cluster_project = find_cluster_project(project_auto_devops.project_id)
- cluster = find_cluster(cluster_project.cluster_id)
-
- expect(cluster.domain).not_to be_present
- end
- end
- end
-
- def setup_cluster_projects_with_domain(quantity:, domain:)
- create_cluster_project_list(quantity)
-
- cluster_projects = cluster_projects_table.last(quantity)
-
- cluster_projects.each do |cluster_project|
- specific_domain = "#{cluster_project.id}-#{domain}" if domain
-
- project_auto_devops_table.create!(
- project_id: cluster_project.project_id,
- enabled: true,
- domain: specific_domain
- )
- end
- end
-
- def find_cluster_project(project_id)
- cluster_projects_table.find_by(project_id: project_id)
- end
-
- def find_cluster(cluster_id)
- clusters_table.find_by(id: cluster_id)
- end
-
- def project_auto_devops_with_domain
- project_auto_devops_table.where.not("domain IS NULL OR domain = ''")
- end
-
- def project_auto_devops_without_domain
- project_auto_devops_table.where("domain IS NULL OR domain = ''")
- end
-
- def clusters_with_domain
- clusters_table.where.not("domain IS NULL OR domain = ''")
- end
-
- def clusters_without_domain
- clusters_table.where("domain IS NULL OR domain = ''")
- end
-end
diff --git a/spec/migrations/migrate_code_owner_approval_status_to_protected_branches_in_batches_spec.rb b/spec/migrations/migrate_code_owner_approval_status_to_protected_branches_in_batches_spec.rb
deleted file mode 100644
index 121ff3d6622..00000000000
--- a/spec/migrations/migrate_code_owner_approval_status_to_protected_branches_in_batches_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe MigrateCodeOwnerApprovalStatusToProtectedBranchesInBatches do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:protected_branches) { table(:protected_branches) }
-
- let(:namespace) do
- namespaces.create!(
- path: 'gitlab-instance-administrators',
- name: 'GitLab Instance Administrators'
- )
- end
-
- let(:project) do
- projects.create!(
- namespace_id: namespace.id,
- name: 'GitLab Instance Administration'
- )
- end
-
- let!(:protected_branch_1) do
- protected_branches.create!(
- name: "branch name",
- project_id: project.id
- )
- end
-
- describe '#up' do
- context "when there's no projects needing approval" do
- it "doesn't change any protected branch records" do
- expect { migrate! }
- .not_to change { ProtectedBranch.where(code_owner_approval_required: true).count }
- end
- end
-
- context "when there's a project needing approval" do
- let!(:project_needing_approval) do
- projects.create!(
- namespace_id: namespace.id,
- name: 'GitLab Instance Administration',
- merge_requests_require_code_owner_approval: true
- )
- end
-
- let!(:protected_branch_2) do
- protected_branches.create!(
- name: "branch name",
- project_id: project_needing_approval.id
- )
- end
-
- it "changes N protected branch records" do
- expect { migrate! }
- .to change { ProtectedBranch.where(code_owner_approval_required: true).count }
- .by(1)
- end
- end
- end
-end
diff --git a/spec/migrations/migrate_discussion_id_on_promoted_epics_spec.rb b/spec/migrations/migrate_discussion_id_on_promoted_epics_spec.rb
deleted file mode 100644
index e42baab9927..00000000000
--- a/spec/migrations/migrate_discussion_id_on_promoted_epics_spec.rb
+++ /dev/null
@@ -1,81 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe MigrateDiscussionIdOnPromotedEpics do
- let(:migration_class) { described_class::MIGRATION }
- let(:migration_name) { migration_class.to_s.demodulize }
-
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:users) { table(:users) }
- let(:issues) { table(:issues) }
- let(:epics) { table(:epics) }
- let(:notes) { table(:notes) }
- let(:system_note_metadata) { table(:system_note_metadata) }
-
- let(:user) { users.create!(email: 'test@example.com', projects_limit: 100, username: 'test') }
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
-
- def create_promotion_note(model, id)
- note = create_note(model, id, { system: true,
- note: 'promoted from issue XXX' })
- system_note_metadata.create!(note_id: note.id, action: 'moved')
- end
-
- def create_epic
- epics.create!(author_id: user.id, iid: epics.maximum(:iid).to_i + 1,
- group_id: namespace.id,
- title: 'Epic with discussion',
- title_html: 'Epic with discussion')
- end
-
- def create_note(model, id, extra_params = {})
- params = {
- note: 'note',
- noteable_id: model.id,
- noteable_type: model.class.name,
- discussion_id: id
- }.merge(extra_params)
-
- notes.create!(params)
- end
-
- context 'with promoted epic' do
- let(:epic1) { create_epic }
- let!(:note1) { create_promotion_note(epic1, 'id1') }
-
- it 'correctly schedules background migrations in batches' do
- create_note(epic1, 'id2')
- create_note(epic1, 'id3')
-
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(migration_name).to be_scheduled_delayed_migration(2.minutes, %w(id1 id2))
- expect(migration_name).to be_scheduled_delayed_migration(4.minutes, %w(id3))
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-
- it 'schedules only promoted epics' do
- issue = issues.create!(description: 'first', state: 'opened')
- create_promotion_note(issue, 'id2')
- create_note(create_epic, 'id3')
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(migration_name).to be_scheduled_delayed_migration(2.minutes, %w(id1))
- expect(BackgroundMigrationWorker.jobs.size).to eq(1)
- end
- end
- end
- end
-end
diff --git a/spec/migrations/migrate_k8s_service_integration_spec.rb b/spec/migrations/migrate_k8s_service_integration_spec.rb
deleted file mode 100644
index ba6071b72e4..00000000000
--- a/spec/migrations/migrate_k8s_service_integration_spec.rb
+++ /dev/null
@@ -1,162 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe MigrateK8sServiceIntegration do
- context 'template service' do
- context 'with namespace' do
- let!(:service) do
- MigrateK8sServiceIntegration::Service.create!(
- active: true,
- template: true,
- category: 'deployment',
- type: 'KubernetesService',
- properties: "{\"namespace\":\"prod\",\"api_url\":\"https://sample.kubernetes.com\",\"ca_pem\":\"ca_pem-sample\",\"token\":\"token-sample\"}"
- )
- end
-
- let(:cluster) { MigrateK8sServiceIntegration::Cluster.instance_type.last! }
- let(:platform) { cluster.platform_kubernetes }
-
- it 'migrates the KubernetesService template to Platform::Kubernetes' do
- expect { migrate! }.to change { MigrateK8sServiceIntegration::Cluster.count }.by(1)
-
- expect(cluster).to be_enabled
- expect(cluster).to be_user
- expect(cluster).not_to be_managed
- expect(cluster.environment_scope).to eq('*')
- expect(platform.api_url).to eq('https://sample.kubernetes.com')
- expect(platform.ca_cert).to eq('ca_pem-sample')
- expect(platform.namespace).to eq('prod')
- expect(platform.token).to eq('token-sample')
- end
- end
-
- context 'without namespace' do
- let!(:service) do
- MigrateK8sServiceIntegration::Service.create!(
- active: true,
- template: true,
- category: 'deployment',
- type: 'KubernetesService',
- properties: "{\"namespace\":\"\",\"api_url\":\"https://sample.kubernetes.com\",\"ca_pem\":\"ca_pem-sample\",\"token\":\"token-sample\"}"
- )
- end
-
- let(:cluster) { MigrateK8sServiceIntegration::Cluster.instance_type.last! }
- let(:platform) { cluster.platform_kubernetes }
-
- it 'migrates the KubernetesService template to Platform::Kubernetes' do
- expect { migrate! }.to change { MigrateK8sServiceIntegration::Cluster.count }.by(1)
-
- expect(cluster).to be_enabled
- expect(cluster).to be_user
- expect(cluster).not_to be_managed
- expect(cluster.environment_scope).to eq('*')
- expect(platform.api_url).to eq('https://sample.kubernetes.com')
- expect(platform.ca_cert).to eq('ca_pem-sample')
- expect(platform.namespace).to be_nil
- expect(platform.token).to eq('token-sample')
- end
- end
-
- context 'with nullified parameters' do
- let!(:service) do
- MigrateK8sServiceIntegration::Service.create!(
- active: true,
- template: true,
- category: 'deployment',
- type: 'KubernetesService',
- properties: "{}"
- )
- end
-
- it 'does not migrate the KubernetesService' do
- expect { migrate! }.not_to change { MigrateK8sServiceIntegration::Cluster.count }
- end
- end
-
- context 'when disabled' do
- let!(:service) do
- MigrateK8sServiceIntegration::Service.create!(
- active: false,
- template: true,
- category: 'deployment',
- type: 'KubernetesService',
- properties: "{\"namespace\":\"prod\",\"api_url\":\"https://sample.kubernetes.com\",\"ca_pem\":\"ca_pem-sample\",\"token\":\"token-sample\"}"
- )
- end
-
- let(:cluster) { MigrateK8sServiceIntegration::Cluster.instance_type.last! }
- let(:platform) { cluster.platform_kubernetes }
-
- it 'migrates the KubernetesService template to Platform::Kubernetes' do
- expect { migrate! }.to change { MigrateK8sServiceIntegration::Cluster.count }.by(1)
-
- expect(cluster).not_to be_enabled
- expect(cluster).to be_user
- expect(cluster).not_to be_managed
- expect(cluster.environment_scope).to eq('*')
- expect(platform.api_url).to eq('https://sample.kubernetes.com')
- expect(platform.ca_cert).to eq('ca_pem-sample')
- expect(platform.namespace).to eq('prod')
- expect(platform.token).to eq('token-sample')
- end
- end
-
- context 'when an instance cluster already exists' do
- let!(:service) do
- MigrateK8sServiceIntegration::Service.create!(
- active: true,
- template: true,
- category: 'deployment',
- type: 'KubernetesService',
- properties: "{\"namespace\":\"prod\",\"api_url\":\"https://sample.kubernetes.com\",\"ca_pem\":\"ca_pem-sample\",\"token\":\"token-sample\"}"
- )
- end
-
- let!(:existing_cluster) do
- MigrateK8sServiceIntegration::Cluster.create!(
- name: 'test-cluster',
- cluster_type: :instance_type,
- managed: true,
- provider_type: :user,
- platform_type: :kubernetes
- )
- end
-
- let(:new_cluster) { MigrateK8sServiceIntegration::Cluster.instance_type.last! }
- let(:platform) { new_cluster.platform_kubernetes }
-
- it 'migrates the KubernetesService template to disabled Platform::Kubernetes' do
- expect { migrate! }.to change { MigrateK8sServiceIntegration::Cluster.count }.by(1)
-
- expect(new_cluster).not_to be_enabled
- expect(new_cluster).to be_user
- expect(new_cluster).not_to be_managed
- expect(new_cluster.environment_scope).to eq('*')
- expect(platform.api_url).to eq('https://sample.kubernetes.com')
- expect(platform.ca_cert).to eq('ca_pem-sample')
- expect(platform.namespace).to eq('prod')
- expect(platform.token).to eq('token-sample')
- end
- end
- end
-
- context 'non-template service' do
- let!(:service) do
- MigrateK8sServiceIntegration::Service.create!(
- active: true,
- template: false,
- category: 'deployment',
- type: 'KubernetesService',
- properties: "{\"namespace\":\"prod\",\"api_url\":\"https://sample.kubernetes.com\",\"ca_pem\":\"ca_pem-sample\",\"token\":\"token-sample\"}"
- )
- end
-
- it 'does not migrate the KubernetesService' do
- expect { migrate! }.not_to change { MigrateK8sServiceIntegration::Cluster.count }
- end
- end
-end
diff --git a/spec/migrations/migrate_legacy_managed_clusters_to_unmanaged_spec.rb b/spec/migrations/migrate_legacy_managed_clusters_to_unmanaged_spec.rb
deleted file mode 100644
index 3d8685c7619..00000000000
--- a/spec/migrations/migrate_legacy_managed_clusters_to_unmanaged_spec.rb
+++ /dev/null
@@ -1,55 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe MigrateLegacyManagedClustersToUnmanaged do
- let(:cluster_type) { 'project_type' }
- let(:created_at) { 1.hour.ago }
-
- let!(:cluster) do
- table(:clusters).create!(
- name: 'cluster',
- cluster_type: described_class::Cluster.cluster_types[cluster_type],
- managed: true,
- created_at: created_at
- )
- end
-
- it 'marks the cluster as unmanaged' do
- migrate!
- expect(cluster.reload).not_to be_managed
- end
-
- context 'cluster is not project type' do
- let(:cluster_type) { 'group_type' }
-
- it 'does not update the cluster' do
- migrate!
- expect(cluster.reload).to be_managed
- end
- end
-
- context 'cluster has a kubernetes namespace associated' do
- before do
- table(:clusters_kubernetes_namespaces).create!(
- cluster_id: cluster.id,
- namespace: 'namespace'
- )
- end
-
- it 'does not update the cluster' do
- migrate!
- expect(cluster.reload).to be_managed
- end
- end
-
- context 'cluster was recently created' do
- let(:created_at) { 2.minutes.ago }
-
- it 'does not update the cluster' do
- migrate!
- expect(cluster.reload).to be_managed
- end
- end
-end
diff --git a/spec/migrations/migrate_managed_clusters_with_no_token_to_unmanaged_spec.rb b/spec/migrations/migrate_managed_clusters_with_no_token_to_unmanaged_spec.rb
deleted file mode 100644
index b753b84ae55..00000000000
--- a/spec/migrations/migrate_managed_clusters_with_no_token_to_unmanaged_spec.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe MigrateManagedClustersWithNoTokenToUnmanaged do
- let(:cluster_type) { 'project_type' }
- let(:created_at) { Date.new(2018, 11, 1).midnight }
-
- let!(:cluster) do
- table(:clusters).create!(
- name: 'cluster',
- cluster_type: described_class::Cluster.cluster_types[cluster_type],
- managed: true,
- created_at: created_at
- )
- end
-
- let!(:kubernetes_namespace) do
- table(:clusters_kubernetes_namespaces).create!(
- cluster_id: cluster.id,
- namespace: 'namespace'
- )
- end
-
- it 'marks the cluster as unmanaged' do
- migrate!
- expect(cluster.reload).not_to be_managed
- end
-
- context 'cluster is not project type' do
- let(:cluster_type) { 'group_type' }
-
- it 'does not update the cluster' do
- migrate!
- expect(cluster.reload).to be_managed
- end
- end
-
- context 'kubernetes namespace has a service account token' do
- before do
- kubernetes_namespace.update!(encrypted_service_account_token: "TOKEN")
- end
-
- it 'does not update the cluster' do
- migrate!
- expect(cluster.reload).to be_managed
- end
- end
-
- context 'cluster was created after the cutoff' do
- let(:created_at) { Date.new(2019, 1, 1).midnight }
-
- it 'does not update the cluster' do
- migrate!
- expect(cluster.reload).to be_managed
- end
- end
-end
diff --git a/spec/migrations/migrate_ops_feature_flags_scopes_target_user_ids_spec.rb b/spec/migrations/migrate_ops_feature_flags_scopes_target_user_ids_spec.rb
deleted file mode 100644
index 5caf03992dd..00000000000
--- a/spec/migrations/migrate_ops_feature_flags_scopes_target_user_ids_spec.rb
+++ /dev/null
@@ -1,135 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe MigrateOpsFeatureFlagsScopesTargetUserIds do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:flags) { table(:operations_feature_flags) }
- let(:scopes) { table(:operations_feature_flag_scopes) }
-
- def setup
- namespace = namespaces.create!(name: 'foo', path: 'foo')
- project = projects.create!(namespace_id: namespace.id)
- flags.create!(project_id: project.id, active: true, name: 'test_flag')
- end
-
- it 'migrates successfully when there are no scopes in the database' do
- setup
-
- disable_migrations_output { migrate! }
-
- expect(scopes.count).to eq(0)
- end
-
- it 'migrates a disabled scope with gradualRolloutUserId and userWithId strategies' do
- flag = setup
- scope = scopes.create!(feature_flag_id: flag.id, active: false, strategies: [
- { name: 'gradualRolloutUserId', parameters: { groupId: 'default', percentage: '50' } },
- { name: 'userWithId', parameters: { userIds: '5' } }
- ])
-
- disable_migrations_output { migrate! }
-
- scope.reload
- expect(scope.active).to eq(true)
- expect(scope.strategies).to eq([{ 'name' => 'userWithId', 'parameters' => { 'userIds' => '5' } }])
- end
-
- it 'migrates a disabled scope with default and userWithId strategies' do
- flag = setup
- scope = scopes.create!(feature_flag_id: flag.id, active: false, strategies: [
- { name: 'default', parameters: {} },
- { name: 'userWithId', parameters: { userIds: 'amy@gmail.com,karen@gmail.com' } }
- ])
-
- disable_migrations_output { migrate! }
-
- scope.reload
- expect(scope.active).to eq(true)
- expect(scope.strategies).to eq([{ 'name' => 'userWithId', 'parameters' => { 'userIds' => 'amy@gmail.com,karen@gmail.com' } }])
- end
-
- it 'migrates an enabled scope with default and userWithId strategies' do
- flag = setup
- scope = scopes.create!(feature_flag_id: flag.id, active: true, strategies: [
- { name: 'default', parameters: {} },
- { name: 'userWithId', parameters: { userIds: 'tim' } }
- ])
-
- disable_migrations_output { migrate! }
-
- scope.reload
- expect(scope.active).to eq(true)
- expect(scope.strategies).to eq([{ 'name' => 'default', 'parameters' => {} }])
- end
-
- it 'does not alter an enabled scope with gradualRolloutUserId and userWithId strategies' do
- flag = setup
- scope = scopes.create!(feature_flag_id: flag.id, active: true, strategies: [
- { name: 'gradualRolloutUserId', parameters: { groupId: 'default', percentage: '50' } },
- { name: 'userWithId', parameters: { userIds: '5' } }
- ])
-
- disable_migrations_output { migrate! }
-
- scope.reload
- expect(scope.active).to eq(true)
- expect(scope.strategies).to eq([
- { 'name' => 'gradualRolloutUserId', 'parameters' => { 'groupId' => 'default', 'percentage' => '50' } },
- { 'name' => 'userWithId', 'parameters' => { 'userIds' => '5' } }
- ])
- end
-
- it 'does not alter a disabled scope without a userWithId strategy' do
- flag = setup
- scope = scopes.create!(feature_flag_id: flag.id, active: false, strategies: [
- { name: 'gradualRolloutUserId', parameters: { percentage: '60' } }
- ])
-
- disable_migrations_output { migrate! }
-
- scope.reload
- expect(scope.active).to eq(false)
- expect(scope.strategies).to eq([
- { 'name' => 'gradualRolloutUserId', 'parameters' => { 'percentage' => '60' } }
- ])
- end
-
- it 'does not alter an enabled scope without a userWithId strategy' do
- flag = setup
- scope = scopes.create!(feature_flag_id: flag.id, active: true, strategies: [
- { name: 'default', parameters: {} }
- ])
-
- disable_migrations_output { migrate! }
-
- scope.reload
- expect(scope.active).to eq(true)
- expect(scope.strategies).to eq([
- { 'name' => 'default', 'parameters' => {} }
- ])
- end
-
- it 'migrates multiple scopes' do
- flag = setup
- scope_a = scopes.create!(feature_flag_id: flag.id, active: false, strategies: [
- { name: 'gradualRolloutUserId', parameters: { groupId: 'default', percentage: '50' } },
- { name: 'userWithId', parameters: { userIds: '5,6,7' } }
- ])
- scope_b = scopes.create!(feature_flag_id: flag.id, active: false, environment_scope: 'production', strategies: [
- { name: 'default', parameters: {} },
- { name: 'userWithId', parameters: { userIds: 'lisa,carol' } }
- ])
-
- disable_migrations_output { migrate! }
-
- scope_a.reload
- scope_b.reload
- expect(scope_a.active).to eq(true)
- expect(scope_a.strategies).to eq([{ 'name' => 'userWithId', 'parameters' => { 'userIds' => '5,6,7' } }])
- expect(scope_b.active).to eq(true)
- expect(scope_b.strategies).to eq([{ 'name' => 'userWithId', 'parameters' => { 'userIds' => 'lisa,carol' } }])
- end
-end
diff --git a/spec/migrations/migrate_storage_migrator_sidekiq_queue_spec.rb b/spec/migrations/migrate_storage_migrator_sidekiq_queue_spec.rb
deleted file mode 100644
index 4db819f2fa1..00000000000
--- a/spec/migrations/migrate_storage_migrator_sidekiq_queue_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe MigrateStorageMigratorSidekiqQueue, :redis do
- include Gitlab::Database::MigrationHelpers
- include StubWorker
-
- context 'when there are jobs in the queues' do
- it 'correctly migrates queue when migrating up' do
- Sidekiq::Testing.disable! do
- stub_worker(queue: :storage_migrator).perform_async(1, 5)
-
- described_class.new.up
-
- expect(sidekiq_queue_length('storage_migrator')).to eq 0
- expect(sidekiq_queue_length('hashed_storage:hashed_storage_migrator')).to eq 1
- end
- end
-
- it 'correctly migrates queue when migrating down' do
- Sidekiq::Testing.disable! do
- stub_worker(queue: :'hashed_storage:hashed_storage_migrator').perform_async(1, 5)
-
- described_class.new.down
-
- expect(sidekiq_queue_length('storage_migrator')).to eq 1
- expect(sidekiq_queue_length('hashed_storage:hashed_storage_migrator')).to eq 0
- end
- end
- end
-
- context 'when there are no jobs in the queues' do
- it 'does not raise error when migrating up' do
- expect { described_class.new.up }.not_to raise_error
- end
-
- it 'does not raise error when migrating down' do
- expect { described_class.new.down }.not_to raise_error
- end
- end
-end
diff --git a/spec/migrations/move_limits_from_plans_spec.rb b/spec/migrations/move_limits_from_plans_spec.rb
deleted file mode 100644
index 92ac804733f..00000000000
--- a/spec/migrations/move_limits_from_plans_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe MoveLimitsFromPlans do
- let(:plans) { table(:plans) }
- let(:plan_limits) { table(:plan_limits) }
-
- let!(:gold_plan) { plans.create!(name: 'gold', title: 'Gold', active_pipelines_limit: 20, pipeline_size_limit: 21, active_jobs_limit: 22) }
- let!(:silver_plan) { plans.create!(name: 'silver', title: 'Silver', active_pipelines_limit: 30, pipeline_size_limit: 31, active_jobs_limit: 32) }
- let!(:bronze_plan) { plans.create!(name: 'bronze', title: 'Bronze', active_pipelines_limit: 40, pipeline_size_limit: 41, active_jobs_limit: 42) }
- let!(:free_plan) { plans.create!(name: 'free', title: 'Free', active_pipelines_limit: 50, pipeline_size_limit: 51, active_jobs_limit: 52) }
- let!(:other_plan) { plans.create!(name: 'other', title: 'Other', active_pipelines_limit: nil, pipeline_size_limit: nil, active_jobs_limit: 0) }
-
- describe 'migrate' do
- it 'populates plan_limits from all the records in plans' do
- expect { migrate! }.to change { plan_limits.count }.by 5
- end
-
- it 'copies plan limits and plan.id into to plan_limits table' do
- migrate!
-
- new_data = plan_limits.pluck(:plan_id, :ci_active_pipelines, :ci_pipeline_size, :ci_active_jobs)
- expected_data = [
- [gold_plan.id, 20, 21, 22],
- [silver_plan.id, 30, 31, 32],
- [bronze_plan.id, 40, 41, 42],
- [free_plan.id, 50, 51, 52],
- [other_plan.id, 0, 0, 0]
- ]
- expect(new_data).to contain_exactly(*expected_data)
- end
- end
-end
diff --git a/spec/migrations/nullify_users_role_spec.rb b/spec/migrations/nullify_users_role_spec.rb
deleted file mode 100644
index 11056d9cf0c..00000000000
--- a/spec/migrations/nullify_users_role_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe NullifyUsersRole do
- let(:users) { table(:users) }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
-
- users.create!(role: 0, updated_at: '2019-11-04 12:08:00', projects_limit: 0, email: '1')
- users.create!(role: 1, updated_at: '2019-11-04 12:08:00', projects_limit: 0, email: '2')
- users.create!(role: 0, updated_at: '2019-11-06 12:08:00', projects_limit: 0, email: '3')
-
- migrate!
- end
-
- it 'nullifies the role of the user with updated_at < 2019-11-05 12:08:00 and a role of 0' do
- expect(users.where(role: nil).count).to eq(1)
- expect(users.find_by(role: nil).email).to eq('1')
- end
-
- it 'leaves the user with role of 1' do
- expect(users.where(role: 1).count).to eq(1)
- expect(users.find_by(role: 1).email).to eq('2')
- end
-
- it 'leaves the user with updated_at > 2019-11-05 12:08:00' do
- expect(users.where(role: 0).count).to eq(1)
- expect(users.find_by(role: 0).email).to eq('3')
- end
-end
diff --git a/spec/migrations/populate_project_statistics_packages_size_spec.rb b/spec/migrations/populate_project_statistics_packages_size_spec.rb
deleted file mode 100644
index af9237f4bd6..00000000000
--- a/spec/migrations/populate_project_statistics_packages_size_spec.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe PopulateProjectStatisticsPackagesSize do
- let(:project_statistics) { table(:project_statistics) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:packages) { table(:packages_packages) }
- let(:package_files) { table(:packages_package_files) }
-
- let(:file_size) { 1.kilobyte }
- let(:repo_size) { 2.megabytes }
- let(:lfs_size) { 3.gigabytes }
- let(:artifacts_size) { 4.terabytes }
- let(:storage_size) { repo_size + lfs_size + artifacts_size }
-
- let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
- let(:package) { packages.create!(project_id: project.id, name: 'a package', package_type: 1) }
- let(:project) { projects.create!(namespace_id: namespace.id) }
-
- let!(:statistics) { project_statistics.create!(project_id: project.id, namespace_id: namespace.id, storage_size: storage_size, repository_size: repo_size, lfs_objects_size: lfs_size, build_artifacts_size: artifacts_size) }
- let!(:package_file) { package_files.create!(package_id: package.id, file: 'a file.txt', file_name: 'a file.txt', size: file_size)}
-
- it 'backfills ProjectStatistics packages_size' do
- expect { migrate! }
- .to change { statistics.reload.packages_size }
- .from(nil).to(file_size)
- end
-
- it 'updates ProjectStatistics storage_size' do
- expect { migrate! }
- .to change { statistics.reload.storage_size }
- .by(file_size)
- end
-end
diff --git a/spec/migrations/populate_rule_type_on_approval_merge_request_rules_spec.rb b/spec/migrations/populate_rule_type_on_approval_merge_request_rules_spec.rb
deleted file mode 100644
index 7dc3f5a1004..00000000000
--- a/spec/migrations/populate_rule_type_on_approval_merge_request_rules_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe PopulateRuleTypeOnApprovalMergeRequestRules do
- let(:migration) { described_class.new }
-
- describe '#up' do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:merge_requests) { table(:merge_requests) }
- let(:approval_rules) { table(:approval_merge_request_rules) }
-
- # We use integers here since at the time of writing CE does not yet have the
- # appropriate models and enum definitions.
- let(:regular_rule_type) { 1 }
- let(:code_owner_rule_type) { 2 }
-
- before do
- namespaces.create!(id: 11, name: 'gitlab', path: 'gitlab')
- projects.create!(id: 101, namespace_id: 11, name: 'gitlab', path: 'gitlab')
- merge_requests.create!(id: 1, target_project_id: 101, source_project_id: 101, target_branch: 'feature', source_branch: 'master')
-
- approval_rules.create!(id: 1, merge_request_id: 1, name: "Default", code_owner: false, rule_type: regular_rule_type)
- approval_rules.create!(id: 2, merge_request_id: 1, name: "Code Owners", code_owner: true, rule_type: regular_rule_type)
- end
-
- it 'backfills ApprovalMergeRequestRules code_owner rule_type' do
- expect(approval_rules.where(rule_type: regular_rule_type).pluck(:id)).to contain_exactly(1, 2)
- expect(approval_rules.where(rule_type: code_owner_rule_type).pluck(:id)).to be_empty
-
- migrate!
-
- expect(approval_rules.where(rule_type: regular_rule_type).pluck(:id)).to contain_exactly(1)
- expect(approval_rules.where(rule_type: code_owner_rule_type).pluck(:id)).to contain_exactly(2)
- end
- end
-end
diff --git a/spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_features_spec.rb b/spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_features_spec.rb
new file mode 100644
index 00000000000..77824a743fb
--- /dev/null
+++ b/spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_features_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe RecreateIndexSecurityCiBuildsOnNameAndIdParserFeatures, :migration do
+ let(:db) { described_class.new }
+ let(:pg_class) { table(:pg_class) }
+ let(:pg_index) { table(:pg_index) }
+ let(:async_indexes) { table(:postgres_async_indexes) }
+
+ it "recreates index" do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(async_indexes.where(name: described_class::OLD_INDEX_NAME).exists?).to be false
+ expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::OLD_INDEX_NAME)).to be true
+ expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::NEW_INDEX_NAME)).to be false
+ }
+
+ migration.after -> {
+ expect(async_indexes.where(name: described_class::OLD_INDEX_NAME).exists?).to be true
+ expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::OLD_INDEX_NAME)).to be false
+ expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::NEW_INDEX_NAME)).to be true
+ }
+ end
+ end
+end
diff --git a/spec/migrations/remove_empty_github_service_templates_spec.rb b/spec/migrations/remove_empty_github_service_templates_spec.rb
deleted file mode 100644
index ad84187c298..00000000000
--- a/spec/migrations/remove_empty_github_service_templates_spec.rb
+++ /dev/null
@@ -1,55 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RemoveEmptyGithubServiceTemplates do
- subject(:migration) { described_class.new }
-
- let(:services) do
- table(:services).tap do |klass|
- klass.class_eval do
- serialize :properties, JSON
- end
- end
- end
-
- before do
- services.delete_all
-
- create_service(properties: nil)
- create_service(properties: {})
- create_service(properties: { some: :value })
- create_service(properties: {}, template: false)
- create_service(properties: {}, type: 'SomeType')
- end
-
- def all_service_properties
- services.where(template: true, type: 'GithubService').pluck(:properties)
- end
-
- it 'correctly migrates up and down service templates' do
- reversible_migration do |migration|
- migration.before -> do
- expect(services.count).to eq(5)
-
- expect(all_service_properties)
- .to match(a_collection_containing_exactly(nil, {}, { 'some' => 'value' }))
- end
-
- migration.after -> do
- expect(services.count).to eq(4)
-
- expect(all_service_properties)
- .to match(a_collection_containing_exactly(nil, { 'some' => 'value' }))
- end
- end
- end
-
- def create_service(params)
- data = { template: true, type: 'GithubService' }
- data.merge!(params)
-
- services.create!(data)
- end
-end
diff --git a/spec/migrations/remove_schedule_and_status_from_pending_alert_escalations_spec.rb b/spec/migrations/remove_schedule_and_status_from_pending_alert_escalations_spec.rb
new file mode 100644
index 00000000000..f595261ff90
--- /dev/null
+++ b/spec/migrations/remove_schedule_and_status_from_pending_alert_escalations_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RemoveScheduleAndStatusFromPendingAlertEscalations do
+ let(:escalations) { table(:incident_management_pending_alert_escalations) }
+ let(:schedule_index) { 'index_incident_management_pending_alert_escalations_on_schedule' }
+ let(:schedule_foreign_key) { 'fk_rails_fcbfd9338b' }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(escalations.column_names).to include('schedule_id', 'status')
+ expect(escalations_indexes).to include(schedule_index)
+ expect(escalations_constraints).to include(schedule_foreign_key)
+ }
+
+ migration.after -> {
+ escalations.reset_column_information
+ expect(escalations.column_names).not_to include('schedule_id', 'status')
+ expect(escalations_indexes).not_to include(schedule_index)
+ expect(escalations_constraints).not_to include(schedule_foreign_key)
+ }
+ end
+ end
+
+ private
+
+ def escalations_indexes
+ ActiveRecord::Base.connection.indexes(:incident_management_pending_alert_escalations).collect(&:name)
+ end
+
+ def escalations_constraints
+ ActiveRecord::Base.connection.foreign_keys(:incident_management_pending_alert_escalations).collect(&:name)
+ end
+end
diff --git a/spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb b/spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb
deleted file mode 100644
index 45bd5073d55..00000000000
--- a/spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleFillValidTimeForPagesDomainCertificates do
- let(:migration_class) { described_class::MIGRATION }
- let(:migration_name) { migration_class.to_s.demodulize }
-
- let(:domains_table) { table(:pages_domains) }
-
- let(:certificate) do
- File.read('spec/fixtures/passphrase_x509_certificate.crt')
- end
-
- before do
- domains_table.create!(domain: "domain1.example.com", verification_code: "123")
- domains_table.create!(domain: "domain2.example.com", verification_code: "123", certificate: '')
- domains_table.create!(domain: "domain3.example.com", verification_code: "123", certificate: certificate)
- domains_table.create!(domain: "domain4.example.com", verification_code: "123", certificate: certificate)
- end
-
- it 'correctly schedules background migrations' do
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- first_id = domains_table.find_by_domain("domain3.example.com").id
- last_id = domains_table.find_by_domain("domain4.example.com").id
-
- expect(migration_name).to be_scheduled_delayed_migration(5.minutes, first_id, last_id)
- expect(BackgroundMigrationWorker.jobs.size).to eq(1)
- end
- end
- end
-
- it 'sets certificate valid_not_before/not_after', :sidekiq_might_not_need_inline do
- perform_enqueued_jobs do
- migrate!
-
- domain = domains_table.find_by_domain("domain3.example.com")
- expect(domain.certificate_valid_not_before)
- .to eq(Time.parse("2018-03-23 14:02:08 UTC"))
- expect(domain.certificate_valid_not_after)
- .to eq(Time.parse("2019-03-23 14:02:08 UTC"))
- end
- end
-end
diff --git a/spec/migrations/schedule_pages_metadata_migration_spec.rb b/spec/migrations/schedule_pages_metadata_migration_spec.rb
deleted file mode 100644
index 96fbc1f9f51..00000000000
--- a/spec/migrations/schedule_pages_metadata_migration_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe SchedulePagesMetadataMigration do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 1)
-
- namespaces.create!(id: 11, name: 'gitlab', path: 'gitlab-org')
- projects.create!(id: 111, namespace_id: 11, name: 'Project 111')
- projects.create!(id: 114, namespace_id: 11, name: 'Project 114')
- end
-
- it 'schedules pages metadata migration' do
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, 111, 111)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, 114, 114)
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/schedule_populate_merge_request_assignees_table_spec.rb b/spec/migrations/schedule_populate_merge_request_assignees_table_spec.rb
deleted file mode 100644
index 3caab64a72d..00000000000
--- a/spec/migrations/schedule_populate_merge_request_assignees_table_spec.rb
+++ /dev/null
@@ -1,47 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe SchedulePopulateMergeRequestAssigneesTable do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
- let(:merge_requests) { table(:merge_requests) }
-
- def create_merge_request(id)
- params = {
- id: id,
- target_project_id: project.id,
- target_branch: 'master',
- source_project_id: project.id,
- source_branch: 'mr name',
- title: "mr name#{id}"
- }
-
- merge_requests.create!(params)
- end
-
- it 'correctly schedules background migrations' do
- create_merge_request(1)
- create_merge_request(2)
- create_merge_request(3)
-
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(8.minutes, 1, 2)
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(16.minutes, 3, 3)
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/schedule_populate_status_column_of_security_scans_spec.rb b/spec/migrations/schedule_populate_status_column_of_security_scans_spec.rb
new file mode 100644
index 00000000000..601935db8db
--- /dev/null
+++ b/spec/migrations/schedule_populate_status_column_of_security_scans_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SchedulePopulateStatusColumnOfSecurityScans do
+ before do
+ allow(Gitlab).to receive(:ee?).and_return(ee?)
+ stub_const("#{described_class.name}::BATCH_SIZE", 1)
+ end
+
+ context 'when the Gitlab instance is CE' do
+ let(:ee?) { false }
+
+ it 'does not run the migration' do
+ expect { migrate! }.not_to change { BackgroundMigrationWorker.jobs.size }
+ end
+ end
+
+ context 'when the Gitlab instance is EE' do
+ let(:ee?) { true }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:pipelines) { table(:ci_pipelines) }
+ let(:builds) { table(:ci_builds) }
+ let(:security_scans) { table(:security_scans) }
+
+ let(:namespace) { namespaces.create!(name: "foo", path: "bar") }
+ let(:project) { projects.create!(namespace_id: namespace.id) }
+ let(:pipeline) { pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a', status: 'success') }
+ let(:ci_build) { builds.create!(commit_id: pipeline.id, retried: false, type: 'Ci::Build') }
+
+ let!(:security_scan_1) { security_scans.create!(build_id: ci_build.id, scan_type: 1) }
+ let!(:security_scan_2) { security_scans.create!(build_id: ci_build.id, scan_type: 2) }
+
+ around do |example|
+ freeze_time { Sidekiq::Testing.fake! { example.run } }
+ end
+
+ it 'schedules the background jobs', :aggregate_failures do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to be(2)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, security_scan_1.id, security_scan_1.id)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, security_scan_2.id, security_scan_2.id)
+ end
+ end
+end
diff --git a/spec/migrations/schedule_sync_issuables_state_id_spec.rb b/spec/migrations/schedule_sync_issuables_state_id_spec.rb
deleted file mode 100644
index 5a7105a0c84..00000000000
--- a/spec/migrations/schedule_sync_issuables_state_id_spec.rb
+++ /dev/null
@@ -1,81 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleSyncIssuablesStateId do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:merge_requests) { table(:merge_requests) }
- let(:issues) { table(:issues) }
- let(:migration) { described_class.new }
- let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') }
- let(:project) { projects.create!(namespace_id: group.id) }
-
- shared_examples 'scheduling migrations' do
- before do
- Sidekiq::Worker.clear_all
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
- end
-
- it 'correctly schedules issuable sync background migration' do
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(migration).to be_scheduled_delayed_migration(120.seconds, resource_1.id, resource_2.id)
- expect(migration).to be_scheduled_delayed_migration(240.seconds, resource_3.id, resource_4.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
- end
-
- describe '#up' do
- context 'issues' do
- it 'migrates state column to integer', :sidekiq_might_not_need_inline do
- opened_issue = issues.create!(description: 'first', state: 'opened')
- closed_issue = issues.create!(description: 'second', state: 'closed')
- invalid_state_issue = issues.create!(description: 'fourth', state: 'not valid')
-
- migrate!
-
- expect(opened_issue.reload.state_id).to eq(Issue.available_states[:opened])
- expect(closed_issue.reload.state_id).to eq(Issue.available_states[:closed])
- expect(invalid_state_issue.reload.state_id).to be_nil
- end
-
- it_behaves_like 'scheduling migrations' do
- let(:migration) { described_class::ISSUES_MIGRATION }
- let!(:resource_1) { issues.create!(description: 'first', state: 'opened') }
- let!(:resource_2) { issues.create!(description: 'second', state: 'closed') }
- let!(:resource_3) { issues.create!(description: 'third', state: 'closed') }
- let!(:resource_4) { issues.create!(description: 'fourth', state: 'closed') }
- end
- end
-
- context 'merge requests' do
- it 'migrates state column to integer', :sidekiq_might_not_need_inline do
- opened_merge_request = merge_requests.create!(state: 'opened', target_project_id: project.id, target_branch: 'feature1', source_branch: 'master')
- closed_merge_request = merge_requests.create!(state: 'closed', target_project_id: project.id, target_branch: 'feature2', source_branch: 'master')
- merged_merge_request = merge_requests.create!(state: 'merged', target_project_id: project.id, target_branch: 'feature3', source_branch: 'master')
- locked_merge_request = merge_requests.create!(state: 'locked', target_project_id: project.id, target_branch: 'feature4', source_branch: 'master')
-
- migrate!
-
- expect(opened_merge_request.reload.state_id).to eq(MergeRequest.available_states[:opened])
- expect(closed_merge_request.reload.state_id).to eq(MergeRequest.available_states[:closed])
- expect(merged_merge_request.reload.state_id).to eq(MergeRequest.available_states[:merged])
- expect(locked_merge_request.reload.state_id).to eq(MergeRequest.available_states[:locked])
- end
-
- it_behaves_like 'scheduling migrations' do
- let(:migration) { described_class::MERGE_REQUESTS_MIGRATION }
- let!(:resource_1) { merge_requests.create!(state: 'opened', target_project_id: project.id, target_branch: 'feature1', source_branch: 'master') }
- let!(:resource_2) { merge_requests.create!(state: 'closed', target_project_id: project.id, target_branch: 'feature2', source_branch: 'master') }
- let!(:resource_3) { merge_requests.create!(state: 'merged', target_project_id: project.id, target_branch: 'feature3', source_branch: 'master') }
- let!(:resource_4) { merge_requests.create!(state: 'locked', target_project_id: project.id, target_branch: 'feature4', source_branch: 'master') }
- end
- end
- end
-end
diff --git a/spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb b/spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb
deleted file mode 100644
index d8eaaa1df04..00000000000
--- a/spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleSyncIssuablesStateIdWhereNil do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:merge_requests) { table(:merge_requests) }
- let(:issues) { table(:issues) }
- let(:migration) { described_class.new }
- let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') }
- let(:project) { projects.create!(namespace_id: group.id) }
-
- shared_examples 'scheduling migrations' do
- before do
- Sidekiq::Worker.clear_all
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
- end
-
- it 'correctly schedules issuable sync background migration' do
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(migration).to be_scheduled_delayed_migration(120.seconds, resource_1.id, resource_3.id)
- expect(migration).to be_scheduled_delayed_migration(240.seconds, resource_5.id, resource_5.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
- end
-
- describe '#up' do
- context 'issues' do
- it_behaves_like 'scheduling migrations' do
- let(:migration) { described_class::ISSUES_MIGRATION }
- let!(:resource_1) { issues.create!(description: 'first', state: 'opened', state_id: nil) }
- let!(:resource_2) { issues.create!(description: 'second', state: 'closed', state_id: 2) }
- let!(:resource_3) { issues.create!(description: 'third', state: 'closed', state_id: nil) }
- let!(:resource_4) { issues.create!(description: 'fourth', state: 'closed', state_id: 2) }
- let!(:resource_5) { issues.create!(description: 'fifth', state: 'closed', state_id: nil) }
- end
- end
-
- context 'merge requests' do
- it_behaves_like 'scheduling migrations' do
- let(:migration) { described_class::MERGE_REQUESTS_MIGRATION }
- let!(:resource_1) { merge_requests.create!(state: 'opened', state_id: nil, target_project_id: project.id, target_branch: 'feature1', source_branch: 'master') }
- let!(:resource_2) { merge_requests.create!(state: 'closed', state_id: 2, target_project_id: project.id, target_branch: 'feature2', source_branch: 'master') }
- let!(:resource_3) { merge_requests.create!(state: 'merged', state_id: nil, target_project_id: project.id, target_branch: 'feature3', source_branch: 'master') }
- let!(:resource_4) { merge_requests.create!(state: 'locked', state_id: 3, target_project_id: project.id, target_branch: 'feature4', source_branch: 'master') }
- let!(:resource_5) { merge_requests.create!(state: 'locked', state_id: nil, target_project_id: project.id, target_branch: 'feature4', source_branch: 'master') }
- end
- end
- end
-end
diff --git a/spec/migrations/set_issue_id_for_all_versions_spec.rb b/spec/migrations/set_issue_id_for_all_versions_spec.rb
deleted file mode 100644
index 78bc4bbce1c..00000000000
--- a/spec/migrations/set_issue_id_for_all_versions_spec.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe SetIssueIdForAllVersions do
- let(:projects) { table(:projects) }
- let(:issues) { table(:issues) }
- let(:designs) { table(:design_management_designs) }
- let(:designs_versions) { table(:design_management_designs_versions) }
- let(:versions) { table(:design_management_versions) }
-
- before do
- @project = projects.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: 1)
-
- @issue_1 = issues.create!(description: 'first', project_id: @project.id)
- @issue_2 = issues.create!(description: 'second', project_id: @project.id)
-
- @design_1 = designs.create!(issue_id: @issue_1.id, filename: 'homepage-1.jpg', project_id: @project.id)
- @design_2 = designs.create!(issue_id: @issue_2.id, filename: 'homepage-2.jpg', project_id: @project.id)
-
- @version_1 = versions.create!(sha: 'foo')
- @version_2 = versions.create!(sha: 'bar')
-
- designs_versions.create!(version_id: @version_1.id, design_id: @design_1.id)
- designs_versions.create!(version_id: @version_2.id, design_id: @design_2.id)
- end
-
- it 'correctly sets issue_id' do
- expect(versions.where(issue_id: nil).count).to eq(2)
-
- migrate!
-
- expect(versions.where(issue_id: nil).count).to eq(0)
- expect(versions.find(@version_1.id).issue_id).to eq(@issue_1.id)
- expect(versions.find(@version_2.id).issue_id).to eq(@issue_2.id)
- end
-end
diff --git a/spec/migrations/sync_issuables_state_id_spec.rb b/spec/migrations/sync_issuables_state_id_spec.rb
deleted file mode 100644
index 67403893f74..00000000000
--- a/spec/migrations/sync_issuables_state_id_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe SyncIssuablesStateId do
- let(:migration) { described_class.new }
-
- describe '#up' do
- let(:issues) { table(:issues) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:merge_requests) { table(:merge_requests) }
- let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') }
- let(:project) { projects.create!(namespace_id: group.id) }
- # These state_ids should be the same defined on Issue/MergeRequest models
- let(:state_ids) { { opened: 1, closed: 2, merged: 3, locked: 4 } }
-
- it 'migrates state column to state_id as integer' do
- opened_issue = issues.create!(description: 'first', state: 'opened')
- closed_issue = issues.create!(description: 'second', state: 'closed')
- unknown_state_issue = issues.create!(description: 'second', state: 'unknown')
- opened_merge_request = merge_requests.create!(state: 'opened', target_project_id: project.id, target_branch: 'feature1', source_branch: 'master')
- closed_merge_request = merge_requests.create!(state: 'closed', target_project_id: project.id, target_branch: 'feature2', source_branch: 'master')
- merged_merge_request = merge_requests.create!(state: 'merged', target_project_id: project.id, target_branch: 'feature3', source_branch: 'master')
- locked_merge_request = merge_requests.create!(state: 'locked', target_project_id: project.id, target_branch: 'feature4', source_branch: 'master')
- unknown_state_merge_request = merge_requests.create!(state: 'unknown', target_project_id: project.id, target_branch: 'feature4', source_branch: 'master')
-
- migrate!
-
- expect(opened_issue.reload.state_id).to eq(state_ids[:opened])
- expect(closed_issue.reload.state_id).to eq(state_ids[:closed])
- expect(unknown_state_issue.reload.state_id).to eq(state_ids[:closed])
- expect(opened_merge_request.reload.state_id).to eq(state_ids[:opened])
- expect(closed_merge_request.reload.state_id).to eq(state_ids[:closed])
- expect(merged_merge_request.reload.state_id).to eq(state_ids[:merged])
- expect(locked_merge_request.reload.state_id).to eq(state_ids[:locked])
- expect(unknown_state_merge_request.reload.state_id).to eq(state_ids[:closed])
- end
- end
-end
diff --git a/spec/migrations/truncate_user_fullname_spec.rb b/spec/migrations/truncate_user_fullname_spec.rb
deleted file mode 100644
index dc5bef06cdc..00000000000
--- a/spec/migrations/truncate_user_fullname_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe TruncateUserFullname do
- let(:users) { table(:users) }
-
- let(:user_short) { create_user(name: 'abc', email: 'test_short@example.com') }
- let(:user_long) { create_user(name: 'a' * 200 + 'z', email: 'test_long@example.com') }
-
- def create_user(params)
- users.create!(params.merge(projects_limit: 0))
- end
-
- it 'truncates user full name to the first 128 characters' do
- expect { migrate! }.to change { user_long.reload.name }.to('a' * 128)
- end
-
- it 'does not truncate short names' do
- expect { migrate! }.not_to change { user_short.reload.name.length }
- end
-end
diff --git a/spec/migrations/update_minimum_password_length_spec.rb b/spec/migrations/update_minimum_password_length_spec.rb
deleted file mode 100644
index e40d090fd77..00000000000
--- a/spec/migrations/update_minimum_password_length_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe UpdateMinimumPasswordLength do
- let(:application_settings) { table(:application_settings) }
- let(:application_setting) do
- application_settings.create!(
- minimum_password_length: ApplicationSetting::DEFAULT_MINIMUM_PASSWORD_LENGTH
- )
- end
-
- before do
- stub_const('ApplicationSetting::DEFAULT_MINIMUM_PASSWORD_LENGTH', 10)
- allow(Devise).to receive(:password_length).and_return(12..20)
- end
-
- it 'correctly migrates minimum_password_length' do
- reversible_migration do |migration|
- migration.before -> {
- expect(application_setting.reload.minimum_password_length).to eq(10)
- }
-
- migration.after -> {
- expect(application_setting.reload.minimum_password_length).to eq(12)
- }
- end
- end
-end
diff --git a/spec/models/analytics/cycle_analytics/issue_stage_event_spec.rb b/spec/models/analytics/cycle_analytics/issue_stage_event_spec.rb
index 3e6d4ebd0a2..c0d5b9203b8 100644
--- a/spec/models/analytics/cycle_analytics/issue_stage_event_spec.rb
+++ b/spec/models/analytics/cycle_analytics/issue_stage_event_spec.rb
@@ -8,4 +8,6 @@ RSpec.describe Analytics::CycleAnalytics::IssueStageEvent do
it { is_expected.to validate_presence_of(:group_id) }
it { is_expected.to validate_presence_of(:project_id) }
it { is_expected.to validate_presence_of(:start_event_timestamp) }
+
+ it_behaves_like 'StageEventModel'
end
diff --git a/spec/models/analytics/cycle_analytics/merge_request_stage_event_spec.rb b/spec/models/analytics/cycle_analytics/merge_request_stage_event_spec.rb
index 244c5c70286..82a7e66d62a 100644
--- a/spec/models/analytics/cycle_analytics/merge_request_stage_event_spec.rb
+++ b/spec/models/analytics/cycle_analytics/merge_request_stage_event_spec.rb
@@ -8,4 +8,6 @@ RSpec.describe Analytics::CycleAnalytics::MergeRequestStageEvent do
it { is_expected.to validate_presence_of(:group_id) }
it { is_expected.to validate_presence_of(:project_id) }
it { is_expected.to validate_presence_of(:start_event_timestamp) }
+
+ it_behaves_like 'StageEventModel'
end
diff --git a/spec/models/application_record_spec.rb b/spec/models/application_record_spec.rb
index efb92ddaea0..f0212da3041 100644
--- a/spec/models/application_record_spec.rb
+++ b/spec/models/application_record_spec.rb
@@ -194,7 +194,7 @@ RSpec.describe ApplicationRecord do
end
context 'with database load balancing' do
- let(:session) { double(:session) }
+ let(:session) { Gitlab::Database::LoadBalancing::Session.new }
before do
allow(::Gitlab::Database::LoadBalancing::Session).to receive(:current).and_return(session)
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 3e264867703..8ad83da61f3 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -77,6 +77,9 @@ RSpec.describe ApplicationSetting do
it { is_expected.to validate_numericality_of(:container_registry_cleanup_tags_service_max_list_size).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_expiration_policies_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
+ it { is_expected.to validate_numericality_of(:dependency_proxy_ttl_group_policy_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
+ it { is_expected.not_to allow_value(nil).for(:dependency_proxy_ttl_group_policy_worker_capacity) }
+
it { is_expected.to validate_numericality_of(:snippet_size_limit).only_integer.is_greater_than(0) }
it { is_expected.to validate_numericality_of(:wiki_page_max_content_bytes).only_integer.is_greater_than_or_equal_to(1024) }
it { is_expected.to validate_presence_of(:max_artifacts_size) }
@@ -946,6 +949,10 @@ RSpec.describe ApplicationSetting do
throttle_unauthenticated_files_api_period_in_seconds
throttle_authenticated_files_api_requests_per_period
throttle_authenticated_files_api_period_in_seconds
+ throttle_unauthenticated_deprecated_api_requests_per_period
+ throttle_unauthenticated_deprecated_api_period_in_seconds
+ throttle_authenticated_deprecated_api_requests_per_period
+ throttle_authenticated_deprecated_api_period_in_seconds
throttle_authenticated_git_lfs_requests_per_period
throttle_authenticated_git_lfs_period_in_seconds
]
diff --git a/spec/models/bulk_import_spec.rb b/spec/models/bulk_import_spec.rb
index 4cfec6b20b7..ea002a7b174 100644
--- a/spec/models/bulk_import_spec.rb
+++ b/spec/models/bulk_import_spec.rb
@@ -21,4 +21,18 @@ RSpec.describe BulkImport, type: :model do
expect(described_class.all_human_statuses).to contain_exactly('created', 'started', 'finished', 'failed')
end
end
+
+ describe '.min_gl_version_for_project' do
+ it { expect(described_class.min_gl_version_for_project_migration).to be_a(Gitlab::VersionInfo) }
+ it { expect(described_class.min_gl_version_for_project_migration.to_s).to eq('14.4.0') }
+ end
+
+ describe '#source_version_info' do
+ it 'returns source_version as Gitlab::VersionInfo' do
+ bulk_import = build(:bulk_import, source_version: '9.13.2')
+
+ expect(bulk_import.source_version_info).to be_a(Gitlab::VersionInfo)
+ expect(bulk_import.source_version_info.to_s).to eq(bulk_import.source_version)
+ end
+ end
end
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index c1cbe61885f..278d7f4bc56 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -179,7 +179,7 @@ RSpec.describe BulkImports::Entity, type: :model do
entity = create(:bulk_import_entity, :group_entity)
entity.create_pipeline_trackers!
- expect(entity.trackers.count).to eq(BulkImports::Groups::Stage.pipelines.count)
+ expect(entity.trackers.count).to eq(BulkImports::Groups::Stage.new(entity.bulk_import).pipelines.count)
expect(entity.trackers.map(&:pipeline_name)).to include(BulkImports::Groups::Pipelines::GroupPipeline.to_s)
end
end
@@ -189,7 +189,7 @@ RSpec.describe BulkImports::Entity, type: :model do
entity = create(:bulk_import_entity, :project_entity)
entity.create_pipeline_trackers!
- expect(entity.trackers.count).to eq(BulkImports::Projects::Stage.pipelines.count)
+ expect(entity.trackers.count).to eq(BulkImports::Projects::Stage.new(entity.bulk_import).pipelines.count)
expect(entity.trackers.map(&:pipeline_name)).to include(BulkImports::Projects::Pipelines::ProjectPipeline.to_s)
end
end
@@ -207,4 +207,40 @@ RSpec.describe BulkImports::Entity, type: :model do
expect(entity.pipeline_exists?('BulkImports::Groups::Pipelines::InexistentPipeline')).to eq(false)
end
end
+
+ describe '#pluralized_name' do
+ context 'when entity is group' do
+ it 'returns groups' do
+ entity = build(:bulk_import_entity, :group_entity)
+
+ expect(entity.pluralized_name).to eq('groups')
+ end
+ end
+
+ context 'when entity is project' do
+ it 'returns projects' do
+ entity = build(:bulk_import_entity, :project_entity)
+
+ expect(entity.pluralized_name).to eq('projects')
+ end
+ end
+ end
+
+ describe '#export_relations_url_path' do
+ context 'when entity is group' do
+ it 'returns group export relations url' do
+ entity = build(:bulk_import_entity, :group_entity)
+
+ expect(entity.export_relations_url_path).to eq("/groups/#{entity.encoded_source_full_path}/export_relations")
+ end
+ end
+
+ context 'when entity is project' do
+ it 'returns project export relations url' do
+ entity = build(:bulk_import_entity, :project_entity)
+
+ expect(entity.export_relations_url_path).to eq("/projects/#{entity.encoded_source_full_path}/export_relations")
+ end
+ end
+ end
end
diff --git a/spec/models/bulk_imports/file_transfer/group_config_spec.rb b/spec/models/bulk_imports/file_transfer/group_config_spec.rb
index 1e566a7b042..8660114b719 100644
--- a/spec/models/bulk_imports/file_transfer/group_config_spec.rb
+++ b/spec/models/bulk_imports/file_transfer/group_config_spec.rb
@@ -23,10 +23,8 @@ RSpec.describe BulkImports::FileTransfer::GroupConfig do
end
describe '#export_path' do
- it 'returns correct export path' do
- expect(::Gitlab::ImportExport).to receive(:storage_path).and_return('storage_path')
-
- expect(subject.export_path).to eq("storage_path/#{exportable.full_path}/#{hex}")
+ it 'returns tmpdir location' do
+ expect(subject.export_path).to include(File.join(Dir.tmpdir, 'bulk_imports'))
end
end
diff --git a/spec/models/bulk_imports/file_transfer/project_config_spec.rb b/spec/models/bulk_imports/file_transfer/project_config_spec.rb
index db037528ec1..3bd79333f0c 100644
--- a/spec/models/bulk_imports/file_transfer/project_config_spec.rb
+++ b/spec/models/bulk_imports/file_transfer/project_config_spec.rb
@@ -23,10 +23,8 @@ RSpec.describe BulkImports::FileTransfer::ProjectConfig do
end
describe '#export_path' do
- it 'returns correct export path' do
- expect(::Gitlab::ImportExport).to receive(:storage_path).and_return('storage_path')
-
- expect(subject.export_path).to eq("storage_path/#{exportable.disk_path}/#{hex}")
+ it 'returns tmpdir location' do
+ expect(subject.export_path).to include(File.join(Dir.tmpdir, 'bulk_imports'))
end
end
@@ -51,4 +49,46 @@ RSpec.describe BulkImports::FileTransfer::ProjectConfig do
expect(subject.relation_excluded_keys('project')).to include('creator_id')
end
end
+
+ describe '#tree_relation?' do
+ context 'when it is a tree relation' do
+ it 'returns true' do
+ expect(subject.tree_relation?('labels')).to eq(true)
+ end
+ end
+
+ context 'when it is not a tree relation' do
+ it 'returns false' do
+ expect(subject.tree_relation?('example')).to eq(false)
+ end
+ end
+ end
+
+ describe '#file_relation?' do
+ context 'when it is a file relation' do
+ it 'returns true' do
+ expect(subject.file_relation?('uploads')).to eq(true)
+ end
+ end
+
+ context 'when it is not a file relation' do
+ it 'returns false' do
+ expect(subject.file_relation?('example')).to eq(false)
+ end
+ end
+ end
+
+ describe '#tree_relation_definition_for' do
+ it 'returns relation definition' do
+ expected = { service_desk_setting: { except: [:outgoing_name, :file_template_project_id], include: [] } }
+
+ expect(subject.tree_relation_definition_for('service_desk_setting')).to eq(expected)
+ end
+
+ context 'when relation is not tree relation' do
+ it 'returns' do
+ expect(subject.tree_relation_definition_for('example')).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/models/bulk_imports/tracker_spec.rb b/spec/models/bulk_imports/tracker_spec.rb
index 7f0a7d4f1ae..a72b628e329 100644
--- a/spec/models/bulk_imports/tracker_spec.rb
+++ b/spec/models/bulk_imports/tracker_spec.rb
@@ -66,7 +66,8 @@ RSpec.describe BulkImports::Tracker, type: :model do
describe '#pipeline_class' do
it 'returns the pipeline class' do
- pipeline_class = BulkImports::Groups::Stage.pipelines.first[1]
+ bulk_import = create(:bulk_import)
+ pipeline_class = BulkImports::Groups::Stage.new(bulk_import).pipelines.first[1]
tracker = create(:bulk_import_tracker, pipeline_name: pipeline_class)
expect(tracker.pipeline_class).to eq(pipeline_class)
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 6dd3c40f228..8f1ae9c5f02 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -17,6 +17,8 @@ RSpec.describe Ci::Bridge do
{ trigger: { project: 'my/project', branch: 'master' } }
end
+ it { is_expected.to respond_to(:runner_features) }
+
it 'has many sourced pipelines' do
expect(bridge).to have_many(:sourced_pipelines)
end
@@ -76,7 +78,7 @@ RSpec.describe Ci::Bridge do
bridge.enqueue!
- expect(::Ci::CreateCrossProjectPipelineWorker.jobs.last['args']).to eq([bridge.id])
+ expect(::Ci::CreateDownstreamPipelineWorker.jobs.last['args']).to eq([bridge.id])
end
end
@@ -85,7 +87,7 @@ RSpec.describe Ci::Bridge do
bridge.enqueue_waiting_for_resource!
- expect(::Ci::CreateCrossProjectPipelineWorker.jobs.last['args']).to eq([bridge.id])
+ expect(::Ci::CreateDownstreamPipelineWorker.jobs.last['args']).to match_array([bridge.id])
end
it 'raises error when the status is failed' do
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 1e06d566c80..2ebf75a1d8a 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -29,11 +29,13 @@ RSpec.describe Ci::Build do
it { is_expected.to have_one(:deployment) }
it { is_expected.to have_one(:runner_session) }
it { is_expected.to have_one(:trace_metadata) }
+ it { is_expected.to have_many(:terraform_state_versions).dependent(:nullify).inverse_of(:build) }
it { is_expected.to validate_presence_of(:ref) }
it { is_expected.to respond_to(:has_trace?) }
it { is_expected.to respond_to(:trace) }
+ it { is_expected.to respond_to(:runner_features) }
it { is_expected.to delegate_method(:merge_request?).to(:pipeline) }
it { is_expected.to delegate_method(:merge_request_ref?).to(:pipeline) }
@@ -345,10 +347,10 @@ RSpec.describe Ci::Build do
end
describe '#stick_build_if_status_changed' do
- it 'sticks the build if the status changed', :db_load_balancing do
+ it 'sticks the build if the status changed' do
job = create(:ci_build, :pending)
- expect(Gitlab::Database::LoadBalancing::Sticking).to receive(:stick)
+ expect(ApplicationRecord.sticking).to receive(:stick)
.with(:build, job.id)
job.update!(status: :running)
@@ -1288,7 +1290,7 @@ RSpec.describe Ci::Build do
end
end
- describe 'state transition as a deployable' do
+ shared_examples_for 'state transition as a deployable' do
subject { build.send(event) }
let!(:build) { create(:ci_build, :with_deployment, :start_review_app, project: project, pipeline: pipeline) }
@@ -1397,6 +1399,36 @@ RSpec.describe Ci::Build do
end
end
+ it_behaves_like 'state transition as a deployable' do
+ context 'when transits to running' do
+ let(:event) { :run! }
+
+ context 'when deployment is already running state' do
+ before do
+ build.deployment.success!
+ end
+
+ it 'does not change deployment status and tracks an error' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception).with(
+ instance_of(Deployment::StatusSyncError), deployment_id: deployment.id, build_id: build.id)
+
+ with_cross_database_modification_prevented do
+ expect { subject }.not_to change { deployment.reload.status }
+ end
+ end
+ end
+ end
+ end
+
+ context 'when update_deployment_after_transaction_commit feature flag is disabled' do
+ before do
+ stub_feature_flags(update_deployment_after_transaction_commit: false)
+ end
+
+ it_behaves_like 'state transition as a deployable'
+ end
+
describe '#on_stop' do
subject { build.on_stop }
@@ -3946,7 +3978,7 @@ RSpec.describe Ci::Build do
end
it 'can drop the build' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
expect { build.drop! }.not_to raise_error
@@ -5288,4 +5320,10 @@ RSpec.describe Ci::Build do
expect(build.reload.queuing_entry).not_to be_present
end
end
+
+ it 'does not generate cross DB queries when a record is created via FactoryBot' do
+ with_cross_database_modification_prevented do
+ create(:ci_build)
+ end
+ end
end
diff --git a/spec/models/ci/build_trace_metadata_spec.rb b/spec/models/ci/build_trace_metadata_spec.rb
index 5e4645c5dc4..120e4289da2 100644
--- a/spec/models/ci/build_trace_metadata_spec.rb
+++ b/spec/models/ci/build_trace_metadata_spec.rb
@@ -88,14 +88,16 @@ RSpec.describe Ci::BuildTraceMetadata do
describe '#track_archival!' do
let(:trace_artifact) { create(:ci_job_artifact) }
let(:metadata) { create(:ci_build_trace_metadata) }
+ let(:checksum) { SecureRandom.hex }
it 'stores the artifact id and timestamp' do
expect(metadata.trace_artifact_id).to be_nil
- metadata.track_archival!(trace_artifact.id)
+ metadata.track_archival!(trace_artifact.id, checksum)
metadata.reload
expect(metadata.trace_artifact_id).to eq(trace_artifact.id)
+ expect(metadata.checksum).to eq(checksum)
expect(metadata.archived_at).to be_like_time(Time.current)
end
end
@@ -131,4 +133,29 @@ RSpec.describe Ci::BuildTraceMetadata do
end
end
end
+
+ describe '#remote_checksum_valid?' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:metadata) do
+ build(:ci_build_trace_metadata,
+ checksum: checksum,
+ remote_checksum: remote_checksum)
+ end
+
+ subject { metadata.remote_checksum_valid? }
+
+ where(:checksum, :remote_checksum, :result) do
+ nil | nil | false
+ nil | 'a' | false
+ 'a' | nil | false
+ 'a' | 'b' | false
+ 'b' | 'a' | false
+ 'a' | 'a' | true
+ end
+
+ with_them do
+ it { is_expected.to eq(result) }
+ end
+ end
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 1007d64438f..98b55ccb76b 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -35,6 +35,8 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it { is_expected.to have_many(:sourced_pipelines) }
it { is_expected.to have_many(:triggered_pipelines) }
it { is_expected.to have_many(:pipeline_artifacts) }
+ it { is_expected.to have_many(:package_build_infos).dependent(:nullify).inverse_of(:pipeline) }
+ it { is_expected.to have_many(:package_file_build_infos).dependent(:nullify).inverse_of(:pipeline) }
it { is_expected.to have_one(:chat_data) }
it { is_expected.to have_one(:source_pipeline) }
@@ -1219,32 +1221,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
%w(test success),
%w(deploy running)])
end
-
- context 'when commit status is retried' do
- let!(:old_commit_status) do
- create(:commit_status, pipeline: pipeline,
- stage: 'build',
- name: 'mac',
- stage_idx: 0,
- status: 'success')
- end
-
- context 'when FF ci_remove_update_retried_from_process_pipeline is disabled' do
- before do
- stub_feature_flags(ci_remove_update_retried_from_process_pipeline: false)
-
- Ci::ProcessPipelineService
- .new(pipeline)
- .execute
- end
-
- it 'ignores the previous state' do
- expect(statuses).to eq([%w(build success),
- %w(test success),
- %w(deploy running)])
- end
- end
- end
end
context 'when there is a stage with warnings' do
@@ -2906,121 +2882,30 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
- describe '#execute_hooks' do
+ describe 'hooks triggering' do
let_it_be(:pipeline) { create(:ci_empty_pipeline, :created) }
- let!(:build_a) { create_build('a', 0) }
- let!(:build_b) { create_build('b', 0) }
-
- let!(:hook) do
- create(:project_hook, pipeline_events: enabled)
- end
-
- before do
- WebHookWorker.drain
- end
-
- context 'with pipeline hooks enabled' do
- let(:enabled) { true }
-
- before do
- stub_full_request(hook.url, method: :post)
- end
-
- context 'with multiple builds', :sidekiq_inline do
- context 'when build is queued' do
- before do
- build_a.reload.enqueue
- build_b.reload.enqueue
- end
-
- it 'receives a pending event once' do
- expect(WebMock).to have_requested_pipeline_hook('pending').once
- end
-
- it 'builds hook data once' do
- create(:pipelines_email_integration)
-
- expect(Gitlab::DataBuilder::Pipeline).to receive(:build).once.and_call_original
-
- pipeline.execute_hooks
- end
- end
-
- context 'when build is run' do
- before do
- build_a.reload.enqueue
- build_a.reload.run!
- build_b.reload.enqueue
- build_b.reload.run!
- end
-
- it 'receives a running event once' do
- expect(WebMock).to have_requested_pipeline_hook('running').once
- end
- end
-
- context 'when all builds succeed' do
- before do
- build_a.success
-
- # We have to reload build_b as this is in next stage and it gets triggered by PipelineProcessWorker
- build_b.reload.success
- end
-
- it 'receives a success event once' do
- expect(WebMock).to have_requested_pipeline_hook('success').once
- end
- end
+ %i[
+ enqueue
+ request_resource
+ prepare
+ run
+ skip
+ drop
+ succeed
+ cancel
+ block
+ delay
+ ].each do |action|
+ context "when pipeline action is #{action}" do
+ let(:pipeline_action) { action }
- context 'when stage one failed' do
- let!(:build_b) { create_build('b', 1) }
-
- before do
- build_a.drop
- end
+ it 'schedules a new PipelineHooksWorker job' do
+ expect(PipelineHooksWorker).to receive(:perform_async).with(pipeline.id)
- it 'receives a failed event once' do
- expect(WebMock).to have_requested_pipeline_hook('failed').once
- end
+ pipeline.reload.public_send(pipeline_action)
end
-
- def have_requested_pipeline_hook(status)
- have_requested(:post, stubbed_hostname(hook.url)).with do |req|
- json_body = Gitlab::Json.parse(req.body)
- json_body['object_attributes']['status'] == status &&
- json_body['builds'].length == 2
- end
- end
- end
- end
-
- context 'with pipeline hooks disabled' do
- let(:enabled) { false }
-
- before do
- build_a.enqueue
- build_b.enqueue
- end
-
- it 'did not execute pipeline_hook after touched' do
- expect(WebMock).not_to have_requested(:post, hook.url)
end
-
- it 'does not build hook data' do
- expect(Gitlab::DataBuilder::Pipeline).not_to receive(:build)
-
- pipeline.execute_hooks
- end
- end
-
- def create_build(name, stage_idx)
- create(:ci_build,
- :created,
- pipeline: pipeline,
- name: name,
- stage: "stage:#{stage_idx}",
- stage_idx: stage_idx)
end
end
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index 0a43f785598..ac1a8247aaa 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -147,13 +147,20 @@ RSpec.describe Ci::Processable do
end
it 'releases a resource when build finished' do
- expect(build.resource_group).to receive(:release_resource_from).with(build).and_call_original
+ expect(build.resource_group).to receive(:release_resource_from).with(build).and_return(true).and_call_original
expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).to receive(:perform_async).with(build.resource_group_id)
build.enqueue_waiting_for_resource!
build.success!
end
+ it 're-checks the resource group even if the processable does not retain a resource' do
+ expect(build.resource_group).to receive(:release_resource_from).with(build).and_return(false).and_call_original
+ expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).to receive(:perform_async).with(build.resource_group_id)
+
+ build.success!
+ end
+
context 'when build has prerequisites' do
before do
allow(build).to receive(:any_unmet_prerequisites?) { true }
diff --git a/spec/models/ci/resource_group_spec.rb b/spec/models/ci/resource_group_spec.rb
index 50a786419f2..aae16157fbf 100644
--- a/spec/models/ci/resource_group_spec.rb
+++ b/spec/models/ci/resource_group_spec.rb
@@ -85,4 +85,61 @@ RSpec.describe Ci::ResourceGroup do
end
end
end
+
+ describe '#upcoming_processables' do
+ subject { resource_group.upcoming_processables }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:pipeline_1) { create(:ci_pipeline, project: project) }
+ let_it_be(:pipeline_2) { create(:ci_pipeline, project: project) }
+
+ let!(:resource_group) { create(:ci_resource_group, process_mode: process_mode, project: project) }
+
+ Ci::HasStatus::STATUSES_ENUM.keys.each do |status|
+ let!("build_1_#{status}") { create(:ci_build, pipeline: pipeline_1, status: status, resource_group: resource_group) }
+ let!("build_2_#{status}") { create(:ci_build, pipeline: pipeline_2, status: status, resource_group: resource_group) }
+ end
+
+ context 'when process mode is unordered' do
+ let(:process_mode) { :unordered }
+
+ it 'returns correct jobs in a nondeterministic order' do
+ expect(subject).to contain_exactly(build_1_waiting_for_resource, build_2_waiting_for_resource)
+ end
+ end
+
+ context 'when process mode is oldest_first' do
+ let(:process_mode) { :oldest_first }
+
+ it 'returns correct jobs in a specific order' do
+ expect(subject[0]).to eq(build_1_waiting_for_resource)
+ expect(subject[1..2]).to contain_exactly(build_1_created, build_1_scheduled)
+ expect(subject[3]).to eq(build_2_waiting_for_resource)
+ expect(subject[4..5]).to contain_exactly(build_2_created, build_2_scheduled)
+ end
+ end
+
+ context 'when process mode is newest_first' do
+ let(:process_mode) { :newest_first }
+
+ it 'returns correct jobs in a specific order' do
+ expect(subject[0]).to eq(build_2_waiting_for_resource)
+ expect(subject[1..2]).to contain_exactly(build_2_created, build_2_scheduled)
+ expect(subject[3]).to eq(build_1_waiting_for_resource)
+ expect(subject[4..5]).to contain_exactly(build_1_created, build_1_scheduled)
+ end
+ end
+
+ context 'when process mode is unknown' do
+ let(:process_mode) { :unordered }
+
+ before do
+ resource_group.update_column(:process_mode, 3)
+ end
+
+ it 'returns empty' do
+ is_expected.to be_empty
+ end
+ end
+ end
end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 31e854c852e..826332268c5 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -5,6 +5,20 @@ require 'spec_helper'
RSpec.describe Ci::Runner do
it_behaves_like 'having unique enum values'
+ describe 'groups association' do
+ # Due to other associations such as projects this whole spec is allowed to
+ # generate cross-database queries. So we have this temporary spec to
+ # validate that at least groups association does not generate cross-DB
+ # queries.
+ it 'does not create a cross-database query' do
+ runner = create(:ci_runner, :group)
+
+ with_cross_joins_prevented do
+ expect(runner.groups.count).to eq(1)
+ end
+ end
+ end
+
describe 'validation' do
it { is_expected.to validate_presence_of(:access_level) }
it { is_expected.to validate_presence_of(:runner_type) }
@@ -257,7 +271,7 @@ RSpec.describe Ci::Runner do
expect(subject).to be_truthy
expect(runner).to be_project_type
- expect(runner.projects).to eq([project])
+ expect(runner.runner_projects.pluck(:project_id)).to match_array([project.id])
expect(runner.only_for?(project)).to be_truthy
end
end
@@ -383,10 +397,7 @@ RSpec.describe Ci::Runner do
it 'sticks the runner to the primary and calls the original method' do
runner = create(:ci_runner)
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?)
- .and_return(true)
-
- expect(Gitlab::Database::LoadBalancing::Sticking).to receive(:stick)
+ expect(ApplicationRecord.sticking).to receive(:stick)
.with(:runner, runner.id)
expect(Gitlab::Workhorse).to receive(:set_key_and_notify)
@@ -724,7 +735,7 @@ RSpec.describe Ci::Runner do
context 'with invalid runner' do
before do
- runner.projects = []
+ runner.runner_projects.delete_all
end
it 'still updates redis cache and database' do
diff --git a/spec/models/clusters/agents/group_authorization_spec.rb b/spec/models/clusters/agents/group_authorization_spec.rb
index 2a99fb26e3f..baeb8f5464e 100644
--- a/spec/models/clusters/agents/group_authorization_spec.rb
+++ b/spec/models/clusters/agents/group_authorization_spec.rb
@@ -7,4 +7,10 @@ RSpec.describe Clusters::Agents::GroupAuthorization do
it { is_expected.to belong_to(:group).class_name('::Group').required }
it { expect(described_class).to validate_jsonb_schema(['config']) }
+
+ describe '#config_project' do
+ let(:record) { create(:agent_group_authorization) }
+
+ it { expect(record.config_project).to eq(record.agent.project) }
+ end
end
diff --git a/spec/models/clusters/agents/implicit_authorization_spec.rb b/spec/models/clusters/agents/implicit_authorization_spec.rb
index 69aa55a350e..2d6c3ddb426 100644
--- a/spec/models/clusters/agents/implicit_authorization_spec.rb
+++ b/spec/models/clusters/agents/implicit_authorization_spec.rb
@@ -9,6 +9,6 @@ RSpec.describe Clusters::Agents::ImplicitAuthorization do
it { expect(subject.agent).to eq(agent) }
it { expect(subject.agent_id).to eq(agent.id) }
- it { expect(subject.project).to eq(agent.project) }
+ it { expect(subject.config_project).to eq(agent.project) }
it { expect(subject.config).to be_nil }
end
diff --git a/spec/models/clusters/agents/project_authorization_spec.rb b/spec/models/clusters/agents/project_authorization_spec.rb
index 134c70739ac..9ba259356c7 100644
--- a/spec/models/clusters/agents/project_authorization_spec.rb
+++ b/spec/models/clusters/agents/project_authorization_spec.rb
@@ -7,4 +7,10 @@ RSpec.describe Clusters::Agents::ProjectAuthorization do
it { is_expected.to belong_to(:project).class_name('Project').required }
it { expect(described_class).to validate_jsonb_schema(['config']) }
+
+ describe '#config_project' do
+ let(:record) { create(:agent_project_authorization) }
+
+ it { expect(record.config_project).to eq(record.agent.project) }
+ end
end
diff --git a/spec/models/clusters/applications/runner_spec.rb b/spec/models/clusters/applications/runner_spec.rb
index 43e2eab3b9d..788430d53d3 100644
--- a/spec/models/clusters/applications/runner_spec.rb
+++ b/spec/models/clusters/applications/runner_spec.rb
@@ -96,8 +96,9 @@ RSpec.describe Clusters::Applications::Runner do
it 'creates a project runner' do
subject
+ runner_projects = Project.where(id: runner.runner_projects.pluck(:project_id))
expect(runner).to be_project_type
- expect(runner.projects).to eq [project]
+ expect(runner_projects).to match_array [project]
end
end
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index 63fe6923630..ac0ae17f8f7 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -799,7 +799,7 @@ eos
describe '#work_in_progress?' do
[
'squash! ', 'fixup! ', 'wip: ', 'WIP: ', '[WIP] ',
- 'draft: ', 'Draft - ', '[Draft] ', '(draft) ', 'Draft: '
+ 'draft: ', '[Draft] ', '(draft) ', 'Draft: '
].each do |wip_prefix|
it "detects the '#{wip_prefix}' prefix" do
commit.message = "#{wip_prefix}#{commit.message}"
@@ -814,22 +814,18 @@ eos
expect(commit).to be_work_in_progress
end
- it "detects WIP for a commit just saying 'draft'" do
+ it "does not detect WIP for a commit just saying 'draft'" do
commit.message = "draft"
- expect(commit).to be_work_in_progress
- end
-
- it "doesn't detect WIP for a commit that begins with 'FIXUP! '" do
- commit.message = "FIXUP! #{commit.message}"
-
expect(commit).not_to be_work_in_progress
end
- it "doesn't detect WIP for words starting with WIP" do
- commit.message = "Wipout #{commit.message}"
+ ["FIXUP!", "Draft - ", "Wipeout"].each do |draft_prefix|
+ it "doesn't detect '#{draft_prefix}' at the start of the title as a draft" do
+ commit.message = "#{draft_prefix} #{commit.message}"
- expect(commit).not_to be_work_in_progress
+ expect(commit).not_to be_work_in_progress
+ end
end
end
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 7134a387e65..20afddd8470 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -123,6 +123,16 @@ RSpec.describe CommitStatus do
end
end
+ describe '.scheduled_at_before' do
+ let!(:never_scheduled) { create(:commit_status) }
+ let!(:stale_scheduled) { create(:commit_status, scheduled_at: 1.day.ago) }
+ let!(:fresh_scheduled) { create(:commit_status, scheduled_at: 1.minute.ago) }
+
+ subject { CommitStatus.scheduled_at_before(1.hour.ago) }
+
+ it { is_expected.to contain_exactly(stale_scheduled) }
+ end
+
describe '#processed' do
subject { commit_status.processed }
diff --git a/spec/models/concerns/bulk_insert_safe_spec.rb b/spec/models/concerns/bulk_insert_safe_spec.rb
index 209ee1264d5..172986c142c 100644
--- a/spec/models/concerns/bulk_insert_safe_spec.rb
+++ b/spec/models/concerns/bulk_insert_safe_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe BulkInsertSafe do
t.binary :sha_value, null: false, limit: 20
t.jsonb :jsonb_value, null: false
t.belongs_to :bulk_insert_parent_item, foreign_key: true, null: true
+ t.timestamps null: true
t.index :name, unique: true
end
@@ -179,29 +180,26 @@ RSpec.describe BulkInsertSafe do
end
context 'with returns option set' do
+ let(:items) { bulk_insert_item_class.valid_list(1) }
+
+ subject(:bulk_insert) { bulk_insert_item_class.bulk_insert!(items, returns: returns) }
+
context 'when is set to :ids' do
- it 'return an array with the primary key values for all inserted records' do
- items = bulk_insert_item_class.valid_list(1)
+ let(:returns) { :ids }
- expect(bulk_insert_item_class.bulk_insert!(items, returns: :ids)).to contain_exactly(a_kind_of(Integer))
- end
+ it { is_expected.to contain_exactly(a_kind_of(Integer)) }
end
context 'when is set to nil' do
- it 'returns an empty array' do
- items = bulk_insert_item_class.valid_list(1)
+ let(:returns) { nil }
- expect(bulk_insert_item_class.bulk_insert!(items, returns: nil)).to eq([])
- end
+ it { is_expected.to eq([]) }
end
- context 'when is set to anything else' do
- it 'raises an error' do
- items = bulk_insert_item_class.valid_list(1)
+ context 'when is set to a list of attributes' do
+ let(:returns) { [:id, :sha_value] }
- expect { bulk_insert_item_class.bulk_insert!([items], returns: [:id, :name]) }
- .to raise_error(ArgumentError, "returns needs to be :ids or nil")
- end
+ it { is_expected.to contain_exactly([a_kind_of(Integer), '2fd4e1c67a2d28fced849ee1bb76e7391b93eb12']) }
end
end
end
@@ -228,10 +226,20 @@ RSpec.describe BulkInsertSafe do
end
describe '.bulk_upsert!' do
+ subject(:bulk_upsert) { bulk_insert_item_class.bulk_upsert!([new_object], unique_by: %w[name]) }
+
it 'updates existing object' do
- bulk_insert_item_class.bulk_upsert!([new_object], unique_by: %w[name])
+ expect { bulk_upsert }.to change { existing_object.reload.secret_value }.to('new value')
+ end
- expect(existing_object.reload.secret_value).to eq('new value')
+ context 'when the `created_at` attribute is provided' do
+ before do
+ new_object.created_at = 10.days.from_now
+ end
+
+ it 'does not change the existing `created_at` value' do
+ expect { bulk_upsert }.not_to change { existing_object.reload.created_at }
+ end
end
end
end
@@ -250,7 +258,7 @@ RSpec.describe BulkInsertSafe do
it 'successfully inserts an item' do
expect(ActiveRecord::InsertAll).to receive(:new)
.with(
- bulk_insert_items_with_composite_pk_class, [new_object.as_json], on_duplicate: :raise, returning: false, unique_by: %w[id name]
+ bulk_insert_items_with_composite_pk_class.insert_all_proxy_class, [new_object.as_json], on_duplicate: :raise, returning: false, unique_by: %w[id name]
).and_call_original
expect { bulk_insert_items_with_composite_pk_class.bulk_insert!([new_object]) }.to(
diff --git a/spec/models/concerns/checksummable_spec.rb b/spec/models/concerns/checksummable_spec.rb
index 3a0387333e8..93a65605b50 100644
--- a/spec/models/concerns/checksummable_spec.rb
+++ b/spec/models/concerns/checksummable_spec.rb
@@ -13,11 +13,19 @@ RSpec.describe Checksummable do
end
end
- describe ".hexdigest" do
+ describe ".sha256_hexdigest" do
it 'returns the SHA256 sum of the file' do
expected = Digest::SHA256.file(__FILE__).hexdigest
- expect(subject.hexdigest(__FILE__)).to eq(expected)
+ expect(subject.sha256_hexdigest(__FILE__)).to eq(expected)
+ end
+ end
+
+ describe ".md5_hexdigest" do
+ it 'returns the MD5 sum of the file' do
+ expected = Digest::MD5.file(__FILE__).hexdigest
+
+ expect(subject.md5_hexdigest(__FILE__)).to eq(expected)
end
end
end
diff --git a/spec/models/concerns/ci/has_status_spec.rb b/spec/models/concerns/ci/has_status_spec.rb
index 0709a050056..9dfc7d84f89 100644
--- a/spec/models/concerns/ci/has_status_spec.rb
+++ b/spec/models/concerns/ci/has_status_spec.rb
@@ -363,6 +363,18 @@ RSpec.describe Ci::HasStatus do
it_behaves_like 'not containing the job', status
end
end
+
+ describe '.waiting_for_resource_or_upcoming' do
+ subject { CommitStatus.waiting_for_resource_or_upcoming }
+
+ %i[created scheduled waiting_for_resource].each do |status|
+ it_behaves_like 'containing the job', status
+ end
+
+ %i[running failed success canceled].each do |status|
+ it_behaves_like 'not containing the job', status
+ end
+ end
end
describe '::DEFAULT_STATUS' do
diff --git a/spec/models/concerns/vulnerability_finding_helpers_spec.rb b/spec/models/concerns/vulnerability_finding_helpers_spec.rb
new file mode 100644
index 00000000000..023ecccb520
--- /dev/null
+++ b/spec/models/concerns/vulnerability_finding_helpers_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe VulnerabilityFindingHelpers do
+ let(:cls) do
+ Class.new do
+ include VulnerabilityFindingHelpers
+
+ attr_accessor :report_type
+
+ def initialize(report_type)
+ @report_type = report_type
+ end
+ end
+ end
+
+ describe '#requires_manual_resolution?' do
+ it 'returns false if the finding does not require manual resolution' do
+ expect(cls.new('sast').requires_manual_resolution?).to eq(false)
+ end
+
+ it 'returns true when the finding requires manual resolution' do
+ expect(cls.new('secret_detection').requires_manual_resolution?).to eq(true)
+ end
+ end
+end
diff --git a/spec/models/customer_relations/contact_spec.rb b/spec/models/customer_relations/contact_spec.rb
index b19554dd67e..298d5db3ab9 100644
--- a/spec/models/customer_relations/contact_spec.rb
+++ b/spec/models/customer_relations/contact_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe CustomerRelations::Contact, type: :model do
describe 'associations' do
it { is_expected.to belong_to(:group) }
it { is_expected.to belong_to(:organization).optional }
+ it { is_expected.to have_and_belong_to_many(:issues) }
end
describe 'validations' do
diff --git a/spec/models/dependency_proxy/blob_spec.rb b/spec/models/dependency_proxy/blob_spec.rb
index 3797f6184fe..3c54d3126a8 100644
--- a/spec/models/dependency_proxy/blob_spec.rb
+++ b/spec/models/dependency_proxy/blob_spec.rb
@@ -2,17 +2,16 @@
require 'spec_helper'
RSpec.describe DependencyProxy::Blob, type: :model do
+ it_behaves_like 'ttl_expirable'
+
describe 'relationships' do
it { is_expected.to belong_to(:group) }
end
- it_behaves_like 'having unique enum values'
-
describe 'validations' do
it { is_expected.to validate_presence_of(:group) }
it { is_expected.to validate_presence_of(:file) }
it { is_expected.to validate_presence_of(:file_name) }
- it { is_expected.to validate_presence_of(:status) }
end
describe '.total_size' do
diff --git a/spec/models/dependency_proxy/image_ttl_group_policy_spec.rb b/spec/models/dependency_proxy/image_ttl_group_policy_spec.rb
index 2906ea7b774..9f6358e1286 100644
--- a/spec/models/dependency_proxy/image_ttl_group_policy_spec.rb
+++ b/spec/models/dependency_proxy/image_ttl_group_policy_spec.rb
@@ -20,4 +20,13 @@ RSpec.describe DependencyProxy::ImageTtlGroupPolicy, type: :model do
it { is_expected.to validate_numericality_of(:ttl).allow_nil.is_greater_than(0) }
end
end
+
+ describe '.enabled' do
+ it 'returns policies that are enabled' do
+ enabled_policy = create(:image_ttl_group_policy)
+ create(:image_ttl_group_policy, :disabled)
+
+ expect(described_class.enabled).to contain_exactly(enabled_policy)
+ end
+ end
end
diff --git a/spec/models/dependency_proxy/manifest_spec.rb b/spec/models/dependency_proxy/manifest_spec.rb
index 2a085b3613b..e7f0889345a 100644
--- a/spec/models/dependency_proxy/manifest_spec.rb
+++ b/spec/models/dependency_proxy/manifest_spec.rb
@@ -2,18 +2,17 @@
require 'spec_helper'
RSpec.describe DependencyProxy::Manifest, type: :model do
+ it_behaves_like 'ttl_expirable'
+
describe 'relationships' do
it { is_expected.to belong_to(:group) }
end
- it_behaves_like 'having unique enum values'
-
describe 'validations' do
it { is_expected.to validate_presence_of(:group) }
it { is_expected.to validate_presence_of(:file) }
it { is_expected.to validate_presence_of(:file_name) }
it { is_expected.to validate_presence_of(:digest) }
- it { is_expected.to validate_presence_of(:status) }
end
describe 'file is being stored' do
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index a0e5e9cbfe4..f9a05fbb06f 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -456,18 +456,6 @@ RSpec.describe Deployment do
end
end
- describe 'with_deployable' do
- subject { described_class.with_deployable }
-
- it 'retrieves deployments with deployable builds' do
- with_deployable = create(:deployment)
- create(:deployment, deployable: nil)
- create(:deployment, deployable_type: 'CommitStatus', deployable_id: non_existing_record_id)
-
- is_expected.to contain_exactly(with_deployable)
- end
- end
-
describe 'visible' do
subject { described_class.visible }
@@ -613,6 +601,26 @@ RSpec.describe Deployment do
end
end
+ describe '.builds' do
+ let!(:deployment1) { create(:deployment) }
+ let!(:deployment2) { create(:deployment) }
+ let!(:deployment3) { create(:deployment) }
+
+ subject { described_class.builds }
+
+ it 'retrieves builds for the deployments' do
+ is_expected.to match_array(
+ [deployment1.deployable, deployment2.deployable, deployment3.deployable])
+ end
+
+ it 'does not fetch the null deployable_ids' do
+ deployment3.update!(deployable_id: nil, deployable_type: nil)
+
+ is_expected.to match_array(
+ [deployment1.deployable, deployment2.deployable])
+ end
+ end
+
describe '#previous_deployment' do
using RSpec::Parameterized::TableSyntax
@@ -757,7 +765,7 @@ RSpec.describe Deployment do
expect(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
expect(Deployments::HooksWorker).to receive(:perform_async)
- deploy.update_status('success')
+ expect(deploy.update_status('success')).to eq(true)
end
it 'updates finished_at when transitioning to a finished status' do
@@ -767,6 +775,139 @@ RSpec.describe Deployment do
expect(deploy.read_attribute(:finished_at)).to eq(Time.current)
end
end
+
+ it 'tracks an exception if an invalid status transition is detected' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(instance_of(described_class::StatusUpdateError), deployment_id: deploy.id)
+
+ expect(deploy.update_status('running')).to eq(false)
+ end
+
+ it 'tracks an exception if an invalid argument' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(instance_of(described_class::StatusUpdateError), deployment_id: deploy.id)
+
+ expect(deploy.update_status('created')).to eq(false)
+ end
+ end
+
+ describe '#sync_status_with' do
+ subject { deployment.sync_status_with(ci_build) }
+
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:deployment) { create(:deployment, project: project, status: deployment_status) }
+ let(:ci_build) { create(:ci_build, project: project, status: build_status) }
+
+ shared_examples_for 'synchronizing deployment' do
+ it 'changes deployment status' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ is_expected.to eq(true)
+
+ expect(deployment.status).to eq(build_status.to_s)
+ expect(deployment.errors).to be_empty
+ end
+ end
+
+ shared_examples_for 'gracefully handling error' do
+ it 'tracks an exception' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ instance_of(described_class::StatusSyncError),
+ deployment_id: deployment.id,
+ build_id: ci_build.id)
+
+ is_expected.to eq(false)
+
+ expect(deployment.status).to eq(deployment_status.to_s)
+ expect(deployment.errors.full_messages).to include(error_message)
+ end
+ end
+
+ shared_examples_for 'ignoring build' do
+ it 'does not change deployment status' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ is_expected.to eq(false)
+
+ expect(deployment.status).to eq(deployment_status.to_s)
+ expect(deployment.errors).to be_empty
+ end
+ end
+
+ context 'with created deployment' do
+ let(:deployment_status) { :created }
+
+ context 'with running build' do
+ let(:build_status) { :running }
+
+ it_behaves_like 'synchronizing deployment'
+ end
+
+ context 'with finished build' do
+ let(:build_status) { :success }
+
+ it_behaves_like 'synchronizing deployment'
+ end
+
+ context 'with unrelated build' do
+ let(:build_status) { :waiting_for_resource }
+
+ it_behaves_like 'ignoring build'
+ end
+ end
+
+ context 'with running deployment' do
+ let(:deployment_status) { :running }
+
+ context 'with running build' do
+ let(:build_status) { :running }
+
+ it_behaves_like 'gracefully handling error' do
+ let(:error_message) { %Q{Status cannot transition via \"run\"} }
+ end
+ end
+
+ context 'with finished build' do
+ let(:build_status) { :success }
+
+ it_behaves_like 'synchronizing deployment'
+ end
+
+ context 'with unrelated build' do
+ let(:build_status) { :waiting_for_resource }
+
+ it_behaves_like 'ignoring build'
+ end
+ end
+
+ context 'with finished deployment' do
+ let(:deployment_status) { :success }
+
+ context 'with running build' do
+ let(:build_status) { :running }
+
+ it_behaves_like 'gracefully handling error' do
+ let(:error_message) { %Q{Status cannot transition via \"run\"} }
+ end
+ end
+
+ context 'with finished build' do
+ let(:build_status) { :success }
+
+ it_behaves_like 'gracefully handling error' do
+ let(:error_message) { %Q{Status cannot transition via \"succeed\"} }
+ end
+ end
+
+ context 'with unrelated build' do
+ let(:build_status) { :waiting_for_resource }
+
+ it_behaves_like 'ignoring build'
+ end
+ end
end
describe '#valid_sha' do
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index e3e9d1f7a71..08c639957d3 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -801,38 +801,6 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
expect(query_count).to eq(0)
end
end
-
- context 'when the feature for disable_join is disabled' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:ci_build) { create(:ci_build, project: project, pipeline: pipeline) }
-
- before do
- stub_feature_flags(environment_last_visible_pipeline_disable_joins: false)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build)
- end
-
- context 'for preload' do
- it 'executes the original association instead of override' do
- environment.reload
- ActiveRecord::Associations::Preloader.new.preload(environment, [last_visible_deployable: []])
-
- expect_any_instance_of(Deployment).not_to receive(:deployable)
-
- query_count = ActiveRecord::QueryRecorder.new do
- expect(subject.id).to eq(ci_build.id)
- end.count
-
- expect(query_count).to eq(0)
- end
- end
-
- context 'for direct call' do
- it 'executes the original association instead of override' do
- expect_any_instance_of(Deployment).not_to receive(:deployable)
- expect(subject.id).to eq(ci_build.id)
- end
- end
- end
end
describe '#last_visible_pipeline' do
@@ -963,40 +931,6 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
expect(query_count).to eq(0)
end
end
-
- context 'when the feature for disable_join is disabled' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:ci_build) { create(:ci_build, project: project, pipeline: pipeline) }
-
- before do
- stub_feature_flags(environment_last_visible_pipeline_disable_joins: false)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build)
- end
-
- subject { environment.last_visible_pipeline }
-
- context 'for preload' do
- it 'executes the original association instead of override' do
- environment.reload
- ActiveRecord::Associations::Preloader.new.preload(environment, [last_visible_pipeline: []])
-
- expect_any_instance_of(Ci::Build).not_to receive(:pipeline)
-
- query_count = ActiveRecord::QueryRecorder.new do
- expect(subject.id).to eq(pipeline.id)
- end.count
-
- expect(query_count).to eq(0)
- end
- end
-
- context 'for direct call' do
- it 'executes the original association instead of override' do
- expect_any_instance_of(Ci::Build).not_to receive(:pipeline)
- expect(subject.id).to eq(pipeline.id)
- end
- end
- end
end
describe '#upcoming_deployment' do
diff --git a/spec/models/error_tracking/error_spec.rb b/spec/models/error_tracking/error_spec.rb
index 5543392b624..9b8a81c6372 100644
--- a/spec/models/error_tracking/error_spec.rb
+++ b/spec/models/error_tracking/error_spec.rb
@@ -81,6 +81,13 @@ RSpec.describe ErrorTracking::Error, type: :model do
end
describe '#to_sentry_detailed_error' do
- it { expect(error.to_sentry_detailed_error).to be_kind_of(Gitlab::ErrorTracking::DetailedError) }
+ let_it_be(:event) { create(:error_tracking_error_event, error: error) }
+
+ subject { error.to_sentry_detailed_error }
+
+ it { is_expected.to be_kind_of(Gitlab::ErrorTracking::DetailedError) }
+ it { expect(subject.integrated).to be_truthy }
+ it { expect(subject.first_release_version).to eq('db853d7') }
+ it { expect(subject.last_release_version).to eq('db853d7') }
end
end
diff --git a/spec/models/error_tracking/project_error_tracking_setting_spec.rb b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
index 29255e53fcf..d17541b4a6c 100644
--- a/spec/models/error_tracking/project_error_tracking_setting_spec.rb
+++ b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
@@ -79,6 +79,46 @@ RSpec.describe ErrorTracking::ProjectErrorTrackingSetting do
end
end
+ describe 'Callbacks' do
+ describe 'after_save :create_client_key!' do
+ subject { build(:project_error_tracking_setting, :integrated, project: project) }
+
+ context 'no client key yet' do
+ it 'creates a new client key' do
+ expect { subject.save! }.to change { ErrorTracking::ClientKey.count }.by(1)
+ end
+
+ context 'sentry backend' do
+ before do
+ subject.integrated = false
+ end
+
+ it 'does not create a new client key' do
+ expect { subject.save! }.not_to change { ErrorTracking::ClientKey.count }
+ end
+ end
+
+ context 'feature disabled' do
+ before do
+ subject.enabled = false
+ end
+
+ it 'does not create a new client key' do
+ expect { subject.save! }.not_to change { ErrorTracking::ClientKey.count }
+ end
+ end
+ end
+
+ context 'client key already exists' do
+ let!(:client_key) { create(:error_tracking_client_key, project: project) }
+
+ it 'does not create a new client key' do
+ expect { subject.save! }.not_to change { ErrorTracking::ClientKey.count }
+ end
+ end
+ end
+ end
+
describe '.extract_sentry_external_url' do
subject { described_class.extract_sentry_external_url(sentry_url) }
@@ -494,4 +534,10 @@ RSpec.describe ErrorTracking::ProjectErrorTrackingSetting do
it { expect(subject.sentry_enabled).to eq(sentry_enabled) }
end
end
+
+ describe '#gitlab_dsn' do
+ let!(:client_key) { create(:error_tracking_client_key, project: project) }
+
+ it { expect(subject.gitlab_dsn).to eq(client_key.sentry_dsn) }
+ end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index e8aebe35302..e88abc21ef2 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -36,6 +36,7 @@ RSpec.describe Group do
it { is_expected.to have_many(:debian_distributions).class_name('Packages::Debian::GroupDistribution').dependent(:destroy) }
it { is_expected.to have_many(:daily_build_group_report_results).class_name('Ci::DailyBuildGroupReportResult') }
it { is_expected.to have_many(:group_callouts).class_name('Users::GroupCallout').with_foreign_key(:group_id) }
+ it { is_expected.to have_many(:bulk_import_exports).class_name('BulkImports::Export') }
describe '#members & #requesters' do
let(:requester) { create(:user) }
@@ -2369,7 +2370,7 @@ RSpec.describe Group do
let_it_be(:project) { create(:project, group: group, shared_runners_enabled: true) }
let_it_be(:project_2) { create(:project, group: sub_group_2, shared_runners_enabled: true) }
- subject { group.update_shared_runners_setting!('disabled_and_unoverridable') }
+ subject { group.update_shared_runners_setting!(Namespace::SR_DISABLED_AND_UNOVERRIDABLE) }
it 'disables shared Runners for all descendant groups and projects' do
expect { subject_and_reload(group, sub_group, sub_group_2, project, project_2) }
@@ -2395,7 +2396,7 @@ RSpec.describe Group do
end
context 'disabled_with_override' do
- subject { group.update_shared_runners_setting!('disabled_with_override') }
+ subject { group.update_shared_runners_setting!(Namespace::SR_DISABLED_WITH_OVERRIDE) }
context 'top level group' do
let_it_be(:group) { create(:group, :shared_runners_disabled) }
@@ -2607,17 +2608,29 @@ RSpec.describe Group do
end
describe '.ids_with_disabled_email' do
- let!(:parent_1) { create(:group, emails_disabled: true) }
- let!(:child_1) { create(:group, parent: parent_1) }
+ let_it_be(:parent_1) { create(:group, emails_disabled: true) }
+ let_it_be(:child_1) { create(:group, parent: parent_1) }
- let!(:parent_2) { create(:group, emails_disabled: false) }
- let!(:child_2) { create(:group, parent: parent_2) }
+ let_it_be(:parent_2) { create(:group, emails_disabled: false) }
+ let_it_be(:child_2) { create(:group, parent: parent_2) }
- let!(:other_group) { create(:group, emails_disabled: false) }
+ let_it_be(:other_group) { create(:group, emails_disabled: false) }
- subject(:group_ids_where_email_is_disabled) { described_class.ids_with_disabled_email([child_1, child_2, other_group]) }
+ shared_examples 'returns namespaces with disabled email' do
+ subject(:group_ids_where_email_is_disabled) { described_class.ids_with_disabled_email([child_1, child_2, other_group]) }
- it { is_expected.to eq(Set.new([child_1.id])) }
+ it { is_expected.to eq(Set.new([child_1.id])) }
+ end
+
+ it_behaves_like 'returns namespaces with disabled email'
+
+ context 'when feature flag :linear_group_ancestor_scopes is disabled' do
+ before do
+ stub_feature_flags(linear_group_ancestor_scopes: false)
+ end
+
+ it_behaves_like 'returns namespaces with disabled email'
+ end
end
describe '.timelogs' do
diff --git a/spec/models/instance_configuration_spec.rb b/spec/models/instance_configuration_spec.rb
index 551e6e7572c..cc0b69e3526 100644
--- a/spec/models/instance_configuration_spec.rb
+++ b/spec/models/instance_configuration_spec.rb
@@ -31,6 +31,23 @@ RSpec.describe InstanceConfiguration do
expect(result.size).to eq(InstanceConfiguration::SSH_ALGORITHMS.size)
end
+ it 'includes all algorithms' do
+ stub_pub_file(pub_file)
+
+ result = subject.settings[:ssh_algorithms_hashes]
+
+ expect(result.map { |a| a[:name] }).to match_array(%w(DSA ECDSA ED25519 RSA))
+ end
+
+ it 'does not include disabled algorithm' do
+ Gitlab::CurrentSettings.current_application_settings.update!(dsa_key_restriction: ApplicationSetting::FORBIDDEN_KEY_VALUE)
+ stub_pub_file(pub_file)
+
+ result = subject.settings[:ssh_algorithms_hashes]
+
+ expect(result.map { |a| a[:name] }).to match_array(%w(ECDSA ED25519 RSA))
+ end
+
def pub_file(exist: true)
path = exist ? 'spec/fixtures/ssh_host_example_key.pub' : 'spec/fixtures/ssh_host_example_key.pub.random'
@@ -175,6 +192,9 @@ RSpec.describe InstanceConfiguration do
throttle_authenticated_packages_api_enabled: true,
throttle_authenticated_packages_api_requests_per_period: 1011,
throttle_authenticated_packages_api_period_in_seconds: 1012,
+ throttle_authenticated_git_lfs_enabled: true,
+ throttle_authenticated_git_lfs_requests_per_period: 1022,
+ throttle_authenticated_git_lfs_period_in_seconds: 1023,
issues_create_limit: 1013,
notes_create_limit: 1014,
project_export_limit: 1015,
@@ -196,6 +216,7 @@ RSpec.describe InstanceConfiguration do
expect(rate_limits[:protected_paths]).to eq({ enabled: true, requests_per_period: 1007, period_in_seconds: 1008 })
expect(rate_limits[:unauthenticated_packages_api]).to eq({ enabled: false, requests_per_period: 1009, period_in_seconds: 1010 })
expect(rate_limits[:authenticated_packages_api]).to eq({ enabled: true, requests_per_period: 1011, period_in_seconds: 1012 })
+ expect(rate_limits[:authenticated_git_lfs_api]).to eq({ enabled: true, requests_per_period: 1022, period_in_seconds: 1023 })
expect(rate_limits[:issue_creation]).to eq({ enabled: true, requests_per_period: 1013, period_in_seconds: 60 })
expect(rate_limits[:note_creation]).to eq({ enabled: true, requests_per_period: 1014, period_in_seconds: 60 })
expect(rate_limits[:project_export]).to eq({ enabled: true, requests_per_period: 1015, period_in_seconds: 60 })
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 8a06f7fac99..1a83d948fcf 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -14,7 +14,6 @@ RSpec.describe Integration do
it { is_expected.to have_one(:service_hook).inverse_of(:integration).with_foreign_key(:service_id) }
it { is_expected.to have_one(:issue_tracker_data).autosave(true).inverse_of(:integration).with_foreign_key(:service_id).class_name('Integrations::IssueTrackerData') }
it { is_expected.to have_one(:jira_tracker_data).autosave(true).inverse_of(:integration).with_foreign_key(:service_id).class_name('Integrations::JiraTrackerData') }
- it { is_expected.to have_one(:open_project_tracker_data).autosave(true).inverse_of(:integration).with_foreign_key(:service_id).class_name('Integrations::OpenProjectTrackerData') }
end
describe 'validations' do
diff --git a/spec/models/integrations/open_project_spec.rb b/spec/models/integrations/open_project_spec.rb
deleted file mode 100644
index 789911acae8..00000000000
--- a/spec/models/integrations/open_project_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Integrations::OpenProject do
- describe 'Validations' do
- context 'when integration is active' do
- before do
- subject.active = true
- end
-
- it { is_expected.to validate_presence_of(:url) }
- it { is_expected.to validate_presence_of(:token) }
- it { is_expected.to validate_presence_of(:project_identifier_code) }
-
- it_behaves_like 'issue tracker integration URL attribute', :url
- it_behaves_like 'issue tracker integration URL attribute', :api_url
- end
-
- context 'when integration is inactive' do
- before do
- subject.active = false
- end
-
- it { is_expected.not_to validate_presence_of(:url) }
- it { is_expected.not_to validate_presence_of(:token) }
- it { is_expected.not_to validate_presence_of(:project_identifier_code) }
- end
- end
-end
diff --git a/spec/models/integrations/open_project_tracker_data_spec.rb b/spec/models/integrations/open_project_tracker_data_spec.rb
deleted file mode 100644
index 41c913f978c..00000000000
--- a/spec/models/integrations/open_project_tracker_data_spec.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Integrations::OpenProjectTrackerData do
- describe 'associations' do
- it { is_expected.to belong_to(:integration) }
- end
-
- describe 'closed_status_id' do
- it 'returns the set value' do
- expect(build(:open_project_tracker_data).closed_status_id).to eq('15')
- end
-
- it 'returns the default value if not set' do
- expect(build(:open_project_tracker_data, closed_status_id: nil).closed_status_id).to eq('13')
- end
- end
-end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 1747972e8ae..4319407706e 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -34,6 +34,7 @@ RSpec.describe Issue do
it { is_expected.to have_many(:issue_email_participants) }
it { is_expected.to have_many(:timelogs).autosave(true) }
it { is_expected.to have_one(:incident_management_issuable_escalation_status) }
+ it { is_expected.to have_and_belong_to_many(:customer_relations_contacts) }
describe 'versions.most_recent' do
it 'returns the most recent version' do
@@ -222,17 +223,15 @@ RSpec.describe Issue do
end
end
- describe '#order_by_position_and_priority' do
+ describe '#order_by_relative_position' do
let(:project) { reusable_project }
- let(:p1) { create(:label, title: 'P1', project: project, priority: 1) }
- let(:p2) { create(:label, title: 'P2', project: project, priority: 2) }
- let!(:issue1) { create(:labeled_issue, project: project, labels: [p1]) }
- let!(:issue2) { create(:labeled_issue, project: project, labels: [p2]) }
+ let!(:issue1) { create(:issue, project: project) }
+ let!(:issue2) { create(:issue, project: project) }
let!(:issue3) { create(:issue, project: project, relative_position: -200) }
let!(:issue4) { create(:issue, project: project, relative_position: -100) }
it 'returns ordered list' do
- expect(project.issues.order_by_position_and_priority)
+ expect(project.issues.order_by_relative_position)
.to match [issue3, issue4, issue1, issue2]
end
end
@@ -1505,6 +1504,26 @@ RSpec.describe Issue do
end
end
+ describe '#supports_move_and_clone?' do
+ let_it_be(:project) { create(:project) }
+ let_it_be_with_refind(:issue) { create(:incident, project: project) }
+
+ where(:issue_type, :supports_move_and_clone) do
+ :issue | true
+ :incident | true
+ end
+
+ with_them do
+ before do
+ issue.update!(issue_type: issue_type)
+ end
+
+ it do
+ expect(issue.supports_move_and_clone?).to eq(supports_move_and_clone)
+ end
+ end
+ end
+
describe '#email_participants_emails' do
let_it_be(:issue) { create(:issue) }
diff --git a/spec/models/loose_foreign_keys/deleted_record_spec.rb b/spec/models/loose_foreign_keys/deleted_record_spec.rb
deleted file mode 100644
index db2f8b4d2d3..00000000000
--- a/spec/models/loose_foreign_keys/deleted_record_spec.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe LooseForeignKeys::DeletedRecord do
- let_it_be(:deleted_record_1) { described_class.create!(created_at: 1.day.ago, deleted_table_name: 'projects', deleted_table_primary_key_value: 5) }
- let_it_be(:deleted_record_2) { described_class.create!(created_at: 3.days.ago, deleted_table_name: 'projects', deleted_table_primary_key_value: 1) }
- let_it_be(:deleted_record_3) { described_class.create!(created_at: 5.days.ago, deleted_table_name: 'projects', deleted_table_primary_key_value: 3) }
- let_it_be(:deleted_record_4) { described_class.create!(created_at: 10.days.ago, deleted_table_name: 'projects', deleted_table_primary_key_value: 1) } # duplicate
-
- # skip created_at because it gets truncated after insert
- def map_attributes(records)
- records.pluck(:deleted_table_name, :deleted_table_primary_key_value)
- end
-
- describe 'partitioning strategy' do
- it 'has retain_non_empty_partitions option' do
- expect(described_class.partitioning_strategy.retain_non_empty_partitions).to eq(true)
- end
- end
-
- describe '.load_batch' do
- it 'loads records and orders them by creation date' do
- records = described_class.load_batch(4)
-
- expect(map_attributes(records)).to eq([['projects', 1], ['projects', 3], ['projects', 1], ['projects', 5]])
- end
-
- it 'supports configurable batch size' do
- records = described_class.load_batch(2)
-
- expect(map_attributes(records)).to eq([['projects', 1], ['projects', 3]])
- end
- end
-
- describe '.delete_records' do
- it 'deletes exactly one record' do
- described_class.delete_records([deleted_record_2])
-
- expect(described_class.count).to eq(3)
- expect(described_class.find_by(created_at: deleted_record_2.created_at)).to eq(nil)
- end
-
- it 'deletes two records' do
- described_class.delete_records([deleted_record_2, deleted_record_4])
-
- expect(described_class.count).to eq(2)
- end
-
- it 'deletes all records' do
- described_class.delete_records([deleted_record_1, deleted_record_2, deleted_record_3, deleted_record_4])
-
- expect(described_class.count).to eq(0)
- end
- end
-end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index 3f7f69ff34e..afe78adc547 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -7,11 +7,11 @@ RSpec.describe Member do
using RSpec::Parameterized::TableSyntax
- describe "Associations" do
+ describe 'Associations' do
it { is_expected.to belong_to(:user) }
end
- describe "Validation" do
+ describe 'Validation' do
subject { described_class.new(access_level: Member::GUEST) }
it { is_expected.to validate_presence_of(:user) }
@@ -28,7 +28,7 @@ RSpec.describe Member do
subject { build(:project_member) }
end
- context "when an invite email is provided" do
+ context 'when an invite email is provided' do
let_it_be(:project) { create(:project) }
let(:member) { build(:project_member, source: project, invite_email: "user@example.com", user: nil) }
@@ -37,34 +37,36 @@ RSpec.describe Member do
expect(member).to be_valid
end
- it "requires a valid invite email" do
+ it 'requires a valid invite email' do
member.invite_email = "nope"
expect(member).not_to be_valid
end
- it "requires a unique invite email scoped to this source" do
+ it 'requires a unique invite email scoped to this source' do
create(:project_member, source: member.source, invite_email: member.invite_email)
expect(member).not_to be_valid
end
end
- context "when an invite email is not provided" do
+ context 'when an invite email is not provided' do
let(:member) { build(:project_member) }
- it "requires a user" do
+ it 'requires a user' do
member.user = nil
expect(member).not_to be_valid
end
- it "is valid otherwise" do
+ it 'is valid otherwise' do
expect(member).to be_valid
end
end
context 'with admin signup restrictions' do
+ let(:expected_message) { _('is not allowed for this group. Check with your administrator.') }
+
context 'when allowed domains for signup is enabled' do
before do
stub_application_setting(domain_allowlist: ['example.com'])
@@ -74,7 +76,7 @@ RSpec.describe Member do
member = build(:group_member, :invited, invite_email: 'info@gitlab.com')
expect(member).not_to be_valid
- expect(member.errors.messages[:user].first).to eq(_('domain is not authorized for sign-up.'))
+ expect(member.errors.messages[:user].first).to eq(expected_message)
end
end
@@ -88,7 +90,7 @@ RSpec.describe Member do
member = build(:group_member, :invited, invite_email: 'denylist@example.org')
expect(member).not_to be_valid
- expect(member.errors.messages[:user].first).to eq(_('is not from an allowed domain.'))
+ expect(member.errors.messages[:user].first).to eq(expected_message)
end
end
@@ -102,18 +104,18 @@ RSpec.describe Member do
member = build(:group_member, :invited, invite_email: 'info@gitlab.com')
expect(member).not_to be_valid
- expect(member.errors.messages[:user].first).to eq(_('is not allowed. Try again with a different email address, or contact your GitLab admin.'))
+ expect(member.errors.messages[:user].first).to eq(expected_message)
end
end
end
- context "when a child member inherits its access level" do
+ context 'when a child member inherits its access level' do
let(:user) { create(:user) }
let(:member) { create(:group_member, :developer, user: user) }
let(:child_group) { create(:group, parent: member.group) }
let(:child_member) { build(:group_member, group: child_group, user: user) }
- it "requires a higher level" do
+ it 'requires a higher level' do
child_member.access_level = GroupMember::REPORTER
child_member.validate
@@ -123,7 +125,7 @@ RSpec.describe Member do
# Membership in a subgroup confers certain access rights, such as being
# able to merge or push code to protected branches.
- it "is valid with an equal level" do
+ it 'is valid with an equal level' do
child_member.access_level = GroupMember::DEVELOPER
child_member.validate
@@ -131,7 +133,7 @@ RSpec.describe Member do
expect(child_member).to be_valid
end
- it "is valid with a higher level" do
+ it 'is valid with a higher level' do
child_member.access_level = GroupMember::MAINTAINER
child_member.validate
@@ -167,6 +169,8 @@ RSpec.describe Member do
describe 'Scopes & finders' do
let_it_be(:project) { create(:project, :public) }
let_it_be(:group) { create(:group) }
+ let_it_be(:blocked_pending_approval_user) { create(:user, :blocked_pending_approval ) }
+ let_it_be(:blocked_pending_approval_project_member) { create(:project_member, :invited, :developer, project: project, invite_email: blocked_pending_approval_user.email) }
before_all do
@owner_user = create(:user).tap { |u| group.add_owner(u) }
@@ -536,9 +540,28 @@ RSpec.describe Member do
it { is_expected.to eq [example_member] }
end
end
+
+ describe '.with_invited_user_state' do
+ subject(:with_invited_user_state) { described_class.with_invited_user_state }
+
+ it { is_expected.to include @owner }
+ it { is_expected.to include @maintainer }
+ it { is_expected.to include @invited_member }
+ it { is_expected.to include @accepted_invite_member }
+ it { is_expected.to include @requested_member }
+ it { is_expected.to include @accepted_request_member }
+
+ context 'with invited pending members' do
+ it 'includes invited user state' do
+ invited_pending_members = with_invited_user_state.select { |m| m.invited_user_state.present? }
+ expect(invited_pending_members.count).to eq 1
+ expect(invited_pending_members).to include blocked_pending_approval_project_member
+ end
+ end
+ end
end
- describe "Delegate methods" do
+ describe 'Delegate methods' do
it { is_expected.to respond_to(:user_name) }
it { is_expected.to respond_to(:user_email) }
end
@@ -608,29 +631,29 @@ RSpec.describe Member do
end
end
- describe "#accept_invite!" do
+ describe '#accept_invite!' do
let!(:member) { create(:project_member, invite_email: "user@example.com", user: nil) }
let(:user) { create(:user) }
- it "resets the invite token" do
+ it 'resets the invite token' do
member.accept_invite!(user)
expect(member.invite_token).to be_nil
end
- it "sets the invite accepted timestamp" do
+ it 'sets the invite accepted timestamp' do
member.accept_invite!(user)
expect(member.invite_accepted_at).not_to be_nil
end
- it "sets the user" do
+ it 'sets the user' do
member.accept_invite!(user)
expect(member.user).to eq(user)
end
- it "calls #after_accept_invite" do
+ it 'calls #after_accept_invite' do
expect(member).to receive(:after_accept_invite)
member.accept_invite!(user)
@@ -657,26 +680,26 @@ RSpec.describe Member do
end
end
- describe "#decline_invite!" do
+ describe '#decline_invite!' do
let!(:member) { create(:project_member, invite_email: "user@example.com", user: nil) }
- it "destroys the member" do
+ it 'destroys the member' do
member.decline_invite!
expect(member).to be_destroyed
end
- it "calls #after_decline_invite" do
+ it 'calls #after_decline_invite' do
expect(member).to receive(:after_decline_invite)
member.decline_invite!
end
end
- describe "#generate_invite_token" do
+ describe '#generate_invite_token' do
let!(:member) { create(:project_member, invite_email: "user@example.com", user: nil) }
- it "sets the invite token" do
+ it 'sets the invite token' do
expect { member.generate_invite_token }.to change { member.invite_token }
end
end
@@ -684,12 +707,12 @@ RSpec.describe Member do
describe 'generate invite token on create' do
let!(:member) { build(:project_member, invite_email: "user@example.com") }
- it "sets the invite token" do
+ it 'sets the invite token' do
expect { member.save! }.to change { member.invite_token }.to(kind_of(String))
end
context 'when invite was already accepted' do
- it "does not set invite token" do
+ it 'does not set invite token' do
member.invite_accepted_at = 1.day.ago
expect { member.save! }.not_to change { member.invite_token }.from(nil)
@@ -744,7 +767,7 @@ RSpec.describe Member do
end
end
- describe "#invite_to_unknown_user?" do
+ describe '#invite_to_unknown_user?' do
subject { member.invite_to_unknown_user? }
let(:member) { create(:project_member, invite_email: "user@example.com", invite_token: '1234', user: user) }
@@ -762,7 +785,7 @@ RSpec.describe Member do
end
end
- describe "destroying a record", :delete do
+ describe 'destroying a record', :delete, :sidekiq_inline do
it "refreshes user's authorized projects" do
project = create(:project, :private)
user = create(:user)
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index 1704d5adb96..ca846cf9e8e 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -244,11 +244,16 @@ RSpec.describe ProjectMember do
project.add_user(user, Gitlab::Access::GUEST)
end
- it 'changes access level' do
+ it 'changes access level', :sidekiq_inline do
expect { action }.to change { user.can?(:guest_access, project) }.from(true).to(false)
end
- it_behaves_like 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserService to recalculate authorizations'
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker to recalculate authorizations' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker).to receive(:perform_async).with(project.id, user.id)
+
+ action
+ end
+
it_behaves_like 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations'
end
@@ -298,7 +303,7 @@ RSpec.describe ProjectMember do
project.add_user(user, Gitlab::Access::GUEST)
end
- it 'changes access level' do
+ it 'changes access level', :sidekiq_inline do
expect { action }.to change { user.can?(:guest_access, project) }.from(true).to(false)
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 06ca88644b7..d871453e062 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -1348,7 +1348,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
[
'WIP:', 'WIP: ', '[WIP]', '[WIP] ', ' [WIP] WIP: [WIP] WIP:',
- 'draft:', 'Draft: ', '[Draft]', '[DRAFT] ', 'Draft - '
+ 'draft:', 'Draft: ', '[Draft]', '[DRAFT] '
].each do |wip_prefix|
it "detects the '#{wip_prefix}' prefix" do
subject.title = "#{wip_prefix}#{subject.title}"
@@ -1357,16 +1357,27 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ [
+ "WIP ", "(WIP)",
+ "draft", "Draft", "Draft -", "draft - ", "Draft ", "draft "
+ ].each do |draft_prefix|
+ it "doesn't detect '#{draft_prefix}' at the start of the title as a draft" do
+ subject.title = "#{draft_prefix}#{subject.title}"
+
+ expect(subject.work_in_progress?).to eq false
+ end
+ end
+
it "detects merge request title just saying 'wip'" do
subject.title = "wip"
expect(subject.work_in_progress?).to eq true
end
- it "detects merge request title just saying 'draft'" do
+ it "does not detect merge request title just saying 'draft'" do
subject.title = "draft"
- expect(subject.work_in_progress?).to eq true
+ expect(subject.work_in_progress?).to eq false
end
it 'does not detect WIP in the middle of the title' do
@@ -1428,7 +1439,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
[
'WIP:', 'WIP: ', '[WIP]', '[WIP] ', '[WIP] WIP: [WIP] WIP:',
- 'draft:', 'Draft: ', '[Draft]', '[DRAFT] ', 'Draft - '
+ 'draft:', 'Draft: ', '[Draft]', '[DRAFT] '
].each do |wip_prefix|
it "removes the '#{wip_prefix}' prefix" do
wipless_title = subject.title
@@ -3078,7 +3089,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
- describe '#mergeable_state?' do
+ shared_examples 'for mergeable_state' do
subject { create(:merge_request) }
it 'checks if merge request can be merged' do
@@ -3119,33 +3130,61 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
context 'when failed' do
- context 'when #mergeable_ci_state? is false' do
- before do
- allow(subject).to receive(:mergeable_ci_state?) { false }
- end
+ shared_examples 'failed skip_ci_check' do
+ context 'when #mergeable_ci_state? is false' do
+ before do
+ allow(subject).to receive(:mergeable_ci_state?) { false }
+ end
- it 'returns false' do
- expect(subject.mergeable_state?).to be_falsey
+ it 'returns false' do
+ expect(subject.mergeable_state?).to be_falsey
+ end
+
+ it 'returns true when skipping ci check' do
+ expect(subject.mergeable_state?(skip_ci_check: true)).to be(true)
+ end
end
- it 'returns true when skipping ci check' do
- expect(subject.mergeable_state?(skip_ci_check: true)).to be(true)
+ context 'when #mergeable_discussions_state? is false' do
+ before do
+ allow(subject).to receive(:mergeable_discussions_state?) { false }
+ end
+
+ it 'returns false' do
+ expect(subject.mergeable_state?).to be_falsey
+ end
+
+ it 'returns true when skipping discussions check' do
+ expect(subject.mergeable_state?(skip_discussions_check: true)).to be(true)
+ end
end
end
- context 'when #mergeable_discussions_state? is false' do
+ context 'when improved_mergeability_checks is on' do
+ it_behaves_like 'failed skip_ci_check'
+ end
+
+ context 'when improved_mergeability_checks is off' do
before do
- allow(subject).to receive(:mergeable_discussions_state?) { false }
+ stub_feature_flags(improved_mergeability_checks: false)
end
- it 'returns false' do
- expect(subject.mergeable_state?).to be_falsey
- end
+ it_behaves_like 'failed skip_ci_check'
+ end
+ end
+ end
- it 'returns true when skipping discussions check' do
- expect(subject.mergeable_state?(skip_discussions_check: true)).to be(true)
- end
+ describe '#mergeable_state?' do
+ context 'when merge state caching is on' do
+ it_behaves_like 'for mergeable_state'
+ end
+
+ context 'when merge state caching is off' do
+ before do
+ stub_feature_flags(mergeability_caching: false)
end
+
+ it_behaves_like 'for mergeable_state'
end
end
diff --git a/spec/models/namespace/traversal_hierarchy_spec.rb b/spec/models/namespace/traversal_hierarchy_spec.rb
index 2cd66f42458..d7b0ee888c0 100644
--- a/spec/models/namespace/traversal_hierarchy_spec.rb
+++ b/spec/models/namespace/traversal_hierarchy_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Namespace::TraversalHierarchy, type: :model do
- let_it_be(:root, reload: true) { create(:group, :with_hierarchy) }
+ let!(:root) { create(:group, :with_hierarchy) }
describe '.for_namespace' do
let(:hierarchy) { described_class.for_namespace(group) }
@@ -62,7 +62,12 @@ RSpec.describe Namespace::TraversalHierarchy, type: :model do
it { expect(hierarchy.incorrect_traversal_ids).to be_empty }
- it_behaves_like 'hierarchy with traversal_ids'
+ it_behaves_like 'hierarchy with traversal_ids' do
+ before do
+ subject
+ end
+ end
+
it_behaves_like 'locked row' do
let(:recorded_queries) { ActiveRecord::QueryRecorder.new }
let(:row) { root }
diff --git a/spec/models/namespace_setting_spec.rb b/spec/models/namespace_setting_spec.rb
index c1cc8fc3e88..429727c2360 100644
--- a/spec/models/namespace_setting_spec.rb
+++ b/spec/models/namespace_setting_spec.rb
@@ -11,6 +11,8 @@ RSpec.describe NamespaceSetting, type: :model do
it { is_expected.to belong_to(:namespace) }
end
+ it { is_expected.to define_enum_for(:jobs_to_be_done).with_values([:basics, :move_repository, :code_storage, :exploring, :ci, :other]).with_suffix }
+
describe "validations" do
describe "#default_branch_name_content" do
let_it_be(:group) { create(:group) }
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 51a26d82daa..c201d89947e 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -7,6 +7,10 @@ RSpec.describe Namespace do
include GitHelpers
include ReloadHelpers
+ let_it_be(:group_sti_name) { Group.sti_name }
+ let_it_be(:project_sti_name) { Namespaces::ProjectNamespace.sti_name }
+ let_it_be(:user_sti_name) { Namespaces::UserNamespace.sti_name }
+
let!(:namespace) { create(:namespace, :with_namespace_settings) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:repository_storage) { 'default' }
@@ -38,20 +42,22 @@ RSpec.describe Namespace do
context 'validating the parent of a namespace' do
using RSpec::Parameterized::TableSyntax
+ # rubocop:disable Lint/BinaryOperatorWithIdenticalOperands
where(:parent_type, :child_type, :error) do
- nil | 'User' | nil
- nil | 'Group' | nil
- nil | 'Project' | 'must be set for a project namespace'
- 'Project' | 'User' | 'project namespace cannot be the parent of another namespace'
- 'Project' | 'Group' | 'project namespace cannot be the parent of another namespace'
- 'Project' | 'Project' | 'project namespace cannot be the parent of another namespace'
- 'Group' | 'User' | 'cannot not be used for user namespace'
- 'Group' | 'Group' | nil
- 'Group' | 'Project' | nil
- 'User' | 'User' | 'cannot not be used for user namespace'
- 'User' | 'Group' | 'user namespace cannot be the parent of another namespace'
- 'User' | 'Project' | nil
- end
+ nil | ref(:user_sti_name) | nil
+ nil | ref(:group_sti_name) | nil
+ nil | ref(:project_sti_name) | 'must be set for a project namespace'
+ ref(:project_sti_name) | ref(:user_sti_name) | 'project namespace cannot be the parent of another namespace'
+ ref(:project_sti_name) | ref(:group_sti_name) | 'project namespace cannot be the parent of another namespace'
+ ref(:project_sti_name) | ref(:project_sti_name) | 'project namespace cannot be the parent of another namespace'
+ ref(:group_sti_name) | ref(:user_sti_name) | 'cannot not be used for user namespace'
+ ref(:group_sti_name) | ref(:group_sti_name) | nil
+ ref(:group_sti_name) | ref(:project_sti_name) | nil
+ ref(:user_sti_name) | ref(:user_sti_name) | 'cannot not be used for user namespace'
+ ref(:user_sti_name) | ref(:group_sti_name) | 'user namespace cannot be the parent of another namespace'
+ ref(:user_sti_name) | ref(:project_sti_name) | nil
+ end
+ # rubocop:enable Lint/BinaryOperatorWithIdenticalOperands
with_them do
it 'validates namespace parent' do
@@ -127,39 +133,77 @@ RSpec.describe Namespace do
end
context 'top-level group' do
- let(:group) { build(:group, path: 'tree') }
+ let(:group) { build(:namespace, path: 'tree') }
it { expect(group).to be_valid }
end
end
- describe '1 char path length' do
- it 'does not allow to create one' do
- namespace = build(:namespace, path: 'j')
+ describe 'path validator' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:parent) { create(:namespace) }
- expect(namespace).not_to be_valid
- expect(namespace.errors[:path].first).to eq('is too short (minimum is 2 characters)')
+ # rubocop:disable Lint/BinaryOperatorWithIdenticalOperands
+ where(:namespace_type, :path, :valid) do
+ ref(:project_sti_name) | 'j' | true
+ ref(:project_sti_name) | 'path.' | true
+ ref(:project_sti_name) | 'blob' | false
+ ref(:group_sti_name) | 'j' | false
+ ref(:group_sti_name) | 'path.' | false
+ ref(:group_sti_name) | 'blob' | true
+ ref(:user_sti_name) | 'j' | false
+ ref(:user_sti_name) | 'path.' | false
+ ref(:user_sti_name) | 'blob' | true
end
+ # rubocop:enable Lint/BinaryOperatorWithIdenticalOperands
- it 'does not allow to update one' do
- namespace = create(:namespace)
- namespace.update(path: 'j')
+ with_them do
+ it 'validates namespace path' do
+ parent_namespace = parent if namespace_type == Namespaces::ProjectNamespace.sti_name
+ namespace = build(:namespace, type: namespace_type, parent: parent_namespace, path: path)
- expect(namespace).not_to be_valid
- expect(namespace.errors[:path].first).to eq('is too short (minimum is 2 characters)')
+ expect(namespace.valid?).to be(valid)
+ end
end
+ end
- it 'allows updating other attributes for existing record' do
- namespace = build(:namespace, path: 'j', owner: create(:user))
- namespace.save(validate: false)
- namespace.reload
+ describe '1 char path length' do
+ context 'with user namespace' do
+ let(:namespace) { build(:namespace) }
- expect(namespace.path).to eq('j')
+ it 'does not allow to update path to single char' do
+ namespace.save!
- namespace.update(name: 'something new')
+ namespace.path = 'j'
- expect(namespace).to be_valid
- expect(namespace.name).to eq('something new')
+ expect(namespace).not_to be_valid
+ expect(namespace.errors[:path].first).to eq('is too short (minimum is 2 characters)')
+ end
+
+ it 'allows updating other attributes for existing record' do
+ namespace.save!
+ namespace.update_attribute(:path, 'j')
+ namespace.reload
+
+ expect(namespace.path).to eq('j')
+
+ namespace.update(name: 'something new')
+
+ expect(namespace).to be_valid
+ expect(namespace.name).to eq('something new')
+ end
+ end
+
+ context 'with project namespace' do
+ let(:namespace) { build(:project_namespace) }
+
+ it 'allows to update path to single char' do
+ namespace = create(:project_namespace)
+ namespace.update(path: 'j')
+
+ expect(namespace).to be_valid
+ end
end
end
end
@@ -170,55 +214,53 @@ RSpec.describe Namespace do
let(:namespace) { Namespace.find(create(:namespace, type: namespace_type, parent: parent).id) }
context 'creating a Group' do
- let(:namespace_type) { 'Group' }
+ let(:namespace_type) { group_sti_name }
- it 'is valid' do
+ it 'is the correct type of namespace' do
expect(namespace).to be_a(Group)
expect(namespace.kind).to eq('group')
- expect(namespace.group?).to be_truthy
+ expect(namespace.group_namespace?).to be_truthy
end
end
context 'creating a ProjectNamespace' do
- let(:namespace_type) { 'Project' }
+ let(:namespace_type) { project_sti_name }
let(:parent) { create(:group) }
- it 'is valid' do
+ it 'is the correct type of namespace' do
expect(Namespace.find(namespace.id)).to be_a(Namespaces::ProjectNamespace)
expect(namespace.kind).to eq('project')
- expect(namespace.project?).to be_truthy
+ expect(namespace.project_namespace?).to be_truthy
end
end
context 'creating a UserNamespace' do
- let(:namespace_type) { 'User' }
+ let(:namespace_type) { user_sti_name }
- it 'is valid' do
- # TODO: We create a normal Namespace until
- # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68894 is ready
- expect(Namespace.find(namespace.id)).to be_a(Namespace)
+ it 'is the correct type of namespace' do
+ expect(Namespace.find(namespace.id)).to be_a(Namespaces::UserNamespace)
expect(namespace.kind).to eq('user')
- expect(namespace.user?).to be_truthy
+ expect(namespace.user_namespace?).to be_truthy
end
end
context 'creating a default Namespace' do
let(:namespace_type) { nil }
- it 'is valid' do
+ it 'is the correct type of namespace' do
expect(Namespace.find(namespace.id)).to be_a(Namespace)
expect(namespace.kind).to eq('user')
- expect(namespace.user?).to be_truthy
+ expect(namespace.user_namespace?).to be_truthy
end
end
context 'creating an unknown Namespace type' do
let(:namespace_type) { 'One' }
- it 'defaults to a Namespace' do
+ it 'creates a default Namespace' do
expect(Namespace.find(namespace.id)).to be_a(Namespace)
expect(namespace.kind).to eq('user')
- expect(namespace.user?).to be_truthy
+ expect(namespace.user_namespace?).to be_truthy
end
end
end
@@ -257,6 +299,15 @@ RSpec.describe Namespace do
expect(described_class.sorted_by_similarity_and_parent_id_desc('Namespace')).to eq([namespace2, namespace1, namespace2sub, namespace1sub, namespace])
end
end
+
+ describe '.without_project_namespaces' do
+ let_it_be(:user_namespace) { create(:user_namespace) }
+ let_it_be(:project_namespace) { create(:project_namespace) }
+
+ it 'excludes project namespaces' do
+ expect(described_class.without_project_namespaces).to match_array([namespace, namespace1, namespace2, namespace1sub, namespace2sub, user_namespace, project_namespace.parent])
+ end
+ end
end
describe 'delegate' do
@@ -428,9 +479,9 @@ RSpec.describe Namespace do
end
describe '.search' do
- let_it_be(:first_group) { build(:group, name: 'my first namespace', path: 'old-path').tap(&:save!) }
- let_it_be(:parent_group) { build(:group, name: 'my parent namespace', path: 'parent-path').tap(&:save!) }
- let_it_be(:second_group) { build(:group, name: 'my second namespace', path: 'new-path', parent: parent_group).tap(&:save!) }
+ let_it_be(:first_group) { create(:group, name: 'my first namespace', path: 'old-path') }
+ let_it_be(:parent_group) { create(:group, name: 'my parent namespace', path: 'parent-path') }
+ let_it_be(:second_group) { create(:group, name: 'my second namespace', path: 'new-path', parent: parent_group) }
let_it_be(:project_with_same_path) { create(:project, id: second_group.id, path: first_group.path) }
it 'returns namespaces with a matching name' do
@@ -1558,8 +1609,8 @@ RSpec.describe Namespace do
end
end
- describe '#user?' do
- subject { namespace.user? }
+ describe '#user_namespace?' do
+ subject { namespace.user_namespace? }
context 'when type is a user' do
let(:user) { create(:user) }
@@ -1745,10 +1796,10 @@ RSpec.describe Namespace do
using RSpec::Parameterized::TableSyntax
where(:shared_runners_enabled, :allow_descendants_override_disabled_shared_runners, :shared_runners_setting) do
- true | true | 'enabled'
- true | false | 'enabled'
- false | true | 'disabled_with_override'
- false | false | 'disabled_and_unoverridable'
+ true | true | Namespace::SR_ENABLED
+ true | false | Namespace::SR_ENABLED
+ false | true | Namespace::SR_DISABLED_WITH_OVERRIDE
+ false | false | Namespace::SR_DISABLED_AND_UNOVERRIDABLE
end
with_them do
@@ -1764,15 +1815,15 @@ RSpec.describe Namespace do
using RSpec::Parameterized::TableSyntax
where(:shared_runners_enabled, :allow_descendants_override_disabled_shared_runners, :other_setting, :result) do
- true | true | 'enabled' | false
- true | true | 'disabled_with_override' | true
- true | true | 'disabled_and_unoverridable' | true
- false | true | 'enabled' | false
- false | true | 'disabled_with_override' | false
- false | true | 'disabled_and_unoverridable' | true
- false | false | 'enabled' | false
- false | false | 'disabled_with_override' | false
- false | false | 'disabled_and_unoverridable' | false
+ true | true | Namespace::SR_ENABLED | false
+ true | true | Namespace::SR_DISABLED_WITH_OVERRIDE | true
+ true | true | Namespace::SR_DISABLED_AND_UNOVERRIDABLE | true
+ false | true | Namespace::SR_ENABLED | false
+ false | true | Namespace::SR_DISABLED_WITH_OVERRIDE | false
+ false | true | Namespace::SR_DISABLED_AND_UNOVERRIDABLE | true
+ false | false | Namespace::SR_ENABLED | false
+ false | false | Namespace::SR_DISABLED_WITH_OVERRIDE | false
+ false | false | Namespace::SR_DISABLED_AND_UNOVERRIDABLE | false
end
with_them do
diff --git a/spec/models/namespaces/user_namespace_spec.rb b/spec/models/namespaces/user_namespace_spec.rb
new file mode 100644
index 00000000000..7c00a597756
--- /dev/null
+++ b/spec/models/namespaces/user_namespace_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Main user namespace functionality it still in `Namespace`, so most
+# of the specs are in `namespace_spec.rb`.
+# UserNamespace specific specs will end up being migrated here.
+RSpec.describe Namespaces::UserNamespace, type: :model do
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:owner) }
+ end
+end
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 5e3773513f1..0dd77967f25 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -108,6 +108,34 @@ RSpec.describe Note do
end
describe 'callbacks' do
+ describe '#keep_around_commit' do
+ let!(:noteable) { create(:issue) }
+
+ it "calls #keep_around_commit normally" do
+ note = build(:note, project: noteable.project, noteable: noteable)
+
+ expect(note).to receive(:keep_around_commit)
+
+ note.save!
+ end
+
+ it "skips #keep_around_commit if 'skip_keep_around_commits' is true" do
+ note = build(:note, project: noteable.project, noteable: noteable, skip_keep_around_commits: true)
+
+ expect(note).not_to receive(:keep_around_commit)
+
+ note.save!
+ end
+
+ it "skips #keep_around_commit if 'importing' is true" do
+ note = build(:note, project: noteable.project, noteable: noteable, importing: true)
+
+ expect(note).not_to receive(:keep_around_commit)
+
+ note.save!
+ end
+ end
+
describe '#notify_after_create' do
it 'calls #after_note_created on the noteable' do
noteable = create(:issue)
diff --git a/spec/models/operations/feature_flag_spec.rb b/spec/models/operations/feature_flag_spec.rb
index d689632e2b4..e709470b312 100644
--- a/spec/models/operations/feature_flag_spec.rb
+++ b/spec/models/operations/feature_flag_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Operations::FeatureFlag do
describe 'associations' do
it { is_expected.to belong_to(:project) }
- it { is_expected.to have_many(:scopes) }
+ it { is_expected.to have_many(:strategies) }
end
describe '.reference_pattern' do
@@ -52,17 +52,6 @@ RSpec.describe Operations::FeatureFlag do
it { is_expected.to define_enum_for(:version).with_values(new_version_flag: 2) }
context 'a version 2 feature flag' do
- it 'is invalid if associated with Operations::FeatureFlagScope models' do
- project = create(:project)
- feature_flag = described_class.new({ name: 'test', project: project, version: 2,
- scopes_attributes: [{ environment_scope: '*', active: false }] })
-
- expect(feature_flag.valid?).to eq(false)
- expect(feature_flag.errors.messages).to eq({
- version_associations: ["version 2 feature flags may not have scopes"]
- })
- end
-
it 'is valid if associated with Operations::FeatureFlags::Strategy models' do
project = create(:project)
feature_flag = described_class.create!({ name: 'test', project: project, version: 2,
@@ -81,18 +70,6 @@ RSpec.describe Operations::FeatureFlag do
end
end
- describe 'the default scope' do
- let_it_be(:project) { create(:project) }
-
- context 'with a version 2 feature flag' do
- it 'does not create a default scope' do
- feature_flag = described_class.create!({ name: 'test', project: project, scopes_attributes: [], version: 2 })
-
- expect(feature_flag.scopes).to eq([])
- end
- end
- end
-
describe '.enabled' do
subject { described_class.enabled }
@@ -187,26 +164,4 @@ RSpec.describe Operations::FeatureFlag do
expect(subject.hook_attrs).to eq(hook_attrs)
end
end
-
- describe "#execute_hooks" do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
- let_it_be(:feature_flag) { create(:operations_feature_flag, project: project) }
-
- it 'does not execute the hook when feature_flag event is disabled' do
- create(:project_hook, project: project, feature_flag_events: false)
- expect(WebHookWorker).not_to receive(:perform_async)
-
- feature_flag.execute_hooks(user)
- feature_flag.touch
- end
-
- it 'executes hook when feature_flag event is enabled' do
- hook = create(:project_hook, project: project, feature_flag_events: true)
- expect(WebHookWorker).to receive(:perform_async).with(hook.id, an_instance_of(Hash), 'feature_flag_hooks')
-
- feature_flag.execute_hooks(user)
- feature_flag.touch
- end
- end
end
diff --git a/spec/models/packages/helm/file_metadatum_spec.rb b/spec/models/packages/helm/file_metadatum_spec.rb
index c7c17b157e4..995179b391d 100644
--- a/spec/models/packages/helm/file_metadatum_spec.rb
+++ b/spec/models/packages/helm/file_metadatum_spec.rb
@@ -31,8 +31,8 @@ RSpec.describe Packages::Helm::FileMetadatum, type: :model do
it 'validates #channel', :aggregate_failures do
is_expected.to validate_presence_of(:channel)
- is_expected.to allow_value('a' * 63).for(:channel)
- is_expected.not_to allow_value('a' * 64).for(:channel)
+ is_expected.to allow_value('a' * 255).for(:channel)
+ is_expected.not_to allow_value('a' * 256).for(:channel)
is_expected.to allow_value('release').for(:channel)
is_expected.to allow_value('my-repo').for(:channel)
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index 99e5769fc1f..2573c01d686 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -1184,7 +1184,7 @@ RSpec.describe Packages::Package, type: :model do
end
context 'with an already existing build info' do
- let_it_be(:build_info) { create(:packages_build_info, package: package, pipeline: pipeline) }
+ let_it_be(:build_info) { create(:package_build_info, package: package, pipeline: pipeline) }
it 'does not create a build info' do
expect { subject }.not_to change { ::Packages::BuildInfo.count }
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index 7b997f0d4e1..2b6ed9a9927 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -94,7 +94,7 @@ RSpec.describe PagesDomain do
with_them do
it "is adds the expected errors" do
- expect(pages_domain.errors.keys).to eq errors_on
+ expect(pages_domain.errors.attribute_names).to eq errors_on
end
end
end
@@ -155,7 +155,7 @@ RSpec.describe PagesDomain do
it "adds error to certificate" do
domain.valid?
- expect(domain.errors.keys).to contain_exactly(:key, :certificate)
+ expect(domain.errors.attribute_names).to contain_exactly(:key, :certificate)
end
end
@@ -165,7 +165,7 @@ RSpec.describe PagesDomain do
domain.valid?
- expect(domain.errors.keys).to contain_exactly(:key)
+ expect(domain.errors.attribute_names).to contain_exactly(:key)
end
end
end
@@ -287,6 +287,19 @@ RSpec.describe PagesDomain do
it { is_expected.to be_truthy }
end
+
+ # The LetsEncrypt DST Root CA X3 expired on 2021-09-30, but the
+ # cross-sign in ISRG Root X1 enables it to function provided a chain
+ # of trust can be established with the system store. See:
+ #
+ # 1. https://community.letsencrypt.org/t/production-chain-changes/150739
+ # 2. https://letsencrypt.org/2020/12/21/extending-android-compatibility.html
+ # 3. https://www.openssl.org/blog/blog/2021/09/13/LetsEncryptRootCertExpire/
+ context 'with a LetsEncrypt bundle with an expired DST Root CA X3' do
+ let(:domain) { build(:pages_domain, :letsencrypt_expired_x3_root) }
+
+ it { is_expected.to be_truthy }
+ end
end
describe '#expired?' do
diff --git a/spec/models/preloaders/merge_requests_preloader_spec.rb b/spec/models/preloaders/merge_requests_preloader_spec.rb
deleted file mode 100644
index 7108de2e491..00000000000
--- a/spec/models/preloaders/merge_requests_preloader_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Preloaders::MergeRequestsPreloader do
- describe '#execute' do
- let_it_be_with_refind(:merge_requests) { create_list(:merge_request, 3) }
- let_it_be(:upvotes) { merge_requests.each { |m| create(:award_emoji, :upvote, awardable: m) } }
-
- it 'does not make n+1 queries' do
- described_class.new(merge_requests).execute
-
- control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
- # expectations make sure the queries execute
- merge_requests.each do |m|
- expect(m.target_project.project_feature).not_to be_nil
- expect(m.lazy_upvotes_count).to eq(1)
- end
- end
-
- # 1 query for BatchLoader to load all upvotes at once
- expect(control.count).to eq(1)
- end
-
- it 'runs extra queries without preloading' do
- control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
- # expectations make sure the queries execute
- merge_requests.each do |m|
- expect(m.target_project.project_feature).not_to be_nil
- expect(m.lazy_upvotes_count).to eq(1)
- end
- end
-
- # 4 queries per merge request =
- # 1 to load merge request
- # 1 to load project
- # 1 to load project_feature
- # 1 to load upvotes count
- expect(control.count).to eq(4 * merge_requests.size)
- end
- end
-end
diff --git a/spec/models/product_analytics_event_spec.rb b/spec/models/product_analytics_event_spec.rb
index 286729b8398..801e6dd5e10 100644
--- a/spec/models/product_analytics_event_spec.rb
+++ b/spec/models/product_analytics_event_spec.rb
@@ -36,17 +36,6 @@ RSpec.describe ProductAnalyticsEvent, type: :model do
it { expect(described_class.count_by_graph('platform', 30.days)).to eq({ 'app' => 1, 'mobile' => 1, 'web' => 2 }) }
end
- describe '.by_category_and_action' do
- let_it_be(:event) { create(:product_analytics_event, se_category: 'catA', se_action: 'actA') }
-
- before do
- create(:product_analytics_event, se_category: 'catA', se_action: 'actB')
- create(:product_analytics_event, se_category: 'catB', se_action: 'actA')
- end
-
- it { expect(described_class.by_category_and_action('catA', 'actA')).to match_array([event]) }
- end
-
describe '.count_collector_tstamp_by_day' do
let_it_be(:time_now) { Time.zone.now }
let_it_be(:time_ago) { Time.zone.now - 5.days }
diff --git a/spec/models/project_feature_usage_spec.rb b/spec/models/project_feature_usage_spec.rb
index 698c5374e88..3765a2b37a7 100644
--- a/spec/models/project_feature_usage_spec.rb
+++ b/spec/models/project_feature_usage_spec.rb
@@ -133,10 +133,8 @@ RSpec.describe ProjectFeatureUsage, type: :model do
subject { project.feature_usage }
- context 'database load balancing is configured', :db_load_balancing do
+ context 'database load balancing is configured' do
before do
- allow(ActiveRecord::Base).to receive(:connection).and_return(::Gitlab::Database::LoadBalancing.proxy)
-
::Gitlab::Database::LoadBalancing::Session.clear_session
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 3989ddc31e8..10220448936 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -140,6 +140,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:error_tracking_client_keys).class_name('ErrorTracking::ClientKey') }
it { is_expected.to have_many(:pending_builds).class_name('Ci::PendingBuild') }
it { is_expected.to have_many(:ci_feature_usages).class_name('Projects::CiFeatureUsage') }
+ it { is_expected.to have_many(:bulk_import_exports).class_name('BulkImports::Export') }
# GitLab Pages
it { is_expected.to have_many(:pages_domains) }
@@ -494,23 +495,6 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#merge_requests_author_approval' do
- where(:attribute_value, :return_value) do
- true | true
- false | false
- nil | false
- end
-
- with_them do
- let(:project) { create(:project, merge_requests_author_approval: attribute_value) }
-
- it 'returns expected value' do
- expect(project.merge_requests_author_approval).to eq(return_value)
- expect(project.merge_requests_author_approval?).to eq(return_value)
- end
- end
- end
-
describe '#all_pipelines' do
let_it_be(:project) { create(:project) }
@@ -3066,7 +3050,7 @@ RSpec.describe Project, factory_default: :keep do
let(:project) { create(:project) }
it 'marks the location with project ID' do
- expect(Gitlab::Database::LoadBalancing::Sticking).to receive(:mark_primary_write_location).with(:project, project.id)
+ expect(ApplicationRecord.sticking).to receive(:mark_primary_write_location).with(:project, project.id)
project.mark_primary_write_location
end
@@ -6303,24 +6287,18 @@ RSpec.describe Project, factory_default: :keep do
describe 'validation #changing_shared_runners_enabled_is_allowed' do
where(:shared_runners_setting, :project_shared_runners_enabled, :valid_record) do
- 'enabled' | true | true
- 'enabled' | false | true
- 'disabled_with_override' | true | true
- 'disabled_with_override' | false | true
- 'disabled_and_unoverridable' | true | false
- 'disabled_and_unoverridable' | false | true
+ :shared_runners_enabled | true | true
+ :shared_runners_enabled | false | true
+ :disabled_with_override | true | true
+ :disabled_with_override | false | true
+ :disabled_and_unoverridable | true | false
+ :disabled_and_unoverridable | false | true
end
with_them do
- let(:group) { create(:group) }
+ let(:group) { create(:group, shared_runners_setting) }
let(:project) { build(:project, namespace: group, shared_runners_enabled: project_shared_runners_enabled) }
- before do
- allow_next_found_instance_of(Group) do |group|
- allow(group).to receive(:shared_runners_setting).and_return(shared_runners_setting)
- end
- end
-
it 'validates the configuration' do
expect(project.valid?).to eq(valid_record)
@@ -7239,35 +7217,6 @@ RSpec.describe Project, factory_default: :keep do
expect(project.reload.topics.map(&:name)).to eq(%w[topic1 topic2 topic3])
end
end
-
- context 'during ExtractProjectTopicsIntoSeparateTable migration' do
- before do
- topic_a = ActsAsTaggableOn::Tag.find_or_create_by!(name: 'topicA')
- topic_b = ActsAsTaggableOn::Tag.find_or_create_by!(name: 'topicB')
-
- project.reload.topics_acts_as_taggable = [topic_a, topic_b]
- project.save!
- project.reload
- end
-
- it 'topic_list returns correct string array' do
- expect(project.topic_list).to eq(%w[topicA topicB topic1 topic2 topic3])
- end
-
- it 'topics returns correct topic records' do
- expect(project.topics.map(&:class)).to eq([ActsAsTaggableOn::Tag, ActsAsTaggableOn::Tag, Projects::Topic, Projects::Topic, Projects::Topic])
- expect(project.topics.map(&:name)).to eq(%w[topicA topicB topic1 topic2 topic3])
- end
-
- it 'topic_list= sets new topics and removes old topics' do
- project.topic_list = 'new-topic1, new-topic2'
- project.save!
- project.reload
-
- expect(project.topics.map(&:class)).to eq([Projects::Topic, Projects::Topic])
- expect(project.topics.map(&:name)).to eq(%w[new-topic1 new-topic2])
- end
- end
end
shared_examples 'all_runners' do
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index ba769e830fd..ead6238b2f4 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -294,15 +294,17 @@ RSpec.describe ProjectStatistics do
describe '#update_lfs_objects_size' do
let!(:lfs_object1) { create(:lfs_object, size: 23.megabytes) }
let!(:lfs_object2) { create(:lfs_object, size: 34.megabytes) }
+ let!(:lfs_object3) { create(:lfs_object, size: 34.megabytes) }
let!(:lfs_objects_project1) { create(:lfs_objects_project, project: project, lfs_object: lfs_object1) }
let!(:lfs_objects_project2) { create(:lfs_objects_project, project: project, lfs_object: lfs_object2) }
+ let!(:lfs_objects_project3) { create(:lfs_objects_project, project: project, lfs_object: lfs_object3) }
before do
statistics.update_lfs_objects_size
end
it "stores the size of related LFS objects" do
- expect(statistics.lfs_objects_size).to eq 57.megabytes
+ expect(statistics.lfs_objects_size).to eq 91.megabytes
end
end
diff --git a/spec/models/projects/topic_spec.rb b/spec/models/projects/topic_spec.rb
index 409dc932709..397c65f4d5c 100644
--- a/spec/models/projects/topic_spec.rb
+++ b/spec/models/projects/topic_spec.rb
@@ -3,12 +3,18 @@
require 'spec_helper'
RSpec.describe Projects::Topic do
- let_it_be(:topic, reload: true) { create(:topic) }
+ let_it_be(:topic, reload: true) { create(:topic, name: 'topic') }
subject { topic }
it { expect(subject).to be_valid }
+ describe 'modules' do
+ subject { described_class }
+
+ it { is_expected.to include_module(Avatarable) }
+ end
+
describe 'associations' do
it { is_expected.to have_many(:project_topics) }
it { is_expected.to have_many(:projects) }
@@ -18,5 +24,76 @@ RSpec.describe Projects::Topic do
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_uniqueness_of(:name) }
it { is_expected.to validate_length_of(:name).is_at_most(255) }
+ it { is_expected.to validate_length_of(:description).is_at_most(1024) }
+ end
+
+ describe 'scopes' do
+ describe 'order_by_total_projects_count' do
+ let!(:topic1) { create(:topic, name: 'topicB') }
+ let!(:topic2) { create(:topic, name: 'topicC') }
+ let!(:topic3) { create(:topic, name: 'topicA') }
+ let!(:project1) { create(:project, topic_list: 'topicC, topicA, topicB') }
+ let!(:project2) { create(:project, topic_list: 'topicC, topicA') }
+ let!(:project3) { create(:project, topic_list: 'topicC') }
+
+ it 'sorts topics by total_projects_count' do
+ topics = described_class.order_by_total_projects_count
+
+ expect(topics.map(&:name)).to eq(%w[topicC topicA topicB topic])
+ end
+ end
+
+ describe 'reorder_by_similarity' do
+ let!(:topic1) { create(:topic, name: 'my-topic') }
+ let!(:topic2) { create(:topic, name: 'other') }
+ let!(:topic3) { create(:topic, name: 'topic2') }
+
+ it 'sorts topics by similarity' do
+ topics = described_class.reorder_by_similarity('topic')
+
+ expect(topics.map(&:name)).to eq(%w[topic my-topic topic2 other])
+ end
+ end
+ end
+
+ describe '#search' do
+ it 'returns topics with a matching name' do
+ expect(described_class.search(topic.name)).to eq([topic])
+ end
+
+ it 'returns topics with a partially matching name' do
+ expect(described_class.search(topic.name[0..2])).to eq([topic])
+ end
+
+ it 'returns topics with a matching name regardless of the casing' do
+ expect(described_class.search(topic.name.upcase)).to eq([topic])
+ end
+ end
+
+ describe '#avatar_type' do
+ it "is true if avatar is image" do
+ topic.update_attribute(:avatar, 'uploads/avatar.png')
+ expect(topic.avatar_type).to be_truthy
+ end
+
+ it "is false if avatar is html page" do
+ topic.update_attribute(:avatar, 'uploads/avatar.html')
+ topic.avatar_type
+
+ expect(topic.errors.added?(:avatar, "file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico, webp")).to be true
+ end
+ end
+
+ describe '#avatar_url' do
+ context 'when avatar file is uploaded' do
+ before do
+ topic.update!(avatar: fixture_file_upload("spec/fixtures/dk.png"))
+ end
+
+ it 'shows correct avatar url' do
+ expect(topic.avatar_url).to eq(topic.avatar.url)
+ expect(topic.avatar_url(only_path: false)).to eq([Gitlab.config.gitlab.url, topic.avatar.url].join)
+ end
+ end
end
end
diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb
index 019c01af672..587a9683a8e 100644
--- a/spec/models/protected_branch_spec.rb
+++ b/spec/models/protected_branch_spec.rb
@@ -308,4 +308,15 @@ RSpec.describe ProtectedBranch do
expect(described_class.by_name('')).to be_empty
end
end
+
+ describe '.get_ids_by_name' do
+ let(:branch_name) { 'branch_name' }
+ let!(:protected_branch) { create(:protected_branch, name: branch_name) }
+ let(:branch_id) { protected_branch.id }
+
+ it 'returns the id for each protected branch matching name' do
+ expect(described_class.get_ids_by_name([branch_name]))
+ .to match_array([branch_id])
+ end
+ end
end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index dc55214c1dd..7bad907cf90 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe Repository do
let(:feature_flag) { true }
before do
- stub_feature_flags(gitaly_tags_finder: feature_flag)
+ stub_feature_flags(tags_finder_gitaly: feature_flag)
end
context 'name_desc' do
diff --git a/spec/models/snippet_repository_spec.rb b/spec/models/snippet_repository_spec.rb
index 40a28b9e0cc..e8a933d2277 100644
--- a/spec/models/snippet_repository_spec.rb
+++ b/spec/models/snippet_repository_spec.rb
@@ -115,6 +115,7 @@ RSpec.describe SnippetRepository do
allow(snippet).to receive(:repository).and_return(repo)
allow(repo).to receive(:ls_files).and_return([])
allow(repo).to receive(:root_ref).and_return('master')
+ allow(repo).to receive(:empty?).and_return(false)
end
it 'infers the commit action based on the parameters if not present' do
diff --git a/spec/models/upload_spec.rb b/spec/models/upload_spec.rb
index 6bac5e31435..0ac684cd04c 100644
--- a/spec/models/upload_spec.rb
+++ b/spec/models/upload_spec.rb
@@ -242,4 +242,28 @@ RSpec.describe Upload do
it { expect(subject.uploader_context).to match(a_hash_including(secret: 'secret', identifier: 'file.txt')) }
end
+
+ describe '#update_project_statistics' do
+ let_it_be(:project) { create(:project) }
+
+ subject do
+ create(:upload, model: project)
+ end
+
+ it 'updates project statistics when upload is added' do
+ expect(ProjectCacheWorker).to receive(:perform_async)
+ .with(project.id, [], [:uploads_size])
+
+ subject.save!
+ end
+
+ it 'updates project statistics when upload is removed' do
+ subject.save!
+
+ expect(ProjectCacheWorker).to receive(:perform_async)
+ .with(project.id, [], [:uploads_size])
+
+ subject.destroy!
+ end
+ end
end
diff --git a/spec/models/user_detail_spec.rb b/spec/models/user_detail_spec.rb
index ba7ea3f7ce2..9189b9a1469 100644
--- a/spec/models/user_detail_spec.rb
+++ b/spec/models/user_detail_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe UserDetail do
it { is_expected.to belong_to(:user) }
+ it { is_expected.to define_enum_for(:registration_objective).with_values([:basics, :move_repository, :code_storage, :exploring, :ci, :other, :joining_team]).with_suffix }
describe 'validations' do
describe '#job_title' do
diff --git a/spec/models/user_preference_spec.rb b/spec/models/user_preference_spec.rb
index 5806f123871..d4491aacd9f 100644
--- a/spec/models/user_preference_spec.rb
+++ b/spec/models/user_preference_spec.rb
@@ -80,12 +80,6 @@ RSpec.describe UserPreference do
end
end
- describe '#timezone' do
- it 'returns server time as default' do
- expect(user_preference.timezone).to eq(Time.zone.tzinfo.name)
- end
- end
-
describe '#tab_width' do
it 'is set to 8 by default' do
# Intentionally not using factory here to test the constructor.
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index ca4c38d4663..db805a804c8 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -79,6 +79,9 @@ RSpec.describe User do
it { is_expected.to delegate_method(:bio).to(:user_detail).allow_nil }
it { is_expected.to delegate_method(:bio=).to(:user_detail).with_arguments(:args).allow_nil }
+
+ it { is_expected.to delegate_method(:registration_objective).to(:user_detail).allow_nil }
+ it { is_expected.to delegate_method(:registration_objective=).to(:user_detail).with_arguments(:args).allow_nil }
end
describe 'associations' do
@@ -123,7 +126,7 @@ RSpec.describe User do
it { is_expected.to have_many(:callouts).class_name('UserCallout') }
it { is_expected.to have_many(:group_callouts).class_name('Users::GroupCallout') }
- describe "#user_detail" do
+ describe '#user_detail' do
it 'does not persist `user_detail` by default' do
expect(create(:user).user_detail).not_to be_persisted
end
@@ -160,25 +163,25 @@ RSpec.describe User do
end
end
- describe "#abuse_report" do
+ describe '#abuse_report' do
let(:current_user) { create(:user) }
let(:other_user) { create(:user) }
it { is_expected.to have_one(:abuse_report) }
- it "refers to the abuse report whose user_id is the current user" do
+ it 'refers to the abuse report whose user_id is the current user' do
abuse_report = create(:abuse_report, reporter: other_user, user: current_user)
expect(current_user.abuse_report).to eq(abuse_report)
end
- it "does not refer to the abuse report whose reporter_id is the current user" do
+ it 'does not refer to the abuse report whose reporter_id is the current user' do
create(:abuse_report, reporter: current_user, user: other_user)
expect(current_user.abuse_report).to be_nil
end
- it "does not update the user_id of an abuse report when the user is updated" do
+ it 'does not update the user_id of an abuse report when the user is updated' do
abuse_report = create(:abuse_report, reporter: current_user, user: other_user)
current_user.block
@@ -343,8 +346,9 @@ RSpec.describe User do
it 'falls back to english when I18n.default_locale is not an available language' do
I18n.default_locale = :kl
+ default_preferred_language = user.send(:default_preferred_language)
- expect(user.preferred_language).to eq 'en'
+ expect(user.preferred_language).to eq default_preferred_language
end
end
end
@@ -374,7 +378,7 @@ RSpec.describe User do
end
context 'when username is changed' do
- let(:user) { build_stubbed(:user, username: 'old_path', namespace: build_stubbed(:namespace)) }
+ let(:user) { build_stubbed(:user, username: 'old_path', namespace: build_stubbed(:user_namespace)) }
it 'validates move_dir is allowed for the namespace' do
expect(user.namespace).to receive(:any_project_has_container_registry_tags?).and_return(true)
@@ -401,7 +405,7 @@ RSpec.describe User do
user = build(:user, username: "test.#{type}")
expect(user).not_to be_valid
- expect(user.errors.full_messages).to include('Username ending with a file extension is not allowed.')
+ expect(user.errors.full_messages).to include('Username ending with a reserved file extension is not allowed.')
expect(build(:user, username: "test#{type}")).to be_valid
end
end
@@ -490,6 +494,8 @@ RSpec.describe User do
end
describe 'email' do
+ let(:expected_error) { _('is not allowed for sign-up. Check with your administrator.') }
+
context 'when no signup domains allowed' do
before do
stub_application_setting(domain_allowlist: [])
@@ -533,7 +539,7 @@ RSpec.describe User do
it 'rejects example@test.com' do
user = build(:user, email: "example@test.com")
expect(user).to be_invalid
- expect(user.errors.messages[:email].first).to eq(_('domain is not authorized for sign-up.'))
+ expect(user.errors.messages[:email].first).to eq(expected_error)
end
end
@@ -550,13 +556,13 @@ RSpec.describe User do
it 'rejects info@test.example.com' do
user = build(:user, email: "info@test.example.com")
expect(user).to be_invalid
- expect(user.errors.messages[:email].first).to eq(_('domain is not authorized for sign-up.'))
+ expect(user.errors.messages[:email].first).to eq(expected_error)
end
it 'rejects example@test.com' do
user = build(:user, email: "example@test.com")
expect(user).to be_invalid
- expect(user.errors.messages[:email].first).to eq(_('domain is not authorized for sign-up.'))
+ expect(user.errors.messages[:email].first).to eq(expected_error)
end
it 'accepts example@test.com when added by another user' do
@@ -594,7 +600,7 @@ RSpec.describe User do
it 'rejects info@example.com' do
user = build(:user, email: 'info@example.com')
expect(user).not_to be_valid
- expect(user.errors.messages[:email].first).to eq(_('is not from an allowed domain.'))
+ expect(user.errors.messages[:email].first).to eq(expected_error)
end
it 'accepts info@example.com when added by another user' do
@@ -628,7 +634,7 @@ RSpec.describe User do
it 'rejects info@example.com' do
user = build(:user, email: 'info@example.com')
expect(user).not_to be_valid
- expect(user.errors.messages[:email].first).to eq(_('domain is not authorized for sign-up.'))
+ expect(user.errors.messages[:email].first).to eq(expected_error)
end
end
end
@@ -669,7 +675,7 @@ RSpec.describe User do
user = build(:user, email: 'info@gitlab.com')
expect(user).not_to be_valid
- expect(user.errors.messages[:email].first).to eq(_('is not allowed. Try again with a different email address, or contact your GitLab admin.'))
+ expect(user.errors.messages[:email].first).to eq(expected_error)
end
it 'does accept a valid email address' do
@@ -715,7 +721,7 @@ RSpec.describe User do
end
end
- describe "scopes" do
+ describe 'scopes' do
context 'blocked users' do
let_it_be(:active_user) { create(:user) }
let_it_be(:blocked_user) { create(:user, :blocked) }
@@ -753,8 +759,8 @@ RSpec.describe User do
end
end
- describe ".with_two_factor" do
- it "returns users with 2fa enabled via OTP" do
+ describe '.with_two_factor' do
+ it 'returns users with 2fa enabled via OTP' do
user_with_2fa = create(:user, :two_factor_via_otp)
user_without_2fa = create(:user)
users_with_two_factor = described_class.with_two_factor.pluck(:id)
@@ -763,8 +769,8 @@ RSpec.describe User do
expect(users_with_two_factor).not_to include(user_without_2fa.id)
end
- shared_examples "returns the right users" do |trait|
- it "returns users with 2fa enabled via hardware token" do
+ shared_examples 'returns the right users' do |trait|
+ it 'returns users with 2fa enabled via hardware token' do
user_with_2fa = create(:user, trait)
user_without_2fa = create(:user)
users_with_two_factor = described_class.with_two_factor.pluck(:id)
@@ -773,7 +779,7 @@ RSpec.describe User do
expect(users_with_two_factor).not_to include(user_without_2fa.id)
end
- it "returns users with 2fa enabled via OTP and hardware token" do
+ it 'returns users with 2fa enabled via OTP and hardware token' do
user_with_2fa = create(:user, :two_factor_via_otp, trait)
user_without_2fa = create(:user)
users_with_two_factor = described_class.with_two_factor.pluck(:id)
@@ -791,17 +797,17 @@ RSpec.describe User do
end
end
- describe "and U2F" do
+ describe 'and U2F' do
it_behaves_like "returns the right users", :two_factor_via_u2f
end
- describe "and WebAuthn" do
+ describe 'and WebAuthn' do
it_behaves_like "returns the right users", :two_factor_via_webauthn
end
end
- describe ".without_two_factor" do
- it "excludes users with 2fa enabled via OTP" do
+ describe '.without_two_factor' do
+ it 'excludes users with 2fa enabled via OTP' do
user_with_2fa = create(:user, :two_factor_via_otp)
user_without_2fa = create(:user)
users_without_two_factor = described_class.without_two_factor.pluck(:id)
@@ -810,8 +816,8 @@ RSpec.describe User do
expect(users_without_two_factor).not_to include(user_with_2fa.id)
end
- describe "and u2f" do
- it "excludes users with 2fa enabled via U2F" do
+ describe 'and u2f' do
+ it 'excludes users with 2fa enabled via U2F' do
user_with_2fa = create(:user, :two_factor_via_u2f)
user_without_2fa = create(:user)
users_without_two_factor = described_class.without_two_factor.pluck(:id)
@@ -820,7 +826,7 @@ RSpec.describe User do
expect(users_without_two_factor).not_to include(user_with_2fa.id)
end
- it "excludes users with 2fa enabled via OTP and U2F" do
+ it 'excludes users with 2fa enabled via OTP and U2F' do
user_with_2fa = create(:user, :two_factor_via_otp, :two_factor_via_u2f)
user_without_2fa = create(:user)
users_without_two_factor = described_class.without_two_factor.pluck(:id)
@@ -830,8 +836,8 @@ RSpec.describe User do
end
end
- describe "and webauthn" do
- it "excludes users with 2fa enabled via WebAuthn" do
+ describe 'and webauthn' do
+ it 'excludes users with 2fa enabled via WebAuthn' do
user_with_2fa = create(:user, :two_factor_via_webauthn)
user_without_2fa = create(:user)
users_without_two_factor = described_class.without_two_factor.pluck(:id)
@@ -840,7 +846,7 @@ RSpec.describe User do
expect(users_without_two_factor).not_to include(user_with_2fa.id)
end
- it "excludes users with 2fa enabled via OTP and WebAuthn" do
+ it 'excludes users with 2fa enabled via OTP and WebAuthn' do
user_with_2fa = create(:user, :two_factor_via_otp, :two_factor_via_webauthn)
user_without_2fa = create(:user)
users_without_two_factor = described_class.without_two_factor.pluck(:id)
@@ -1073,7 +1079,7 @@ RSpec.describe User do
end
end
- describe "Respond to" do
+ describe 'Respond to' do
it { is_expected.to respond_to(:admin?) }
it { is_expected.to respond_to(:name) }
it { is_expected.to respond_to(:external?) }
@@ -1095,7 +1101,7 @@ RSpec.describe User do
let(:user) { create(:user) }
let(:external_user) { create(:user, external: true) }
- it "sets other properties as well" do
+ it 'sets other properties as well' do
expect(external_user.can_create_team).to be_falsey
expect(external_user.can_create_group).to be_falsey
expect(external_user.projects_limit).to be 0
@@ -1514,7 +1520,7 @@ RSpec.describe User do
end
describe '#generate_password' do
- it "does not generate password by default" do
+ it 'does not generate password by default' do
user = create(:user, password: 'abcdefghe')
expect(user.password).to eq('abcdefghe')
@@ -1882,14 +1888,14 @@ RSpec.describe User do
describe 'deactivating a user' do
let(:user) { create(:user, name: 'John Smith') }
- context "an active user" do
- it "can be deactivated" do
+ context 'an active user' do
+ it 'can be deactivated' do
user.deactivate
expect(user.deactivated?).to be_truthy
end
- context "when user deactivation emails are disabled" do
+ context 'when user deactivation emails are disabled' do
before do
stub_application_setting(user_deactivation_emails_enabled: false)
end
@@ -1900,7 +1906,7 @@ RSpec.describe User do
end
end
- context "when user deactivation emails are enabled" do
+ context 'when user deactivation emails are enabled' do
it 'sends deactivated user an email' do
expect_next_instance_of(NotificationService) do |notification|
allow(notification).to receive(:user_deactivated).with(user.name, user.notification_email_or_default)
@@ -1911,12 +1917,12 @@ RSpec.describe User do
end
end
- context "a user who is blocked" do
+ context 'a user who is blocked' do
before do
user.block
end
- it "cannot be deactivated" do
+ it 'cannot be deactivated' do
user.deactivate
expect(user.reload.deactivated?).to be_falsy
@@ -2083,7 +2089,7 @@ RSpec.describe User do
describe 'with defaults' do
let(:user) { described_class.new }
- it "applies defaults to user" do
+ it 'applies defaults to user' do
expect(user.projects_limit).to eq(Gitlab.config.gitlab.default_projects_limit)
expect(user.can_create_group).to eq(Gitlab.config.gitlab.default_can_create_group)
expect(user.theme_id).to eq(Gitlab.config.gitlab.default_theme)
@@ -2095,7 +2101,7 @@ RSpec.describe User do
describe 'with default overrides' do
let(:user) { described_class.new(projects_limit: 123, can_create_group: false, can_create_team: true) }
- it "applies defaults to user" do
+ it 'applies defaults to user' do
expect(user.projects_limit).to eq(123)
expect(user.can_create_group).to be_falsey
expect(user.theme_id).to eq(1)
@@ -2114,7 +2120,7 @@ RSpec.describe User do
stub_application_setting(user_default_external: true)
end
- it "creates external user by default" do
+ it 'creates external user by default' do
user = create(:user)
expect(user.external).to be_truthy
@@ -2123,7 +2129,7 @@ RSpec.describe User do
end
describe 'with default overrides' do
- it "creates a non-external user" do
+ it 'creates a non-external user' do
user = create(:user, external: false)
expect(user.external).to be_falsey
@@ -2139,7 +2145,7 @@ RSpec.describe User do
}
protocol_and_expectation.each do |protocol, expected|
- it "has correct require_ssh_key?" do
+ it 'has correct require_ssh_key?' do
stub_application_setting(enabled_git_access_protocol: protocol)
user = build(:user)
@@ -2542,71 +2548,79 @@ RSpec.describe User do
end
describe '.find_by_full_path' do
- let!(:user) { create(:user) }
+ using RSpec::Parameterized::TableSyntax
- context 'with a route matching the given path' do
- let!(:route) { user.namespace.route }
+ # TODO: this `where/when` can be removed in issue https://gitlab.com/gitlab-org/gitlab/-/issues/341070
+ # At that point we only need to check `user_namespace`
+ where(namespace_type: [:namespace, :user_namespace])
- it 'returns the user' do
- expect(described_class.find_by_full_path(route.path)).to eq(user)
- end
+ with_them do
+ let!(:user) { create(:user, namespace: create(namespace_type)) }
- it 'is case-insensitive' do
- expect(described_class.find_by_full_path(route.path.upcase)).to eq(user)
- expect(described_class.find_by_full_path(route.path.downcase)).to eq(user)
- end
- end
+ context 'with a route matching the given path' do
+ let!(:route) { user.namespace.route }
- context 'with a redirect route matching the given path' do
- let!(:redirect_route) { user.namespace.redirect_routes.create!(path: 'foo') }
+ it 'returns the user' do
+ expect(described_class.find_by_full_path(route.path)).to eq(user)
+ end
- context 'without the follow_redirects option' do
- it 'returns nil' do
- expect(described_class.find_by_full_path(redirect_route.path)).to eq(nil)
+ it 'is case-insensitive' do
+ expect(described_class.find_by_full_path(route.path.upcase)).to eq(user)
+ expect(described_class.find_by_full_path(route.path.downcase)).to eq(user)
end
end
- context 'with the follow_redirects option set to true' do
- it 'returns the user' do
- expect(described_class.find_by_full_path(redirect_route.path, follow_redirects: true)).to eq(user)
+ context 'with a redirect route matching the given path' do
+ let!(:redirect_route) { user.namespace.redirect_routes.create!(path: 'foo') }
+
+ context 'without the follow_redirects option' do
+ it 'returns nil' do
+ expect(described_class.find_by_full_path(redirect_route.path)).to eq(nil)
+ end
end
- it 'is case-insensitive' do
- expect(described_class.find_by_full_path(redirect_route.path.upcase, follow_redirects: true)).to eq(user)
- expect(described_class.find_by_full_path(redirect_route.path.downcase, follow_redirects: true)).to eq(user)
+ context 'with the follow_redirects option set to true' do
+ it 'returns the user' do
+ expect(described_class.find_by_full_path(redirect_route.path, follow_redirects: true)).to eq(user)
+ end
+
+ it 'is case-insensitive' do
+ expect(described_class.find_by_full_path(redirect_route.path.upcase, follow_redirects: true)).to eq(user)
+ expect(described_class.find_by_full_path(redirect_route.path.downcase, follow_redirects: true)).to eq(user)
+ end
end
end
- end
- context 'without a route or a redirect route matching the given path' do
- context 'without the follow_redirects option' do
- it 'returns nil' do
- expect(described_class.find_by_full_path('unknown')).to eq(nil)
+ context 'without a route or a redirect route matching the given path' do
+ context 'without the follow_redirects option' do
+ it 'returns nil' do
+ expect(described_class.find_by_full_path('unknown')).to eq(nil)
+ end
end
- end
- context 'with the follow_redirects option set to true' do
- it 'returns nil' do
- expect(described_class.find_by_full_path('unknown', follow_redirects: true)).to eq(nil)
+ context 'with the follow_redirects option set to true' do
+ it 'returns nil' do
+ expect(described_class.find_by_full_path('unknown', follow_redirects: true)).to eq(nil)
+ end
end
end
- end
- context 'with a group route matching the given path' do
- let!(:group) { create(:group, path: 'group_path') }
+ context 'with a group route matching the given path' do
+ let!(:group) { create(:group, path: 'group_path') }
- context 'when the group namespace has an owner_id (legacy data)' do
- before do
- group.update!(owner_id: user.id)
- end
+ context 'when the group namespace has an owner_id (legacy data)' do
+ before do
+ group.update!(owner_id: user.id)
+ end
- it 'returns nil' do
- expect(described_class.find_by_full_path('group_path')).to eq(nil)
+ it 'returns nil' do
+ expect(described_class.find_by_full_path('group_path')).to eq(nil)
+ end
end
- end
- context 'when the group namespace does not have an owner_id' do
- it 'returns nil' do
- expect(described_class.find_by_full_path('group_path')).to eq(nil)
+ context 'when the group namespace does not have an owner_id' do
+ it 'returns nil' do
+ expect(described_class.find_by_full_path('group_path')).to eq(nil)
+ end
end
end
end
@@ -2615,7 +2629,7 @@ RSpec.describe User do
describe 'all_ssh_keys' do
it { is_expected.to have_many(:keys).dependent(:destroy) }
- it "has all ssh keys" do
+ it 'has all ssh keys' do
user = create :user
key = create :key, key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD33bWLBxu48Sev9Fert1yzEO4WGcWglWF7K/AwblIUFselOt/QdOL9DSjpQGxLagO1s9wl53STIO8qGS4Ms0EJZyIXOEFMjFJ5xmjSy+S37By4sG7SsltQEHMxtbtFOaW5LV2wCrX+rUsRNqLMamZjgjcPO0/EgGCXIGMAYW4O7cwGZdXWYIhQ1Vwy+CsVMDdPkPgBXqK7nR/ey8KMs8ho5fMNgB5hBw/AL9fNGhRw3QTD6Q12Nkhl4VZES2EsZqlpNnJttnPdp847DUsT6yuLRlfiQfz5Cn9ysHFdXObMN5VYIiPFwHeYCZp1X2S4fDZooRE8uOLTfxWHPXwrhqSH", user_id: user.id
@@ -2651,10 +2665,10 @@ RSpec.describe User do
end
end
- describe "#clear_avatar_caches" do
+ describe '#clear_avatar_caches' do
let(:user) { create(:user) }
- it "clears the avatar cache when saving" do
+ it 'clears the avatar cache when saving' do
allow(user).to receive(:avatar_changed?).and_return(true)
expect(Gitlab::AvatarCache).to receive(:delete_by_email).with(*user.verified_emails)
@@ -3180,7 +3194,7 @@ RSpec.describe User do
end
end
- describe "#last_active_at" do
+ describe '#last_active_at' do
let(:last_activity_on) { 5.days.ago.to_date }
let(:current_sign_in_at) { 8.days.ago }
@@ -3218,7 +3232,7 @@ RSpec.describe User do
end
end
- describe "#can_be_deactivated?" do
+ describe '#can_be_deactivated?' do
let(:activity) { {} }
let(:user) { create(:user, name: 'John Smith', **activity) }
let(:day_within_minium_inactive_days_threshold) { User::MINIMUM_INACTIVE_DAYS.pred.days.ago }
@@ -3236,7 +3250,7 @@ RSpec.describe User do
end
end
- context "a user who is not active" do
+ context 'a user who is not active' do
before do
user.block
end
@@ -3277,7 +3291,7 @@ RSpec.describe User do
end
end
- describe "#contributed_projects" do
+ describe '#contributed_projects' do
subject { create(:user) }
let!(:project1) { create(:project) }
@@ -3292,11 +3306,11 @@ RSpec.describe User do
project2.add_maintainer(subject)
end
- it "includes IDs for projects the user has pushed to" do
+ it 'includes IDs for projects the user has pushed to' do
expect(subject.contributed_projects).to include(project1)
end
- it "includes IDs for projects the user has had merge requests merged into" do
+ it 'includes IDs for projects the user has had merge requests merged into' do
expect(subject.contributed_projects).to include(project3)
end
@@ -3390,7 +3404,7 @@ RSpec.describe User do
end
end
- describe "#recent_push" do
+ describe '#recent_push' do
let(:user) { build(:user) }
let(:project) { build(:project) }
let(:event) { build(:push_event) }
@@ -3554,7 +3568,7 @@ RSpec.describe User do
expect(user.authorized_projects).to include(project)
end
- it "includes personal projects user has been given access to" do
+ it 'includes personal projects user has been given access to' do
user1 = create(:user)
user2 = create(:user)
project = create(:project, :private, namespace: user1.namespace)
@@ -3564,7 +3578,7 @@ RSpec.describe User do
expect(user2.authorized_projects).to include(project)
end
- it "includes projects of groups user has been added to" do
+ it 'includes projects of groups user has been added to' do
group = create(:group)
project = create(:project, group: group)
user = create(:user)
@@ -3574,7 +3588,7 @@ RSpec.describe User do
expect(user.authorized_projects).to include(project)
end
- it "does not include projects of groups user has been removed from" do
+ it 'does not include projects of groups user has been removed from', :sidekiq_inline do
group = create(:group)
project = create(:project, group: group)
user = create(:user)
@@ -3599,7 +3613,7 @@ RSpec.describe User do
expect(user.authorized_projects).to include(project)
end
- it "does not include destroyed projects user had access to" do
+ it 'does not include destroyed projects user had access to' do
user1 = create(:user)
user2 = create(:user)
project = create(:project, :private, namespace: user1.namespace)
@@ -3613,7 +3627,7 @@ RSpec.describe User do
expect(user2.authorized_projects).not_to include(project)
end
- it "does not include projects of destroyed groups user had access to" do
+ it 'does not include projects of destroyed groups user had access to' do
group = create(:group)
project = create(:project, namespace: group)
user = create(:user)
@@ -3841,7 +3855,7 @@ RSpec.describe User do
end
context 'with runner in a personal project' do
- let!(:namespace) { create(:namespace, owner: user) }
+ let!(:namespace) { create(:user_namespace, owner: user) }
let!(:project) { create(:project, namespace: namespace) }
let!(:runner) { create(:ci_runner, :project, projects: [project]) }
@@ -3909,7 +3923,7 @@ RSpec.describe User do
end
context 'with personal project runner in an owned group in an owned namespace and a group runner in that group' do
- let!(:namespace) { create(:namespace, owner: user) }
+ let!(:namespace) { create(:user_namespace, owner: user) }
let!(:group) { create(:group) }
let!(:group_runner) { create(:ci_runner, :group, groups: [group]) }
let!(:project) { create(:project, namespace: namespace, group: group) }
@@ -3923,7 +3937,7 @@ RSpec.describe User do
end
context 'with personal project runner in an owned namespace, an owned group, a subgroup and a group runner in that subgroup' do
- let!(:namespace) { create(:namespace, owner: user) }
+ let!(:namespace) { create(:user_namespace, owner: user) }
let!(:group) { create(:group) }
let!(:subgroup) { create(:group, parent: group) }
let!(:group_runner) { create(:ci_runner, :group, groups: [subgroup]) }
@@ -4166,7 +4180,7 @@ RSpec.describe User do
expect(user.admin).to be true
end
- it "accepts string values in addition to symbols" do
+ it 'accepts string values in addition to symbols' do
user.access_level = 'admin'
expect(user.access_level).to eq(:admin)
@@ -4247,7 +4261,7 @@ RSpec.describe User do
expect(ghost.user_type).to eq 'ghost'
end
- it "does not create a second ghost user if one is already present" do
+ it 'does not create a second ghost user if one is already present' do
expect do
described_class.ghost
described_class.ghost
@@ -4256,7 +4270,7 @@ RSpec.describe User do
end
context "when a regular user exists with the username 'ghost'" do
- it "creates a ghost user with a non-conflicting username" do
+ it 'creates a ghost user with a non-conflicting username' do
create(:user, username: 'ghost')
ghost = described_class.ghost
@@ -4266,7 +4280,7 @@ RSpec.describe User do
end
context "when a regular user exists with the email 'ghost@example.com'" do
- it "creates a ghost user with a non-conflicting email" do
+ it 'creates a ghost user with a non-conflicting email' do
create(:user, email: 'ghost@example.com')
ghost = described_class.ghost
@@ -4605,6 +4619,7 @@ RSpec.describe User do
user.save!
expect(user.namespace).not_to be_nil
+ expect(user.namespace).to be_kind_of(Namespaces::UserNamespace)
end
it 'creates the namespace setting' do
@@ -4746,7 +4761,7 @@ RSpec.describe User do
it { is_expected.to be true }
end
- context 'when email and username aren\'t changed' do
+ context "when email and username aren't changed" do
before do
user.name = 'new_name'
end
@@ -5057,26 +5072,26 @@ RSpec.describe User do
subject { user.required_terms_not_accepted? }
- context "when terms are not enforced" do
+ context 'when terms are not enforced' do
it { is_expected.to be_falsey }
end
- context "when terms are enforced" do
+ context 'when terms are enforced' do
before do
enforce_terms
end
- it "is not accepted by the user" do
+ it 'is not accepted by the user' do
expect(subject).to be_truthy
end
- it "is accepted by the user" do
+ it 'is accepted by the user' do
accept_terms(user)
expect(subject).to be_falsey
end
- it "auto accepts the term for project bots" do
+ it 'auto accepts the term for project bots' do
expect(project_bot.required_terms_not_accepted?).to be_falsey
end
end
@@ -6165,4 +6180,14 @@ RSpec.describe User do
it_behaves_like 'groups_with_developer_maintainer_project_access examples'
end
end
+
+ describe '.get_ids_by_username' do
+ let(:user_name) { 'user_name' }
+ let!(:user) { create(:user, username: user_name) }
+ let(:user_id) { user.id }
+
+ it 'returns the id of each record matching username' do
+ expect(described_class.get_ids_by_username([user_name])).to match_array([user_id])
+ end
+ end
end
diff --git a/spec/models/users/credit_card_validation_spec.rb b/spec/models/users/credit_card_validation_spec.rb
index fb9f6e35038..d2b4f5ebd65 100644
--- a/spec/models/users/credit_card_validation_spec.rb
+++ b/spec/models/users/credit_card_validation_spec.rb
@@ -4,4 +4,22 @@ require 'spec_helper'
RSpec.describe Users::CreditCardValidation do
it { is_expected.to belong_to(:user) }
+
+ it { is_expected.to validate_length_of(:holder_name).is_at_most(26) }
+ it { is_expected.to validate_numericality_of(:last_digits).is_less_than_or_equal_to(9999) }
+
+ describe '.similar_records' do
+ let(:card_details) { subject.attributes.slice(:expiration_date, :last_digits, :holder_name) }
+
+ subject(:credit_card_validation) { create(:credit_card_validation) }
+
+ let!(:match1) { create(:credit_card_validation, card_details) }
+ let!(:other1) { create(:credit_card_validation, card_details.merge(last_digits: 9)) }
+ let!(:match2) { create(:credit_card_validation, card_details) }
+ let!(:other2) { create(:credit_card_validation, card_details.merge(holder_name: 'foo bar')) }
+
+ it 'returns records with matching credit card, ordered by credit_card_validated_at' do
+ expect(subject.similar_records).to eq([match2, match1, subject])
+ end
+ end
end
diff --git a/spec/policies/clusters/agent_policy_spec.rb b/spec/policies/clusters/agent_policy_spec.rb
new file mode 100644
index 00000000000..307d751b78b
--- /dev/null
+++ b/spec/policies/clusters/agent_policy_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::AgentPolicy do
+ let(:cluster_agent) { create(:cluster_agent, name: 'agent' )}
+ let(:user) { create(:admin) }
+ let(:policy) { described_class.new(user, cluster_agent) }
+ let(:project) { cluster_agent.project }
+
+ describe 'rules' do
+ context 'when developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { expect(policy).to be_disallowed :admin_cluster }
+ end
+
+ context 'when maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it { expect(policy).to be_allowed :admin_cluster }
+ end
+ end
+end
diff --git a/spec/policies/clusters/agent_token_policy_spec.rb b/spec/policies/clusters/agent_token_policy_spec.rb
new file mode 100644
index 00000000000..9ae99e66f59
--- /dev/null
+++ b/spec/policies/clusters/agent_token_policy_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::AgentTokenPolicy do
+ let_it_be(:token) { create(:cluster_agent_token) }
+
+ let(:user) { create(:user) }
+ let(:policy) { described_class.new(user, token) }
+ let(:project) { token.agent.project }
+
+ describe 'rules' do
+ context 'when developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { expect(policy).to be_disallowed :admin_cluster }
+ it { expect(policy).to be_disallowed :read_cluster }
+ end
+
+ context 'when maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it { expect(policy).to be_allowed :admin_cluster }
+ it { expect(policy).to be_allowed :read_cluster }
+ end
+ end
+end
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 482e12c029d..201ccf0fc14 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -1005,7 +1005,7 @@ RSpec.describe GroupPolicy do
context 'with maintainer' do
let(:current_user) { maintainer }
- it { is_expected.to be_allowed(:update_runners_registration_token) }
+ it { is_expected.to be_disallowed(:update_runners_registration_token) }
end
context 'with reporter' do
diff --git a/spec/policies/namespaces/project_namespace_policy_spec.rb b/spec/policies/namespaces/project_namespace_policy_spec.rb
new file mode 100644
index 00000000000..22f3ccec1f8
--- /dev/null
+++ b/spec/policies/namespaces/project_namespace_policy_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe NamespacePolicy do
+ let_it_be(:parent) { create(:namespace) }
+ let_it_be(:namespace) { create(:project_namespace, parent: parent) }
+
+ let(:permissions) do
+ [:owner_access, :create_projects, :admin_namespace, :read_namespace,
+ :read_statistics, :transfer_projects, :create_package_settings,
+ :read_package_settings, :create_jira_connect_subscription]
+ end
+
+ subject { described_class.new(current_user, namespace) }
+
+ context 'with no user' do
+ let_it_be(:current_user) { nil }
+
+ it { is_expected.to be_disallowed(*permissions) }
+ end
+
+ context 'regular user' do
+ let_it_be(:current_user) { create(:user) }
+
+ it { is_expected.to be_disallowed(*permissions) }
+ end
+
+ context 'parent owner' do
+ let_it_be(:current_user) { parent.owner }
+
+ it { is_expected.to be_disallowed(*permissions) }
+ end
+
+ context 'admin' do
+ let_it_be(:current_user) { create(:admin) }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it { is_expected.to be_allowed(*permissions) }
+ end
+
+ context 'when admin mode is disabled' do
+ it { is_expected.to be_disallowed(*permissions) }
+ end
+ end
+end
diff --git a/spec/policies/namespace_policy_spec.rb b/spec/policies/namespaces/user_namespace_policy_spec.rb
index b9823273de8..02eda31bfa7 100644
--- a/spec/policies/namespace_policy_spec.rb
+++ b/spec/policies/namespaces/user_namespace_policy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe NamespacePolicy do
+RSpec.describe Namespaces::UserNamespacePolicy do
let(:user) { create(:user) }
let(:owner) { create(:user) }
let(:admin) { create(:admin) }
diff --git a/spec/presenters/clusters/cluster_presenter_spec.rb b/spec/presenters/clusters/cluster_presenter_spec.rb
index c8da8a54f16..49126ed8e5f 100644
--- a/spec/presenters/clusters/cluster_presenter_spec.rb
+++ b/spec/presenters/clusters/cluster_presenter_spec.rb
@@ -30,129 +30,6 @@ RSpec.describe Clusters::ClusterPresenter do
end
end
- describe '#item_link' do
- let(:clusterable_presenter) { double('ClusterablePresenter', subject: clusterable) }
-
- subject { presenter.item_link(clusterable_presenter) }
-
- context 'for a group cluster' do
- let(:cluster) { create(:cluster, cluster_type: :group_type, groups: [group]) }
- let(:group) { create(:group, name: 'Foo') }
- let(:cluster_link) { "<a href=\"#{group_cluster_path(cluster.group, cluster)}\">#{cluster.name}</a>" }
-
- before do
- group.add_maintainer(user)
- end
-
- shared_examples 'ancestor clusters' do
- context 'ancestor clusters' do
- let(:root_group) { create(:group, name: 'Root Group') }
- let(:parent) { create(:group, name: 'parent', parent: root_group) }
- let(:child) { create(:group, name: 'child', parent: parent) }
- let(:group) { create(:group, name: 'group', parent: child) }
-
- before do
- root_group.add_maintainer(user)
- end
-
- context 'top level group cluster' do
- let(:cluster) { create(:cluster, cluster_type: :group_type, groups: [root_group]) }
-
- it 'returns full group names and link for cluster' do
- expect(subject).to eq("Root Group / #{cluster_link}")
- end
-
- it 'is html safe' do
- expect(presenter).to receive(:sanitize).with('Root Group').and_call_original
-
- expect(subject).to be_html_safe
- end
- end
-
- context 'first level group cluster' do
- let(:cluster) { create(:cluster, cluster_type: :group_type, groups: [parent]) }
-
- it 'returns full group names and link for cluster' do
- expect(subject).to eq("Root Group / parent / #{cluster_link}")
- end
-
- it 'is html safe' do
- expect(presenter).to receive(:sanitize).with('Root Group / parent').and_call_original
-
- expect(subject).to be_html_safe
- end
- end
-
- context 'second level group cluster' do
- let(:cluster) { create(:cluster, cluster_type: :group_type, groups: [child]) }
-
- let(:ellipsis_h) do
- /.*ellipsis_h.*/
- end
-
- it 'returns clipped group names and link for cluster' do
- expect(subject).to match("Root Group / #{ellipsis_h} / child / #{cluster_link}")
- end
-
- it 'is html safe' do
- expect(presenter).to receive(:sanitize).with('Root Group / parent / child').and_call_original
-
- expect(subject).to be_html_safe
- end
- end
- end
- end
-
- context 'for a project clusterable' do
- let(:clusterable) { project }
- let(:project) { create(:project, group: group) }
-
- it 'returns the group name and the link for cluster' do
- expect(subject).to eq("Foo / #{cluster_link}")
- end
-
- it 'is html safe' do
- expect(presenter).to receive(:sanitize).with('Foo').and_call_original
-
- expect(subject).to be_html_safe
- end
-
- include_examples 'ancestor clusters'
- end
-
- context 'for the group clusterable for the cluster' do
- let(:clusterable) { group }
-
- it 'returns link for cluster' do
- expect(subject).to eq(cluster_link)
- end
-
- include_examples 'ancestor clusters'
-
- it 'is html safe' do
- expect(subject).to be_html_safe
- end
- end
- end
-
- context 'for a project cluster' do
- let(:cluster) { create(:cluster, :project) }
- let(:cluster_link) { "<a href=\"#{project_cluster_path(cluster.project, cluster)}\">#{cluster.name}</a>" }
-
- before do
- cluster.project.add_maintainer(user)
- end
-
- context 'for the project clusterable' do
- let(:clusterable) { cluster.project }
-
- it 'returns link for cluster' do
- expect(subject).to eq(cluster_link)
- end
- end
- end
- end
-
describe '#provider_label' do
let(:cluster) { create(:cluster, provider_type: provider_type) }
@@ -191,26 +68,6 @@ RSpec.describe Clusters::ClusterPresenter do
end
end
- describe '#cluster_type_description' do
- subject { described_class.new(cluster).cluster_type_description }
-
- context 'project_type cluster' do
- it { is_expected.to eq('Project cluster') }
- end
-
- context 'group_type cluster' do
- let(:cluster) { create(:cluster, :provided_by_gcp, :group) }
-
- it { is_expected.to eq('Group cluster') }
- end
-
- context 'instance_type cluster' do
- let(:cluster) { create(:cluster, :provided_by_gcp, :instance) }
-
- it { is_expected.to eq('Instance cluster') }
- end
- end
-
describe '#show_path' do
subject { described_class.new(cluster).show_path }
diff --git a/spec/presenters/commit_status_presenter_spec.rb b/spec/presenters/commit_status_presenter_spec.rb
index 4b2441d656e..f0bf1b860e4 100644
--- a/spec/presenters/commit_status_presenter_spec.rb
+++ b/spec/presenters/commit_status_presenter_spec.rb
@@ -15,6 +15,25 @@ RSpec.describe CommitStatusPresenter do
expect(described_class.superclass).to eq(Gitlab::View::Presenter::Delegated)
end
+ describe '#callout_failure_message' do
+ subject { presenter.callout_failure_message }
+
+ context 'when troubleshooting doc is available' do
+ let(:failure_reason) { :environment_creation_failure }
+
+ before do
+ build.failure_reason = failure_reason
+ end
+
+ it 'appends the troubleshooting link' do
+ doc = described_class::TROUBLESHOOTING_DOC[failure_reason]
+
+ expect(subject).to eq("#{described_class.callout_failure_messages[failure_reason]} " \
+ "<a href=\"#{presenter.help_page_path(doc[:path], anchor: doc[:anchor])}\">How do I fix it?</a>")
+ end
+ end
+ end
+
describe 'covers all failure reasons' do
let(:message) { presenter.callout_failure_message }
diff --git a/spec/presenters/group_clusterable_presenter_spec.rb b/spec/presenters/group_clusterable_presenter_spec.rb
index 84b97ba0bb7..b2dff2e3546 100644
--- a/spec/presenters/group_clusterable_presenter_spec.rb
+++ b/spec/presenters/group_clusterable_presenter_spec.rb
@@ -84,4 +84,10 @@ RSpec.describe GroupClusterablePresenter do
it { is_expected.to eq(metrics_dashboard_group_cluster_path(group, cluster)) }
end
+
+ describe '#learn_more_link' do
+ subject { presenter.learn_more_link }
+
+ it { is_expected.to include('user/group/clusters/index') }
+ end
end
diff --git a/spec/presenters/instance_clusterable_presenter_spec.rb b/spec/presenters/instance_clusterable_presenter_spec.rb
index 6968e3a4da3..0ace57bbf4d 100644
--- a/spec/presenters/instance_clusterable_presenter_spec.rb
+++ b/spec/presenters/instance_clusterable_presenter_spec.rb
@@ -32,4 +32,10 @@ RSpec.describe InstanceClusterablePresenter do
it { is_expected.to eq(metrics_dashboard_admin_cluster_path(cluster)) }
end
+
+ describe '#learn_more_link' do
+ subject { presenter.learn_more_link }
+
+ it { is_expected.to include('user/instance/clusters/index') }
+ end
end
diff --git a/spec/presenters/project_clusterable_presenter_spec.rb b/spec/presenters/project_clusterable_presenter_spec.rb
index 9057b518647..90b6671edd3 100644
--- a/spec/presenters/project_clusterable_presenter_spec.rb
+++ b/spec/presenters/project_clusterable_presenter_spec.rb
@@ -84,4 +84,10 @@ RSpec.describe ProjectClusterablePresenter do
it { is_expected.to eq(metrics_dashboard_project_cluster_path(project, cluster)) }
end
+
+ describe '#learn_more_link' do
+ subject { presenter.learn_more_link }
+
+ it { is_expected.to include('user/project/clusters/index') }
+ end
end
diff --git a/spec/requests/api/api_spec.rb b/spec/requests/api/api_spec.rb
index 81620fce448..95eb503c6bc 100644
--- a/spec/requests/api/api_spec.rb
+++ b/spec/requests/api/api_spec.rb
@@ -100,39 +100,105 @@ RSpec.describe API::API do
end
end
- context 'application context' do
- let_it_be(:project) { create(:project) }
+ describe 'logging', :aggregate_failures do
+ let_it_be(:project) { create(:project, :public) }
let_it_be(:user) { project.owner }
- it 'logs all application context fields' do
- allow_any_instance_of(Gitlab::GrapeLogging::Loggers::ContextLogger).to receive(:parameters) do
- Gitlab::ApplicationContext.current.tap do |log_context|
- expect(log_context).to match('correlation_id' => an_instance_of(String),
- 'meta.caller_id' => 'GET /api/:version/projects/:id/issues',
- 'meta.remote_ip' => an_instance_of(String),
- 'meta.project' => project.full_path,
- 'meta.root_namespace' => project.namespace.full_path,
- 'meta.user' => user.username,
- 'meta.client_id' => an_instance_of(String),
- 'meta.feature_category' => 'issue_tracking')
+ context 'when the endpoint is handled by the application' do
+ context 'when the endpoint supports all possible fields' do
+ it 'logs all application context fields and the route' do
+ expect(described_class::LOG_FORMATTER).to receive(:call) do |_severity, _datetime, _, data|
+ expect(data.stringify_keys)
+ .to include('correlation_id' => an_instance_of(String),
+ 'meta.caller_id' => 'GET /api/:version/projects/:id/issues',
+ 'meta.remote_ip' => an_instance_of(String),
+ 'meta.project' => project.full_path,
+ 'meta.root_namespace' => project.namespace.full_path,
+ 'meta.user' => user.username,
+ 'meta.client_id' => a_string_matching(%r{\Auser/.+}),
+ 'meta.feature_category' => 'issue_tracking',
+ 'route' => '/api/:version/projects/:id/issues')
+ end
+
+ get(api("/projects/#{project.id}/issues", user))
+
+ expect(response).to have_gitlab_http_status(:ok)
end
end
- get(api("/projects/#{project.id}/issues", user))
+ it 'skips context fields that do not apply' do
+ expect(described_class::LOG_FORMATTER).to receive(:call) do |_severity, _datetime, _, data|
+ expect(data.stringify_keys)
+ .to include('correlation_id' => an_instance_of(String),
+ 'meta.caller_id' => 'GET /api/:version/broadcast_messages',
+ 'meta.remote_ip' => an_instance_of(String),
+ 'meta.client_id' => a_string_matching(%r{\Aip/.+}),
+ 'meta.feature_category' => 'navigation',
+ 'route' => '/api/:version/broadcast_messages')
+
+ expect(data.stringify_keys).not_to include('meta.project', 'meta.root_namespace', 'meta.user')
+ end
+
+ get(api('/broadcast_messages'))
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when there is an unsupported media type' do
+ it 'logs the route and context metadata for the client' do
+ expect(described_class::LOG_FORMATTER).to receive(:call) do |_severity, _datetime, _, data|
+ expect(data.stringify_keys)
+ .to include('correlation_id' => an_instance_of(String),
+ 'meta.remote_ip' => an_instance_of(String),
+ 'meta.client_id' => a_string_matching(%r{\Aip/.+}),
+ 'route' => '/api/:version/users/:id')
+
+ expect(data.stringify_keys).not_to include('meta.caller_id', 'meta.feature_category', 'meta.user')
+ end
+
+ put(api("/users/#{user.id}", user), params: { 'name' => 'Test' }, headers: { 'Content-Type' => 'image/png' })
+
+ expect(response).to have_gitlab_http_status(:unsupported_media_type)
+ end
end
- it 'skips fields that do not apply' do
- allow_any_instance_of(Gitlab::GrapeLogging::Loggers::ContextLogger).to receive(:parameters) do
- Gitlab::ApplicationContext.current.tap do |log_context|
- expect(log_context).to match('correlation_id' => an_instance_of(String),
- 'meta.caller_id' => 'GET /api/:version/users',
- 'meta.remote_ip' => an_instance_of(String),
- 'meta.client_id' => an_instance_of(String),
- 'meta.feature_category' => 'users')
+ context 'when there is an OPTIONS request' do
+ it 'logs the route and context metadata for the client' do
+ expect(described_class::LOG_FORMATTER).to receive(:call) do |_severity, _datetime, _, data|
+ expect(data.stringify_keys)
+ .to include('correlation_id' => an_instance_of(String),
+ 'meta.remote_ip' => an_instance_of(String),
+ 'meta.client_id' => a_string_matching(%r{\Auser/.+}),
+ 'meta.user' => user.username,
+ 'meta.feature_category' => 'users',
+ 'route' => '/api/:version/users')
+
+ expect(data.stringify_keys).not_to include('meta.caller_id')
end
+
+ options(api('/users', user))
+
+ expect(response).to have_gitlab_http_status(:no_content)
end
+ end
- get(api('/users'))
+ context 'when the API version is not matched' do
+ it 'logs the route and context metadata for the client' do
+ expect(described_class::LOG_FORMATTER).to receive(:call) do |_severity, _datetime, _, data|
+ expect(data.stringify_keys)
+ .to include('correlation_id' => an_instance_of(String),
+ 'meta.remote_ip' => an_instance_of(String),
+ 'meta.client_id' => a_string_matching(%r{\Aip/.+}),
+ 'route' => '/api/:version/*path')
+
+ expect(data.stringify_keys).not_to include('meta.caller_id', 'meta.user')
+ end
+
+ get('/api/v4_or_is_it')
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
end
diff --git a/spec/requests/api/bulk_imports_spec.rb b/spec/requests/api/bulk_imports_spec.rb
index 1a28687c830..1602819a02e 100644
--- a/spec/requests/api/bulk_imports_spec.rb
+++ b/spec/requests/api/bulk_imports_spec.rb
@@ -21,6 +21,15 @@ RSpec.describe API::BulkImports do
end
describe 'POST /bulk_imports' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
+ allow(instance)
+ .to receive(:instance_version)
+ .and_return(
+ Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION, ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT))
+ end
+ end
+
it 'starts a new migration' do
post api('/bulk_imports', user), params: {
configuration: {
diff --git a/spec/requests/api/ci/resource_groups_spec.rb b/spec/requests/api/ci/resource_groups_spec.rb
new file mode 100644
index 00000000000..f5b68557a0d
--- /dev/null
+++ b/spec/requests/api/ci/resource_groups_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::ResourceGroups do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
+
+ let(:user) { developer }
+
+ describe 'GET /projects/:id/resource_groups/:key' do
+ subject { get api("/projects/#{project.id}/resource_groups/#{key}", user) }
+
+ let!(:resource_group) { create(:ci_resource_group, project: project) }
+ let(:key) { resource_group.key }
+
+ it 'returns a resource group', :aggregate_failures do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['id']).to eq(resource_group.id)
+ expect(json_response['key']).to eq(resource_group.key)
+ expect(json_response['process_mode']).to eq(resource_group.process_mode)
+ expect(Time.parse(json_response['created_at'])).to be_like_time(resource_group.created_at)
+ expect(Time.parse(json_response['updated_at'])).to be_like_time(resource_group.updated_at)
+ end
+
+ context 'when user is reporter' do
+ let(:user) { reporter }
+
+ it 'returns forbidden' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when there is no corresponding resource group' do
+ let(:key) { 'unknown' }
+
+ it 'returns not found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'PUT /projects/:id/resource_groups/:key' do
+ subject { put api("/projects/#{project.id}/resource_groups/#{key}", user), params: params }
+
+ let!(:resource_group) { create(:ci_resource_group, project: project) }
+ let(:key) { resource_group.key }
+ let(:params) { { process_mode: :oldest_first } }
+
+ it 'changes the process mode of a resource group' do
+ expect { subject }
+ .to change { resource_group.reload.process_mode }.from('unordered').to('oldest_first')
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['process_mode']).to eq('oldest_first')
+ end
+
+ context 'with invalid parameter' do
+ let(:params) { { process_mode: :unknown } }
+
+ it 'returns bad request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when user is reporter' do
+ let(:user) { reporter }
+
+ it 'returns forbidden' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when there is no corresponding resource group' do
+ let(:key) { 'unknown' }
+
+ it 'returns not found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index adac81ff6f4..c3fbef9be48 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -816,7 +816,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
subject { request_job(id: job.id) }
- it_behaves_like 'storing arguments in the application context' do
+ it_behaves_like 'storing arguments in the application context for the API' do
let(:expected_params) { { user: user.username, project: project.full_path, client_id: "user/#{user.id}" } }
end
@@ -827,7 +827,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
context 'when the runner is of project type' do
- it_behaves_like 'storing arguments in the application context' do
+ it_behaves_like 'storing arguments in the application context for the API' do
let(:expected_params) { { project: project.full_path, client_id: "runner/#{runner.id}" } }
end
@@ -841,7 +841,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
let(:group) { create(:group) }
let(:runner) { create(:ci_runner, :group, groups: [group]) }
- it_behaves_like 'storing arguments in the application context' do
+ it_behaves_like 'storing arguments in the application context for the API' do
let(:expected_params) { { root_namespace: group.full_path_components.first, client_id: "runner/#{runner.id}" } }
end
diff --git a/spec/requests/api/ci/runner/runners_delete_spec.rb b/spec/requests/api/ci/runner/runners_delete_spec.rb
index 6c6c465f161..9d1bae7cce8 100644
--- a/spec/requests/api/ci/runner/runners_delete_spec.rb
+++ b/spec/requests/api/ci/runner/runners_delete_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
let(:params) { { token: runner.token } }
end
- it_behaves_like 'storing arguments in the application context' do
+ it_behaves_like 'storing arguments in the application context for the API' do
let(:expected_params) { { client_id: "runner/#{runner.id}" } }
end
end
diff --git a/spec/requests/api/ci/runner/runners_post_spec.rb b/spec/requests/api/ci/runner/runners_post_spec.rb
index 17b988a60c5..b3a7d591c93 100644
--- a/spec/requests/api/ci/runner/runners_post_spec.rb
+++ b/spec/requests/api/ci/runner/runners_post_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(runner).to be_instance_type
end
- it_behaves_like 'storing arguments in the application context' do
+ it_behaves_like 'storing arguments in the application context for the API' do
subject { request }
let(:expected_params) { { client_id: "runner/#{::Ci::Runner.first.id}" } }
@@ -84,7 +84,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(runner).to be_project_type
end
- it_behaves_like 'storing arguments in the application context' do
+ it_behaves_like 'storing arguments in the application context for the API' do
subject { request }
let(:expected_params) { { project: project.full_path, client_id: "runner/#{::Ci::Runner.first.id}" } }
@@ -190,7 +190,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(runner).to be_group_type
end
- it_behaves_like 'storing arguments in the application context' do
+ it_behaves_like 'storing arguments in the application context for the API' do
subject { request }
let(:expected_params) { { root_namespace: group.full_path_components.first, client_id: "runner/#{::Ci::Runner.first.id}" } }
diff --git a/spec/requests/api/ci/runner/runners_verify_post_spec.rb b/spec/requests/api/ci/runner/runners_verify_post_spec.rb
index c2e97446738..4680076acae 100644
--- a/spec/requests/api/ci/runner/runners_verify_post_spec.rb
+++ b/spec/requests/api/ci/runner/runners_verify_post_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(response).to have_gitlab_http_status(:ok)
end
- it_behaves_like 'storing arguments in the application context' do
+ it_behaves_like 'storing arguments in the application context for the API' do
let(:expected_params) { { client_id: "runner/#{runner.id}" } }
end
end
diff --git a/spec/requests/api/ci/runners_reset_registration_token_spec.rb b/spec/requests/api/ci/runners_reset_registration_token_spec.rb
index 7623d3f1b17..df64c0bd22b 100644
--- a/spec/requests/api/ci/runners_reset_registration_token_spec.rb
+++ b/spec/requests/api/ci/runners_reset_registration_token_spec.rb
@@ -118,7 +118,7 @@ RSpec.describe API::Ci::Runners do
end
include_context 'when authorized', 'group' do
- let_it_be(:user) { create_default(:group_member, :maintainer, user: create(:user), group: group ).user }
+ let_it_be(:user) { create_default(:group_member, :owner, user: create(:user), group: group ).user }
def get_token
group.reload.runners_token
diff --git a/spec/requests/api/ci/runners_spec.rb b/spec/requests/api/ci/runners_spec.rb
index 902938d7d02..6879dfc9572 100644
--- a/spec/requests/api/ci/runners_spec.rb
+++ b/spec/requests/api/ci/runners_spec.rb
@@ -291,6 +291,16 @@ RSpec.describe API::Ci::Runners do
end
end
+ context 'when the runner is a group runner' do
+ it "returns the runner's details" do
+ get api("/runners/#{group_runner_a.id}", admin)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['description']).to eq(group_runner_a.description)
+ expect(json_response['groups'].first['id']).to eq(group.id)
+ end
+ end
+
context "runner project's administrative user" do
context 'when runner is not shared' do
it "returns runner's details" do
@@ -600,6 +610,94 @@ RSpec.describe API::Ci::Runners do
end
end
+ describe 'POST /runners/:id/reset_authentication_token' do
+ context 'admin user' do
+ it 'resets shared runner authentication token' do
+ expect do
+ post api("/runners/#{shared_runner.id}/reset_authentication_token", admin)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq({ 'token' => shared_runner.reload.token })
+ end.to change { shared_runner.reload.token }
+ end
+
+ it 'returns 404 if runner does not exist' do
+ post api('/runners/0/reset_authentication_token', admin)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'authorized user' do
+ it 'does not reset project runner authentication token without access to it' do
+ expect do
+ post api("/runners/#{project_runner.id}/reset_authentication_token", user2)
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end.not_to change { project_runner.reload.token }
+ end
+
+ it 'resets project runner authentication token for owned project' do
+ expect do
+ post api("/runners/#{project_runner.id}/reset_authentication_token", user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq({ 'token' => project_runner.reload.token })
+ end.to change { project_runner.reload.token }
+ end
+
+ it 'does not reset group runner authentication token with guest access' do
+ expect do
+ post api("/runners/#{group_runner_a.id}/reset_authentication_token", group_guest)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end.not_to change { group_runner_a.reload.token }
+ end
+
+ it 'does not reset group runner authentication token with reporter access' do
+ expect do
+ post api("/runners/#{group_runner_a.id}/reset_authentication_token", group_reporter)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end.not_to change { group_runner_a.reload.token }
+ end
+
+ it 'does not reset group runner authentication token with developer access' do
+ expect do
+ post api("/runners/#{group_runner_a.id}/reset_authentication_token", group_developer)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end.not_to change { group_runner_a.reload.token }
+ end
+
+ it 'does not reset group runner authentication token with maintainer access' do
+ expect do
+ post api("/runners/#{group_runner_a.id}/reset_authentication_token", group_maintainer)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end.not_to change { group_runner_a.reload.token }
+ end
+
+ it 'resets group runner authentication token with owner access' do
+ expect do
+ post api("/runners/#{group_runner_a.id}/reset_authentication_token", user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq({ 'token' => group_runner_a.reload.token })
+ end.to change { group_runner_a.reload.token }
+ end
+ end
+
+ context 'unauthorized user' do
+ it 'does not reset authentication token' do
+ expect do
+ post api("/runners/#{shared_runner.id}/reset_authentication_token")
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end.not_to change { shared_runner.reload.token }
+ end
+ end
+ end
+
describe 'GET /runners/:id/jobs' do
let_it_be(:job_1) { create(:ci_build) }
let_it_be(:job_2) { create(:ci_build, :running, runner: shared_runner, project: project) }
diff --git a/spec/requests/api/ci/triggers_spec.rb b/spec/requests/api/ci/triggers_spec.rb
index 410e2ae405e..d270a16d28d 100644
--- a/spec/requests/api/ci/triggers_spec.rb
+++ b/spec/requests/api/ci/triggers_spec.rb
@@ -131,7 +131,7 @@ RSpec.describe API::Ci::Triggers do
let(:subject_proc) { proc { post api("/projects/#{project.id}/ref/master/trigger/pipeline?token=#{trigger_token}"), params: { ref: 'refs/heads/other-branch' } } }
context 'when triggering a pipeline from a trigger token' do
- it_behaves_like 'storing arguments in the application context'
+ it_behaves_like 'storing arguments in the application context for the API'
it_behaves_like 'not executing any extra queries for the application context'
end
@@ -142,7 +142,7 @@ RSpec.describe API::Ci::Triggers do
context 'when other job is triggered by a user' do
let(:trigger_token) { create(:ci_build, :running, project: project, user: user).token }
- it_behaves_like 'storing arguments in the application context'
+ it_behaves_like 'storing arguments in the application context for the API'
it_behaves_like 'not executing any extra queries for the application context'
end
@@ -151,7 +151,7 @@ RSpec.describe API::Ci::Triggers do
let(:runner) { create(:ci_runner) }
let(:expected_params) { { client_id: "runner/#{runner.id}", project: project.full_path } }
- it_behaves_like 'storing arguments in the application context'
+ it_behaves_like 'storing arguments in the application context for the API'
it_behaves_like 'not executing any extra queries for the application context', 1
end
end
diff --git a/spec/requests/api/container_repositories_spec.rb b/spec/requests/api/container_repositories_spec.rb
index 8d7494ffce1..9809702467d 100644
--- a/spec/requests/api/container_repositories_spec.rb
+++ b/spec/requests/api/container_repositories_spec.rb
@@ -48,6 +48,19 @@ RSpec.describe API::ContainerRepositories do
expect(response).to match_response_schema('registry/repository')
end
+ context 'with a network error' do
+ before do
+ stub_container_registry_network_error(client_method: :repository_tags)
+ end
+
+ it 'returns a matching schema' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('registry/repository')
+ end
+ end
+
context 'with tags param' do
let(:url) { "/registry/repositories/#{repository.id}?tags=true" }
@@ -61,6 +74,19 @@ RSpec.describe API::ContainerRepositories do
expect(json_response['id']).to eq(repository.id)
expect(response.body).to include('tags')
end
+
+ context 'with a network error' do
+ before do
+ stub_container_registry_network_error(client_method: :repository_tags)
+ end
+
+ it 'returns a connection error message' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:service_unavailable)
+ expect(json_response['message']).to include('We are having trouble connecting to the Container Registry')
+ end
+ end
end
context 'with tags_count param' do
diff --git a/spec/requests/api/deployments_spec.rb b/spec/requests/api/deployments_spec.rb
index 38c96cd37af..69f7b54c277 100644
--- a/spec/requests/api/deployments_spec.rb
+++ b/spec/requests/api/deployments_spec.rb
@@ -376,6 +376,16 @@ RSpec.describe API::Deployments do
expect(json_response['status']).to eq('success')
end
+ it 'returns an error when an invalid status transition is detected' do
+ put(
+ api("/projects/#{project.id}/deployments/#{deploy.id}", user),
+ params: { status: 'running' }
+ )
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']['status']).to include(%Q{cannot transition via \"run\"})
+ end
+
it 'links merge requests when the deployment status changes to success', :sidekiq_inline do
mr = create(
:merge_request,
diff --git a/spec/requests/api/environments_spec.rb b/spec/requests/api/environments_spec.rb
index bc7bb7523c9..5fb24dc91a4 100644
--- a/spec/requests/api/environments_spec.rb
+++ b/spec/requests/api/environments_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe API::Environments do
get api("/projects/#{project.id}/environments", user)
expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/environments')
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
@@ -167,6 +168,7 @@ RSpec.describe API::Environments do
post api("/projects/#{project.id}/environments", user), params: { name: "mepmep" }
expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/environment')
expect(json_response['name']).to eq('mepmep')
expect(json_response['slug']).to eq('mepmep')
expect(json_response['external']).to be nil
@@ -212,6 +214,7 @@ RSpec.describe API::Environments do
params: { name: 'Mepmep', external_url: url }
expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/environment')
expect(json_response['name']).to eq('Mepmep')
expect(json_response['external_url']).to eq(url)
end
@@ -250,7 +253,7 @@ RSpec.describe API::Environments do
expect(response).to have_gitlab_http_status(:forbidden)
end
- it 'returns a 200 for stopped environment' do
+ it 'returns a 204 for stopped environment' do
environment.stop
delete api("/projects/#{project.id}/environments/#{environment.id}", user)
@@ -294,6 +297,7 @@ RSpec.describe API::Environments do
it 'returns a 200' do
expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/environment')
end
it 'actually stops the environment' do
@@ -327,6 +331,7 @@ RSpec.describe API::Environments do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/environment')
+ expect(json_response['last_deployment']).to be_present
end
end
diff --git a/spec/requests/api/error_tracking_client_keys_spec.rb b/spec/requests/api/error_tracking/client_keys_spec.rb
index 886ec5ade3d..00c1e8799e6 100644
--- a/spec/requests/api/error_tracking_client_keys_spec.rb
+++ b/spec/requests/api/error_tracking/client_keys_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::ErrorTrackingClientKeys do
+RSpec.describe API::ErrorTracking::ClientKeys do
let_it_be(:guest) { create(:user) }
let_it_be(:maintainer) { create(:user) }
let_it_be(:setting) { create(:project_error_tracking_setting) }
diff --git a/spec/requests/api/error_tracking_collector_spec.rb b/spec/requests/api/error_tracking/collector_spec.rb
index 35d3ea01f87..7acadeb1287 100644
--- a/spec/requests/api/error_tracking_collector_spec.rb
+++ b/spec/requests/api/error_tracking/collector_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::ErrorTrackingCollector do
+RSpec.describe API::ErrorTracking::Collector do
let_it_be(:project) { create(:project, :private) }
let_it_be(:setting) { create(:project_error_tracking_setting, :integrated, project: project) }
let_it_be(:client_key) { create(:error_tracking_client_key, project: project) }
diff --git a/spec/requests/api/error_tracking_spec.rb b/spec/requests/api/error_tracking/project_settings_spec.rb
index ec9a3378acc..161e4f01ea5 100644
--- a/spec/requests/api/error_tracking_spec.rb
+++ b/spec/requests/api/error_tracking/project_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::ErrorTracking do
+RSpec.describe API::ErrorTracking::ProjectSettings do
let_it_be(:user) { create(:user) }
let(:setting) { create(:project_error_tracking_setting) }
diff --git a/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb b/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb
index 008241b8055..241c658441b 100644
--- a/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb
+++ b/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe 'get board lists' do
nodes {
lists {
nodes {
- issues(filters: {labelName: "#{label2.title}"}) {
+ issues(filters: {labelName: "#{label2.title}"}, first: 3) {
count
nodes {
#{all_graphql_fields_for('issues'.classify)}
@@ -44,6 +44,10 @@ RSpec.describe 'get board lists' do
)
end
+ def issue_id
+ issues_data.map { |i| i['id'] }
+ end
+
def issue_titles
issues_data.map { |i| i['title'] }
end
@@ -60,6 +64,7 @@ RSpec.describe 'get board lists' do
let!(:issue3) { create(:issue, project: issue_project, labels: [label, label2], relative_position: nil) }
let!(:issue4) { create(:issue, project: issue_project, labels: [label], relative_position: 9) }
let!(:issue5) { create(:issue, project: issue_project, labels: [label2], relative_position: 432) }
+ let!(:issue6) { create(:issue, project: issue_project, labels: [label, label2], relative_position: nil) }
context 'when the user does not have access to the board' do
it 'returns nil' do
@@ -72,14 +77,19 @@ RSpec.describe 'get board lists' do
context 'when user can read the board' do
before do
board_parent.add_reporter(user)
+ post_graphql(query("id: \"#{global_id_of(label_list)}\""), current_user: user)
end
it 'can access the issues', :aggregate_failures do
- post_graphql(query("id: \"#{global_id_of(label_list)}\""), current_user: user)
-
+ # ties for relative positions are broken by id in ascending order by default
expect(issue_titles).to eq([issue2.title, issue1.title, issue3.title])
expect(issue_relative_positions).not_to include(nil)
end
+
+ it 'does not set the relative positions of the issues not being returned', :aggregate_failures do
+ expect(issue_id).not_to include(issue6.id)
+ expect(issue3.relative_position).to be_nil
+ end
end
end
diff --git a/spec/requests/api/graphql/boards/board_list_query_spec.rb b/spec/requests/api/graphql/boards/board_list_query_spec.rb
new file mode 100644
index 00000000000..dec7ca715f2
--- /dev/null
+++ b/spec/requests/api/graphql/boards/board_list_query_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Querying a Board list' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:board) { create(:board, resource_parent: project) }
+ let_it_be(:label) { create(:label, project: project, name: 'foo') }
+ let_it_be(:list) { create(:list, board: board, label: label) }
+ let_it_be(:issue1) { create(:issue, project: project, labels: [label]) }
+ let_it_be(:issue2) { create(:issue, project: project, labels: [label], assignees: [current_user]) }
+
+ let(:filters) { {} }
+ let(:query) do
+ graphql_query_for(
+ :board_list,
+ { id: list.to_global_id.to_s, issueFilters: filters },
+ %w[title issuesCount]
+ )
+ end
+
+ subject { graphql_data['boardList'] }
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ context 'when the user has access to the list' do
+ before_all do
+ project.add_guest(current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it { is_expected.to include({ 'issuesCount' => 2, 'title' => list.title }) }
+
+ context 'with matching issue filters' do
+ let(:filters) { { assigneeUsername: current_user.username } }
+
+ it 'filters issues metadata' do
+ is_expected.to include({ 'issuesCount' => 1, 'title' => list.title })
+ end
+ end
+
+ context 'with unmatching issue filters' do
+ let(:filters) { { assigneeUsername: 'foo' } }
+
+ it 'filters issues metadata' do
+ is_expected.to include({ 'issuesCount' => 0, 'title' => list.title })
+ end
+ end
+ end
+
+ context 'when the user does not have access to the list' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'when ID argument is missing' do
+ let(:query) do
+ graphql_query_for('boardList', {}, 'title')
+ end
+
+ it 'raises an exception' do
+ expect(graphql_errors).to include(a_hash_including('message' => "Field 'boardList' is missing required arguments: id"))
+ end
+ end
+
+ context 'when list ID is not found' do
+ let(:query) do
+ graphql_query_for('boardList', { id: "gid://gitlab/List/#{non_existing_record_id}" }, 'title')
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ it 'does not have an N+1 performance issue' do
+ a, b = create_list(:list, 2, board: board)
+ ctx = { current_user: current_user }
+ project.add_guest(current_user)
+
+ baseline = graphql_query_for(:board_list, { id: global_id_of(a) }, 'title')
+ query = <<~GQL
+ query {
+ a: #{query_graphql_field(:board_list, { id: global_id_of(a) }, 'title')}
+ b: #{query_graphql_field(:board_list, { id: global_id_of(b) }, 'title')}
+ }
+ GQL
+
+ control = ActiveRecord::QueryRecorder.new do
+ run_with_clean_state(baseline, context: ctx)
+ end
+
+ expect { run_with_clean_state(query, context: ctx) }.not_to exceed_query_limit(control)
+ end
+end
diff --git a/spec/requests/api/graphql/boards/board_lists_query_spec.rb b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
index 2d52cddcacc..ace8c59e82d 100644
--- a/spec/requests/api/graphql/boards/board_lists_query_spec.rb
+++ b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
@@ -92,9 +92,9 @@ RSpec.describe 'get board lists' do
context 'when ascending' do
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { }
- let(:first_param) { 2 }
- let(:expected_results) { lists.map { |list| global_id_of(list) } }
+ let(:sort_param) { }
+ let(:first_param) { 2 }
+ let(:all_records) { lists.map { |list| global_id_of(list) } }
end
end
end
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
index 74547196445..ab53ff654e9 100644
--- a/spec/requests/api/graphql/ci/runner_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Query.runner(id)' do
include GraphqlHelpers
let_it_be(:user) { create(:user, :admin) }
+ let_it_be(:group) { create(:group) }
let_it_be(:active_instance_runner) do
create(:ci_runner, :instance, description: 'Runner 1', contacted_at: 2.hours.ago,
@@ -18,12 +19,20 @@ RSpec.describe 'Query.runner(id)' do
version: 'adfe157', revision: 'b', ip_address: '10.10.10.10', access_level: 1, run_untagged: true)
end
+ let_it_be(:active_group_runner) do
+ create(:ci_runner, :group, groups: [group], description: 'Group runner 1', contacted_at: 2.hours.ago,
+ active: true, version: 'adfe156', revision: 'a', locked: true, ip_address: '127.0.0.1', maximum_timeout: 600,
+ access_level: 0, tag_list: %w[tag1 tag2], run_untagged: true)
+ end
+
def get_runner(id)
case id
when :active_instance_runner
active_instance_runner
when :inactive_instance_runner
inactive_instance_runner
+ when :active_group_runner
+ active_group_runner
end
end
@@ -61,7 +70,39 @@ RSpec.describe 'Query.runner(id)' do
'ipAddress' => runner.ip_address,
'runnerType' => runner.instance_type? ? 'INSTANCE_TYPE' : 'PROJECT_TYPE',
'jobCount' => 0,
- 'projectCount' => nil
+ 'projectCount' => nil,
+ 'adminUrl' => "http://localhost/admin/runners/#{runner.id}",
+ 'userPermissions' => {
+ 'readRunner' => true,
+ 'updateRunner' => true,
+ 'deleteRunner' => true
+ }
+ )
+ expect(runner_data['tagList']).to match_array runner.tag_list
+ end
+ end
+
+ shared_examples 'retrieval with no admin url' do |runner_id|
+ let(:query) do
+ wrap_fields(query_graphql_path(query_path, all_graphql_fields_for('CiRunner')))
+ end
+
+ let(:query_path) do
+ [
+ [:runner, { id: get_runner(runner_id).to_global_id.to_s }]
+ ]
+ end
+
+ it 'retrieves expected fields' do
+ post_graphql(query, current_user: user)
+
+ runner_data = graphql_data_at(:runner)
+ expect(runner_data).not_to be_nil
+
+ runner = get_runner(runner_id)
+ expect(runner_data).to match a_hash_including(
+ 'id' => "gid://gitlab/Ci::Runner/#{runner.id}",
+ 'adminUrl' => nil
)
expect(runner_data['tagList']).to match_array runner.tag_list
end
@@ -147,6 +188,39 @@ RSpec.describe 'Query.runner(id)' do
it_behaves_like 'runner details fetch', :inactive_instance_runner
end
+ describe 'for runner inside group request' do
+ let(:query) do
+ %(
+ query {
+ group(fullPath: "#{group.full_path}") {
+ runners {
+ edges {
+ webUrl
+ node {
+ id
+ }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ it 'retrieves webUrl field with expected value' do
+ post_graphql(query, current_user: user)
+
+ runner_data = graphql_data_at(:group, :runners, :edges)
+ expect(runner_data).to match_array [
+ a_hash_including(
+ 'webUrl' => "http://localhost/groups/#{group.full_path}/-/runners/#{active_group_runner.id}",
+ 'node' => {
+ 'id' => "gid://gitlab/Ci::Runner/#{active_group_runner.id}"
+ }
+ )
+ ]
+ end
+ end
+
describe 'for multiple runners' do
let_it_be(:project1) { create(:project, :test_repo) }
let_it_be(:project2) { create(:project, :test_repo) }
@@ -176,7 +250,7 @@ RSpec.describe 'Query.runner(id)' do
end
before do
- project_runner2.projects.clear
+ project_runner2.runner_projects.clear
post_graphql(query, current_user: user)
end
@@ -205,6 +279,16 @@ RSpec.describe 'Query.runner(id)' do
it_behaves_like 'retrieval by unauthorized user', :active_instance_runner
end
+ describe 'by non-admin user' do
+ let(:user) { create(:user) }
+
+ before do
+ group.add_user(user, Gitlab::Access::OWNER)
+ end
+
+ it_behaves_like 'retrieval with no admin url', :active_group_runner
+ end
+
describe 'by unauthenticated user' do
let(:user) { nil }
diff --git a/spec/requests/api/graphql/ci/runners_spec.rb b/spec/requests/api/graphql/ci/runners_spec.rb
index 778fe5b129e..51a07e60e15 100644
--- a/spec/requests/api/graphql/ci/runners_spec.rb
+++ b/spec/requests/api/graphql/ci/runners_spec.rb
@@ -95,9 +95,9 @@ RSpec.describe 'Query.runners' do
let(:ordered_runners) { runners.sort_by(&:contacted_at) }
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { :CONTACTED_ASC }
- let(:first_param) { 2 }
- let(:expected_results) { ordered_runners.map(&:id) }
+ let(:sort_param) { :CONTACTED_ASC }
+ let(:first_param) { 2 }
+ let(:all_records) { ordered_runners.map(&:id) }
end
end
@@ -105,9 +105,9 @@ RSpec.describe 'Query.runners' do
let(:ordered_runners) { runners.sort_by(&:created_at).reverse }
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { :CREATED_DESC }
- let(:first_param) { 2 }
- let(:expected_results) { ordered_runners.map(&:id) }
+ let(:sort_param) { :CREATED_DESC }
+ let(:first_param) { 2 }
+ let(:all_records) { ordered_runners.map(&:id) }
end
end
end
diff --git a/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb b/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb
index 356e1e11def..d93afcc0f33 100644
--- a/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb
+++ b/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb
@@ -153,4 +153,6 @@ RSpec.describe 'container repository details' do
end
end
end
+
+ it_behaves_like 'handling graphql network errors with the container registry'
end
diff --git a/spec/requests/api/graphql/group/container_repositories_spec.rb b/spec/requests/api/graphql/group/container_repositories_spec.rb
index 939d7791d92..be0b866af4a 100644
--- a/spec/requests/api/graphql/group/container_repositories_spec.rb
+++ b/spec/requests/api/graphql/group/container_repositories_spec.rb
@@ -14,11 +14,12 @@ RSpec.describe 'getting container repositories in a group' do
let_it_be(:container_repositories) { [container_repository, container_repositories_delete_scheduled, container_repositories_delete_failed].flatten }
let_it_be(:container_expiration_policy) { project.container_expiration_policy }
+ let(:excluded_fields) { [] }
let(:container_repositories_fields) do
<<~GQL
edges {
node {
- #{all_graphql_fields_for('container_repositories'.classify, max_depth: 1)}
+ #{all_graphql_fields_for('container_repositories'.classify, max_depth: 1, excluded: excluded_fields)}
}
}
GQL
@@ -152,6 +153,12 @@ RSpec.describe 'getting container repositories in a group' do
end
end
+ it_behaves_like 'handling graphql network errors with the container registry'
+
+ it_behaves_like 'not hitting graphql network errors with the container registry' do
+ let(:excluded_fields) { %w[tags tagsCount] }
+ end
+
it 'returns the total count of container repositories' do
subject
diff --git a/spec/requests/api/graphql/group/dependency_proxy_group_setting_spec.rb b/spec/requests/api/graphql/group/dependency_proxy_group_setting_spec.rb
index c5c6d85d1e6..de3dbc5c324 100644
--- a/spec/requests/api/graphql/group/dependency_proxy_group_setting_spec.rb
+++ b/spec/requests/api/graphql/group/dependency_proxy_group_setting_spec.rb
@@ -33,46 +33,59 @@ RSpec.describe 'getting dependency proxy settings for a group' do
before do
stub_config(dependency_proxy: { enabled: true })
- group.create_dependency_proxy_setting!(enabled: true)
end
subject { post_graphql(query, current_user: user, variables: variables) }
- it_behaves_like 'a working graphql query' do
- before do
- subject
- end
- end
-
- context 'with different permissions' do
- where(:group_visibility, :role, :access_granted) do
- :private | :maintainer | true
- :private | :developer | true
- :private | :reporter | true
- :private | :guest | true
- :private | :anonymous | false
- :public | :maintainer | true
- :public | :developer | true
- :public | :reporter | true
- :public | :guest | true
- :public | :anonymous | false
+ shared_examples 'dependency proxy group setting query' do
+ it_behaves_like 'a working graphql query' do
+ before do
+ subject
+ end
end
- with_them do
- before do
- group.update_column(:visibility_level, Gitlab::VisibilityLevel.const_get(group_visibility.to_s.upcase, false))
- group.add_user(user, role) unless role == :anonymous
+ context 'with different permissions' do
+ where(:group_visibility, :role, :access_granted) do
+ :private | :maintainer | true
+ :private | :developer | true
+ :private | :reporter | true
+ :private | :guest | true
+ :private | :anonymous | false
+ :public | :maintainer | true
+ :public | :developer | true
+ :public | :reporter | true
+ :public | :guest | true
+ :public | :anonymous | false
end
- it 'return the proper response' do
- subject
+ with_them do
+ before do
+ group.update_column(:visibility_level, Gitlab::VisibilityLevel.const_get(group_visibility.to_s.upcase, false))
+ group.add_user(user, role) unless role == :anonymous
+ end
+
+ it 'return the proper response' do
+ subject
- if access_granted
- expect(dependency_proxy_group_setting_response).to eq('enabled' => true)
- else
- expect(dependency_proxy_group_setting_response).to be_blank
+ if access_granted
+ expect(dependency_proxy_group_setting_response).to eq('enabled' => true)
+ else
+ expect(dependency_proxy_group_setting_response).to be_blank
+ end
end
end
end
end
+
+ context 'with the settings model created' do
+ before do
+ group.create_dependency_proxy_setting!(enabled: true)
+ end
+
+ it_behaves_like 'dependency proxy group setting query'
+ end
+
+ context 'without the settings model created' do
+ it_behaves_like 'dependency proxy group setting query'
+ end
end
diff --git a/spec/requests/api/graphql/group/issues_spec.rb b/spec/requests/api/graphql/group/issues_spec.rb
new file mode 100644
index 00000000000..332bf242e9c
--- /dev/null
+++ b/spec/requests/api/graphql/group/issues_spec.rb
@@ -0,0 +1,123 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting an issue list for a group' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group1) { create(:group) }
+ let_it_be(:group2) { create(:group) }
+ let_it_be(:project1) { create(:project, :public, group: group1) }
+ let_it_be(:project2) { create(:project, :private, group: group1) }
+ let_it_be(:project3) { create(:project, :public, group: group2) }
+ let_it_be(:issue1) { create(:issue, project: project1) }
+ let_it_be(:issue2) { create(:issue, project: project2) }
+ let_it_be(:issue3) { create(:issue, project: project3) }
+
+ let(:issue1_gid) { issue1.to_global_id.to_s }
+ let(:issue2_gid) { issue2.to_global_id.to_s }
+ let(:issues_data) { graphql_data['group']['issues']['edges'] }
+ let(:issue_filter_params) { {} }
+
+ let(:fields) do
+ <<~QUERY
+ edges {
+ node {
+ #{all_graphql_fields_for('issues'.classify)}
+ }
+ }
+ QUERY
+ end
+
+ let(:query) do
+ graphql_query_for(
+ 'group',
+ { 'fullPath' => group1.full_path },
+ query_graphql_field('issues', issue_filter_params, fields)
+ )
+ end
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+ end
+
+ context 'when there is a confidential issue' do
+ let_it_be(:confidential_issue1) { create(:issue, :confidential, project: project1) }
+ let_it_be(:confidential_issue2) { create(:issue, :confidential, project: project2) }
+ let_it_be(:confidential_issue3) { create(:issue, :confidential, project: project3) }
+
+ let(:confidential_issue1_gid) { confidential_issue1.to_global_id.to_s }
+ let(:confidential_issue2_gid) { confidential_issue2.to_global_id.to_s }
+
+ context 'when the user cannot see confidential issues' do
+ before do
+ group1.add_guest(current_user)
+ end
+
+ it 'returns issues without confidential issues for the group' do
+ post_graphql(query, current_user: current_user)
+
+ expect(issues_ids).to contain_exactly(issue1_gid, issue2_gid)
+ end
+
+ context 'filtering for confidential issues' do
+ let(:issue_filter_params) { { confidential: true } }
+
+ it 'returns no issues' do
+ post_graphql(query, current_user: current_user)
+
+ expect(issues_ids).to be_empty
+ end
+ end
+
+ context 'filtering for non-confidential issues' do
+ let(:issue_filter_params) { { confidential: false } }
+
+ it 'returns correctly filtered issues' do
+ post_graphql(query, current_user: current_user)
+
+ expect(issues_ids).to contain_exactly(issue1_gid, issue2_gid)
+ end
+ end
+ end
+
+ context 'when the user can see confidential issues' do
+ before do
+ group1.add_developer(current_user)
+ end
+
+ it 'returns issues with confidential issues for the group' do
+ post_graphql(query, current_user: current_user)
+
+ expect(issues_ids).to contain_exactly(issue1_gid, issue2_gid, confidential_issue1_gid, confidential_issue2_gid)
+ end
+
+ context 'filtering for confidential issues' do
+ let(:issue_filter_params) { { confidential: true } }
+
+ it 'returns correctly filtered issues' do
+ post_graphql(query, current_user: current_user)
+
+ expect(issues_ids).to contain_exactly(confidential_issue1_gid, confidential_issue2_gid)
+ end
+ end
+
+ context 'filtering for non-confidential issues' do
+ let(:issue_filter_params) { { confidential: false } }
+
+ it 'returns correctly filtered issues' do
+ post_graphql(query, current_user: current_user)
+
+ expect(issues_ids).to contain_exactly(issue1_gid, issue2_gid)
+ end
+ end
+ end
+ end
+
+ def issues_ids
+ graphql_dig_at(issues_data, :node, :id)
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/ci/runners_registration_token/reset_spec.rb b/spec/requests/api/graphql/mutations/ci/runners_registration_token/reset_spec.rb
index 07b05ead651..0fd8fdc3f59 100644
--- a/spec/requests/api/graphql/mutations/ci/runners_registration_token/reset_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/runners_registration_token/reset_spec.rb
@@ -89,7 +89,7 @@ RSpec.describe 'RunnersRegistrationTokenReset' do
end
include_context 'when authorized', 'group' do
- let_it_be(:user) { create_default(:group_member, :maintainer, user: create(:user), group: group ).user }
+ let_it_be(:user) { create_default(:group_member, :owner, user: create(:user), group: group ).user }
def get_token
group.reload.runners_token
diff --git a/spec/requests/api/graphql/mutations/clusters/agent_tokens/agent_tokens/create_spec.rb b/spec/requests/api/graphql/mutations/clusters/agent_tokens/agent_tokens/create_spec.rb
new file mode 100644
index 00000000000..aac8eb22771
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/clusters/agent_tokens/agent_tokens/create_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Create a new cluster agent token' do
+ include GraphqlHelpers
+
+ let_it_be(:cluster_agent) { create(:cluster_agent) }
+ let_it_be(:current_user) { create(:user) }
+
+ let(:description) { 'create token' }
+ let(:name) { 'token name' }
+ let(:mutation) do
+ graphql_mutation(
+ :cluster_agent_token_create,
+ { cluster_agent_id: cluster_agent.to_global_id.to_s, description: description, name: name }
+ )
+ end
+
+ def mutation_response
+ graphql_mutation_response(:cluster_agent_token_create)
+ end
+
+ context 'without user permissions' do
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ["The resource that you are attempting to access does not exist "\
+ "or you don't have permission to perform this action"]
+
+ it 'does not create a token' do
+ expect { post_graphql_mutation(mutation, current_user: current_user) }.not_to change(Clusters::AgentToken, :count)
+ end
+ end
+
+ context 'with project permissions' do
+ before do
+ cluster_agent.project.add_maintainer(current_user)
+ end
+
+ it 'creates a new token', :aggregate_failures do
+ expect { post_graphql_mutation(mutation, current_user: current_user) }.to change { Clusters::AgentToken.count }.by(1)
+ expect(mutation_response['errors']).to eq([])
+ end
+
+ it 'returns token information', :aggregate_failures do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['secret']).not_to be_nil
+ expect(mutation_response.dig('token', 'description')).to eq(description)
+ expect(mutation_response.dig('token', 'name')).to eq(name)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/clusters/agents/create_spec.rb b/spec/requests/api/graphql/mutations/clusters/agents/create_spec.rb
new file mode 100644
index 00000000000..c2ef2362d66
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/clusters/agents/create_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Create a new cluster agent' do
+ include GraphqlHelpers
+
+ let(:project) { create(:project, :public, :repository) }
+ let(:project_name) { 'agent-test' }
+ let(:current_user) { create(:user) }
+
+ let(:mutation) do
+ graphql_mutation(
+ :create_cluster_agent,
+ { project_path: project.full_path, name: project_name }
+ )
+ end
+
+ def mutation_response
+ graphql_mutation_response(:create_cluster_agent)
+ end
+
+ context 'without project permissions' do
+ it_behaves_like 'a mutation that returns a top-level access error'
+
+ it 'does not create cluster agent' do
+ expect { post_graphql_mutation(mutation, current_user: current_user) }.not_to change(Clusters::Agent, :count)
+ end
+ end
+
+ context 'with user permissions' do
+ before do
+ project.add_maintainer(current_user)
+ end
+
+ it 'creates a new cluster agent', :aggregate_failures do
+ expect { post_graphql_mutation(mutation, current_user: current_user) }.to change { Clusters::Agent.count }.by(1)
+ expect(mutation_response.dig('clusterAgent', 'name')).to eq(project_name)
+ expect(mutation_response['errors']).to eq([])
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/clusters/agents/delete_spec.rb b/spec/requests/api/graphql/mutations/clusters/agents/delete_spec.rb
new file mode 100644
index 00000000000..5f6822223ca
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/clusters/agents/delete_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Delete a cluster agent' do
+ include GraphqlHelpers
+
+ let(:cluster_agent) { create(:cluster_agent) }
+ let(:project) { cluster_agent.project }
+ let(:current_user) { create(:user) }
+
+ let(:mutation) do
+ graphql_mutation(
+ :cluster_agent_delete,
+ { id: cluster_agent.to_global_id.uri }
+ )
+ end
+
+ def mutation_response
+ graphql_mutation_response(:cluster_agent_delete)
+ end
+
+ context 'without project permissions' do
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ['The resource that you are attempting to access does not exist '\
+ 'or you don\'t have permission to perform this action']
+
+ it 'does not delete cluster agent' do
+ expect { cluster_agent.reload }.not_to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'with project permissions' do
+ before do
+ project.add_maintainer(current_user)
+ end
+
+ it 'deletes a cluster agent', :aggregate_failures do
+ expect { post_graphql_mutation(mutation, current_user: current_user) }.to change { Clusters::Agent.count }.by(-1)
+ expect(mutation_response['errors']).to eq([])
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/dependency_proxy/group_settings/update_spec.rb b/spec/requests/api/graphql/mutations/dependency_proxy/group_settings/update_spec.rb
new file mode 100644
index 00000000000..f05bf23ad27
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/dependency_proxy/group_settings/update_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Updating the dependency proxy group settings' do
+ include GraphqlHelpers
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { create(:user) }
+
+ let(:params) do
+ {
+ group_path: group.full_path,
+ enabled: false
+ }
+ end
+
+ let(:mutation) do
+ graphql_mutation(:update_dependency_proxy_settings, params) do
+ <<~QL
+ dependencyProxySetting {
+ enabled
+ }
+ errors
+ QL
+ end
+ end
+
+ let(:mutation_response) { graphql_mutation_response(:update_dependency_proxy_settings) }
+ let(:group_settings) { mutation_response['dependencyProxySetting'] }
+
+ before do
+ stub_config(dependency_proxy: { enabled: true })
+ end
+
+ describe 'post graphql mutation' do
+ subject { post_graphql_mutation(mutation, current_user: user) }
+
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be_with_reload(:group_settings) { create(:dependency_proxy_group_setting, group: group) }
+
+ context 'without permission' do
+ it 'returns no response' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response).to be_nil
+ end
+ end
+
+ context 'with permission' do
+ before do
+ group.add_developer(user)
+ end
+
+ it 'returns the updated dependency proxy settings', :aggregate_failures do
+ subject
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['errors']).to be_empty
+ expect(group_settings[:enabled]).to eq(false)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
index dec9afd1310..608b36e4f15 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
@@ -115,7 +115,7 @@ RSpec.describe 'Setting assignees of a merge request', :assume_throttled do
context 'when passing append as true' do
let(:mode) { Types::MutationOperationModeEnum.enum[:append] }
let(:input) { { assignee_usernames: [assignee2.username], operation_mode: mode } }
- let(:db_query_limit) { 21 }
+ let(:db_query_limit) { 22 }
before do
# In CE, APPEND is a NOOP as you can't have multiple assignees
diff --git a/spec/requests/api/graphql/namespace/projects_spec.rb b/spec/requests/api/graphql/namespace/projects_spec.rb
index 414847c9c93..d5410f1a7cb 100644
--- a/spec/requests/api/graphql/namespace/projects_spec.rb
+++ b/spec/requests/api/graphql/namespace/projects_spec.rb
@@ -106,10 +106,10 @@ RSpec.describe 'getting projects' do
context 'when sorting by similarity' do
it_behaves_like 'sorted paginated query' do
- let(:node_path) { %w[name] }
- let(:sort_param) { :SIMILARITY }
- let(:first_param) { 2 }
- let(:expected_results) { [project_3.name, project_2.name, project_4.name] }
+ let(:node_path) { %w[name] }
+ let(:sort_param) { :SIMILARITY }
+ let(:first_param) { 2 }
+ let(:all_records) { [project_3.name, project_2.name, project_4.name] }
end
end
end
diff --git a/spec/requests/api/graphql/project/cluster_agents_spec.rb b/spec/requests/api/graphql/project/cluster_agents_spec.rb
new file mode 100644
index 00000000000..dc7254dd552
--- /dev/null
+++ b/spec/requests/api/graphql/project/cluster_agents_spec.rb
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Project.cluster_agents' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:current_user) { create(:user, maintainer_projects: [project]) }
+ let_it_be(:agents) { create_list(:cluster_agent, 5, project: project) }
+
+ let(:first) { var('Int') }
+ let(:cluster_agents_fields) { nil }
+ let(:project_fields) do
+ query_nodes(:cluster_agents, cluster_agents_fields, args: { first: first }, max_depth: 3)
+ end
+
+ let(:query) do
+ args = { full_path: project.full_path }
+
+ with_signature([first], graphql_query_for(:project, args, project_fields))
+ end
+
+ before do
+ allow(Gitlab::Kas::Client).to receive(:new).and_return(double(get_connected_agents: []))
+ end
+
+ it 'can retrieve cluster agents' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(:project, :cluster_agents, :nodes)).to match_array(
+ agents.map { |agent| a_hash_including('id' => global_id_of(agent)) }
+ )
+ end
+
+ context 'selecting page info' do
+ let(:project_fields) do
+ query_nodes(:cluster_agents, args: { first: first }, include_pagination_info: true)
+ end
+
+ it 'can paginate cluster agents' do
+ post_graphql(query, current_user: current_user, variables: first.with(2))
+
+ expect(graphql_data_at(:project, :cluster_agents, :page_info)).to include(
+ 'hasNextPage' => be_truthy,
+ 'hasPreviousPage' => be_falsey
+ )
+ expect(graphql_data_at(:project, :cluster_agents, :nodes)).to have_attributes(size: 2)
+ end
+ end
+
+ context 'selecting tokens' do
+ let_it_be(:token_1) { create(:cluster_agent_token, agent: agents.second) }
+ let_it_be(:token_2) { create(:cluster_agent_token, agent: agents.second, last_used_at: 3.days.ago) }
+ let_it_be(:token_3) { create(:cluster_agent_token, agent: agents.second, last_used_at: 2.days.ago) }
+
+ let(:cluster_agents_fields) { [:id, query_nodes(:tokens, of: 'ClusterAgentToken')] }
+
+ it 'can select tokens in last_used_at order' do
+ post_graphql(query, current_user: current_user)
+
+ tokens = graphql_data_at(:project, :cluster_agents, :nodes, :tokens, :nodes)
+
+ expect(tokens).to match([
+ a_hash_including('id' => global_id_of(token_3)),
+ a_hash_including('id' => global_id_of(token_2)),
+ a_hash_including('id' => global_id_of(token_1))
+ ])
+ end
+
+ it 'does not suffer from N+1 performance issues' do
+ post_graphql(query, current_user: current_user)
+
+ expect do
+ post_graphql(query, current_user: current_user)
+ end.to issue_same_number_of_queries_as { post_graphql(query, current_user: current_user, variables: [first.with(1)]) }
+ end
+ end
+
+ context 'selecting connections' do
+ let(:agent_meta) { double(version: '1', commit_id: 'abc', pod_namespace: 'namespace', pod_name: 'pod') }
+ let(:connected_agent) { double(agent_id: agents.first.id, connected_at: 123456, connection_id: 1, agent_meta: agent_meta) }
+
+ let(:metadata_fields) { query_graphql_field(:metadata, {}, [:version, :commit, :pod_namespace, :pod_name], 'AgentMetadata') }
+ let(:cluster_agents_fields) { [:id, query_nodes(:connections, [:connection_id, :connected_at, metadata_fields])] }
+
+ before do
+ allow(Gitlab::Kas::Client).to receive(:new).and_return(double(get_connected_agents: [connected_agent]))
+ end
+
+ it 'can retrieve connections and agent metadata' do
+ post_graphql(query, current_user: current_user)
+
+ connection = graphql_data_at(:project, :cluster_agents, :nodes, :connections, :nodes).first
+
+ expect(connection).to include({
+ 'connectionId' => connected_agent.connection_id.to_s,
+ 'connectedAt' => Time.at(connected_agent.connected_at),
+ 'metadata' => {
+ 'version' => agent_meta.version,
+ 'commit' => agent_meta.commit_id,
+ 'podNamespace' => agent_meta.pod_namespace,
+ 'podName' => agent_meta.pod_name
+ }
+ })
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/project/container_repositories_spec.rb b/spec/requests/api/graphql/project/container_repositories_spec.rb
index 3ad56223b61..692143b2215 100644
--- a/spec/requests/api/graphql/project/container_repositories_spec.rb
+++ b/spec/requests/api/graphql/project/container_repositories_spec.rb
@@ -12,11 +12,12 @@ RSpec.describe 'getting container repositories in a project' do
let_it_be(:container_repositories) { [container_repository, container_repositories_delete_scheduled, container_repositories_delete_failed].flatten }
let_it_be(:container_expiration_policy) { project.container_expiration_policy }
+ let(:excluded_fields) { %w[pipeline jobs] }
let(:container_repositories_fields) do
<<~GQL
edges {
node {
- #{all_graphql_fields_for('container_repositories'.classify, excluded: %w(pipeline jobs))}
+ #{all_graphql_fields_for('container_repositories'.classify, excluded: excluded_fields)}
}
}
GQL
@@ -151,6 +152,12 @@ RSpec.describe 'getting container repositories in a project' do
end
end
+ it_behaves_like 'handling graphql network errors with the container registry'
+
+ it_behaves_like 'not hitting graphql network errors with the container registry' do
+ let(:excluded_fields) { %w[pipeline jobs tags tagsCount] }
+ end
+
it 'returns the total count of container repositories' do
subject
@@ -190,7 +197,7 @@ RSpec.describe 'getting container repositories in a project' do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { :NAME_ASC }
let(:first_param) { 2 }
- let(:expected_results) { [container_repository2.name, container_repository1.name, container_repository4.name, container_repository3.name, container_repository5.name] }
+ let(:all_records) { [container_repository2.name, container_repository1.name, container_repository4.name, container_repository3.name, container_repository5.name] }
end
end
@@ -198,7 +205,7 @@ RSpec.describe 'getting container repositories in a project' do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { :NAME_DESC }
let(:first_param) { 2 }
- let(:expected_results) { [container_repository5.name, container_repository3.name, container_repository4.name, container_repository1.name, container_repository2.name] }
+ let(:all_records) { [container_repository5.name, container_repository3.name, container_repository4.name, container_repository1.name, container_repository2.name] }
end
end
end
diff --git a/spec/requests/api/graphql/project/issues_spec.rb b/spec/requests/api/graphql/project/issues_spec.rb
index c6b4d82bf15..1c6d6ce4707 100644
--- a/spec/requests/api/graphql/project/issues_spec.rb
+++ b/spec/requests/api/graphql/project/issues_spec.rb
@@ -5,12 +5,15 @@ require 'spec_helper'
RSpec.describe 'getting an issue list for a project' do
include GraphqlHelpers
- let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, :public, group: group) }
let_it_be(:current_user) { create(:user) }
let_it_be(:issue_a, reload: true) { create(:issue, project: project, discussion_locked: true) }
let_it_be(:issue_b, reload: true) { create(:issue, :with_alert, project: project) }
let_it_be(:issues, reload: true) { [issue_a, issue_b] }
+ let(:issue_a_gid) { issue_a.to_global_id.to_s }
+ let(:issue_b_gid) { issue_b.to_global_id.to_s }
let(:issues_data) { graphql_data['project']['issues']['edges'] }
let(:issue_filter_params) { {} }
@@ -66,9 +69,6 @@ RSpec.describe 'getting an issue list for a project' do
let_it_be(:upvote_award) { create(:award_emoji, :upvote, user: current_user, awardable: issue_a) }
- let(:issue_a_gid) { issue_a.to_global_id.to_s }
- let(:issue_b_gid) { issue_b.to_global_id.to_s }
-
where(:value, :gids) do
'thumbsup' | lazy { [issue_a_gid] }
'ANY' | lazy { [issue_a_gid] }
@@ -84,7 +84,7 @@ RSpec.describe 'getting an issue list for a project' do
it 'returns correctly filtered issues' do
post_graphql(query, current_user: current_user)
- expect(graphql_dig_at(issues_data, :node, :id)).to eq(gids)
+ expect(issues_ids).to eq(gids)
end
end
end
@@ -149,6 +149,8 @@ RSpec.describe 'getting an issue list for a project' do
create(:issue, :confidential, project: project)
end
+ let(:confidential_issue_gid) { confidential_issue.to_global_id.to_s }
+
context 'when the user cannot see confidential issues' do
it 'returns issues without confidential issues' do
post_graphql(query, current_user: current_user)
@@ -159,12 +161,34 @@ RSpec.describe 'getting an issue list for a project' do
expect(issue.dig('node', 'confidential')).to eq(false)
end
end
+
+ context 'filtering for confidential issues' do
+ let(:issue_filter_params) { { confidential: true } }
+
+ it 'returns no issues' do
+ post_graphql(query, current_user: current_user)
+
+ expect(issues_data.size).to eq(0)
+ end
+ end
+
+ context 'filtering for non-confidential issues' do
+ let(:issue_filter_params) { { confidential: false } }
+
+ it 'returns correctly filtered issues' do
+ post_graphql(query, current_user: current_user)
+
+ expect(issues_ids).to contain_exactly(issue_a_gid, issue_b_gid)
+ end
+ end
end
context 'when the user can see confidential issues' do
- it 'returns issues with confidential issues' do
+ before do
project.add_developer(current_user)
+ end
+ it 'returns issues with confidential issues' do
post_graphql(query, current_user: current_user)
expect(issues_data.size).to eq(3)
@@ -175,6 +199,26 @@ RSpec.describe 'getting an issue list for a project' do
expect(confidentials).to eq([true, false, false])
end
+
+ context 'filtering for confidential issues' do
+ let(:issue_filter_params) { { confidential: true } }
+
+ it 'returns correctly filtered issues' do
+ post_graphql(query, current_user: current_user)
+
+ expect(issues_ids).to contain_exactly(confidential_issue_gid)
+ end
+ end
+
+ context 'filtering for non-confidential issues' do
+ let(:issue_filter_params) { { confidential: false } }
+
+ it 'returns correctly filtered issues' do
+ post_graphql(query, current_user: current_user)
+
+ expect(issues_ids).to contain_exactly(issue_a_gid, issue_b_gid)
+ end
+ end
end
end
@@ -205,7 +249,7 @@ RSpec.describe 'getting an issue list for a project' do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { :DUE_DATE_ASC }
let(:first_param) { 2 }
- let(:expected_results) { [due_issue3.iid, due_issue5.iid, due_issue1.iid, due_issue4.iid, due_issue2.iid] }
+ let(:all_records) { [due_issue3.iid, due_issue5.iid, due_issue1.iid, due_issue4.iid, due_issue2.iid] }
end
end
@@ -213,7 +257,7 @@ RSpec.describe 'getting an issue list for a project' do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { :DUE_DATE_DESC }
let(:first_param) { 2 }
- let(:expected_results) { [due_issue1.iid, due_issue5.iid, due_issue3.iid, due_issue4.iid, due_issue2.iid] }
+ let(:all_records) { [due_issue1.iid, due_issue5.iid, due_issue3.iid, due_issue4.iid, due_issue2.iid] }
end
end
end
@@ -230,10 +274,10 @@ RSpec.describe 'getting an issue list for a project' do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { :RELATIVE_POSITION_ASC }
let(:first_param) { 2 }
- let(:expected_results) do
+ let(:all_records) do
[
relative_issue5.iid, relative_issue3.iid, relative_issue1.iid,
- relative_issue4.iid, relative_issue2.iid
+ relative_issue2.iid, relative_issue4.iid
]
end
end
@@ -256,7 +300,7 @@ RSpec.describe 'getting an issue list for a project' do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { :PRIORITY_ASC }
let(:first_param) { 2 }
- let(:expected_results) do
+ let(:all_records) do
[
priority_issue3.iid, priority_issue1.iid,
priority_issue2.iid, priority_issue4.iid
@@ -269,7 +313,7 @@ RSpec.describe 'getting an issue list for a project' do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { :PRIORITY_DESC }
let(:first_param) { 2 }
- let(:expected_results) do
+ let(:all_records) do
[priority_issue1.iid, priority_issue3.iid, priority_issue2.iid, priority_issue4.iid]
end
end
@@ -288,17 +332,17 @@ RSpec.describe 'getting an issue list for a project' do
context 'when ascending' do
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { :LABEL_PRIORITY_ASC }
- let(:first_param) { 2 }
- let(:expected_results) { [label_issue3.iid, label_issue1.iid, label_issue2.iid, label_issue4.iid] }
+ let(:sort_param) { :LABEL_PRIORITY_ASC }
+ let(:first_param) { 2 }
+ let(:all_records) { [label_issue3.iid, label_issue1.iid, label_issue2.iid, label_issue4.iid] }
end
end
context 'when descending' do
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { :LABEL_PRIORITY_DESC }
- let(:first_param) { 2 }
- let(:expected_results) { [label_issue2.iid, label_issue3.iid, label_issue1.iid, label_issue4.iid] }
+ let(:sort_param) { :LABEL_PRIORITY_DESC }
+ let(:first_param) { 2 }
+ let(:all_records) { [label_issue2.iid, label_issue3.iid, label_issue1.iid, label_issue4.iid] }
end
end
end
@@ -313,17 +357,17 @@ RSpec.describe 'getting an issue list for a project' do
context 'when ascending' do
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { :MILESTONE_DUE_ASC }
- let(:first_param) { 2 }
- let(:expected_results) { [milestone_issue2.iid, milestone_issue3.iid, milestone_issue1.iid] }
+ let(:sort_param) { :MILESTONE_DUE_ASC }
+ let(:first_param) { 2 }
+ let(:all_records) { [milestone_issue2.iid, milestone_issue3.iid, milestone_issue1.iid] }
end
end
context 'when descending' do
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { :MILESTONE_DUE_DESC }
- let(:first_param) { 2 }
- let(:expected_results) { [milestone_issue3.iid, milestone_issue2.iid, milestone_issue1.iid] }
+ let(:sort_param) { :MILESTONE_DUE_DESC }
+ let(:first_param) { 2 }
+ let(:all_records) { [milestone_issue3.iid, milestone_issue2.iid, milestone_issue1.iid] }
end
end
end
@@ -366,6 +410,35 @@ RSpec.describe 'getting an issue list for a project' do
end
end
+ context 'when fetching customer_relations_contacts' do
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ id
+ customerRelationsContacts {
+ nodes {
+ firstName
+ }
+ }
+ }
+ QUERY
+ end
+
+ def clean_state_query
+ run_with_clean_state(query, context: { current_user: current_user })
+ end
+
+ it 'avoids N+1 queries' do
+ create(:contact, group_id: group.id, issues: [issue_a])
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) { clean_state_query }
+
+ create(:contact, group_id: group.id, issues: [issue_a])
+
+ expect { clean_state_query }.not_to exceed_all_query_limit(control)
+ end
+ end
+
context 'when fetching labels' do
let(:fields) do
<<~QUERY
@@ -526,4 +599,8 @@ RSpec.describe 'getting an issue list for a project' do
include_examples 'N+1 query check'
end
end
+
+ def issues_ids
+ graphql_dig_at(issues_data, :node, :id)
+ end
end
diff --git a/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb b/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb
index 70c5bda35e1..820a5d818c7 100644
--- a/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe 'Query.project.mergeRequests.pipelines' do
before do
merge_requests.each do |mr|
- shas = mr.all_commits.limit(2).pluck(:sha)
+ shas = mr.recent_diff_head_shas
shas.each do |sha|
create(:ci_pipeline, :success, project: project, ref: mr.source_branch, sha: sha)
@@ -52,7 +52,7 @@ RSpec.describe 'Query.project.mergeRequests.pipelines' do
p_nodes = graphql_data_at(:project, :merge_requests, :nodes)
- expect(p_nodes).to all(match('iid' => be_present, 'pipelines' => match('count' => 2)))
+ expect(p_nodes).to all(match('iid' => be_present, 'pipelines' => match('count' => 1)))
end
it 'is scalable', :request_store, :use_clean_rails_memory_store_caching do
diff --git a/spec/requests/api/graphql/project/merge_requests_spec.rb b/spec/requests/api/graphql/project/merge_requests_spec.rb
index 1b0405be09c..b0bedd99fce 100644
--- a/spec/requests/api/graphql/project/merge_requests_spec.rb
+++ b/spec/requests/api/graphql/project/merge_requests_spec.rb
@@ -385,7 +385,7 @@ RSpec.describe 'getting merge request listings nested in a project' do
context 'when sorting by merged_at DESC' do
let(:sort_param) { :MERGED_AT_DESC }
- let(:expected_results) do
+ let(:all_records) do
[
merge_request_b,
merge_request_d,
@@ -418,14 +418,14 @@ RSpec.describe 'getting merge request listings nested in a project' do
query = pagination_query(params)
post_graphql(query, current_user: current_user)
- expect(results.map { |item| item["id"] }).to eq(expected_results.last(2))
+ expect(results.map { |item| item["id"] }).to eq(all_records.last(2))
end
end
end
context 'when sorting by closed_at DESC' do
let(:sort_param) { :CLOSED_AT_DESC }
- let(:expected_results) do
+ let(:all_records) do
[
merge_request_b,
merge_request_d,
@@ -458,7 +458,7 @@ RSpec.describe 'getting merge request listings nested in a project' do
query = pagination_query(params)
post_graphql(query, current_user: current_user)
- expect(results.map { |item| item["id"] }).to eq(expected_results.last(2))
+ expect(results.map { |item| item["id"] }).to eq(all_records.last(2))
end
end
end
diff --git a/spec/requests/api/graphql/project/releases_spec.rb b/spec/requests/api/graphql/project/releases_spec.rb
index 8ccdb955ed9..2816ce90a6b 100644
--- a/spec/requests/api/graphql/project/releases_spec.rb
+++ b/spec/requests/api/graphql/project/releases_spec.rb
@@ -322,17 +322,17 @@ RSpec.describe 'Query.project(fullPath).releases()' do
context 'when ascending' do
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { :RELEASED_AT_ASC }
- let(:first_param) { 2 }
- let(:expected_results) { [release1.tag, release2.tag, release3.tag, release4.tag, release5.tag] }
+ let(:sort_param) { :RELEASED_AT_ASC }
+ let(:first_param) { 2 }
+ let(:all_records) { [release1.tag, release2.tag, release3.tag, release4.tag, release5.tag] }
end
end
context 'when descending' do
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { :RELEASED_AT_DESC }
- let(:first_param) { 2 }
- let(:expected_results) { [release5.tag, release4.tag, release3.tag, release2.tag, release1.tag] }
+ let(:sort_param) { :RELEASED_AT_DESC }
+ let(:first_param) { 2 }
+ let(:all_records) { [release5.tag, release4.tag, release3.tag, release2.tag, release1.tag] }
end
end
end
@@ -346,17 +346,17 @@ RSpec.describe 'Query.project(fullPath).releases()' do
context 'when ascending' do
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { :CREATED_ASC }
- let(:first_param) { 2 }
- let(:expected_results) { [release1.tag, release2.tag, release3.tag, release4.tag, release5.tag] }
+ let(:sort_param) { :CREATED_ASC }
+ let(:first_param) { 2 }
+ let(:all_records) { [release1.tag, release2.tag, release3.tag, release4.tag, release5.tag] }
end
end
context 'when descending' do
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { :CREATED_DESC }
- let(:first_param) { 2 }
- let(:expected_results) { [release5.tag, release4.tag, release3.tag, release2.tag, release1.tag] }
+ let(:sort_param) { :CREATED_DESC }
+ let(:first_param) { 2 }
+ let(:all_records) { [release5.tag, release4.tag, release3.tag, release2.tag, release1.tag] }
end
end
end
diff --git a/spec/requests/api/graphql/users_spec.rb b/spec/requests/api/graphql/users_spec.rb
index 22b68fbc9bb..67cd35ee545 100644
--- a/spec/requests/api/graphql/users_spec.rb
+++ b/spec/requests/api/graphql/users_spec.rb
@@ -114,17 +114,17 @@ RSpec.describe 'Users' do
context 'when ascending' do
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { :CREATED_ASC }
- let(:first_param) { 1 }
- let(:expected_results) { ascending_users }
+ let(:sort_param) { :CREATED_ASC }
+ let(:first_param) { 1 }
+ let(:all_records) { ascending_users }
end
end
context 'when descending' do
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { :CREATED_DESC }
- let(:first_param) { 1 }
- let(:expected_results) { ascending_users.reverse }
+ let(:sort_param) { :CREATED_DESC }
+ let(:first_param) { 1 }
+ let(:all_records) { ascending_users.reverse }
end
end
end
diff --git a/spec/requests/api/group_container_repositories_spec.rb b/spec/requests/api/group_container_repositories_spec.rb
index fdbf910e4bc..bf29bd91414 100644
--- a/spec/requests/api/group_container_repositories_spec.rb
+++ b/spec/requests/api/group_container_repositories_spec.rb
@@ -20,12 +20,14 @@ RSpec.describe API::GroupContainerRepositories do
end
let(:api_user) { reporter }
+ let(:params) { {} }
before do
group.add_reporter(reporter)
group.add_guest(guest)
stub_container_registry_config(enabled: true)
+ stub_container_registry_info
root_repository
test_repository
@@ -35,10 +37,13 @@ RSpec.describe API::GroupContainerRepositories do
let(:url) { "/groups/#{group.id}/registry/repositories" }
let(:snowplow_gitlab_standard_context) { { user: api_user, namespace: group } }
- subject { get api(url, api_user) }
+ subject { get api(url, api_user), params: params }
it_behaves_like 'rejected container repository access', :guest, :forbidden
it_behaves_like 'rejected container repository access', :anonymous, :not_found
+ it_behaves_like 'handling network errors with the container registry' do
+ let(:params) { { tags: true } }
+ end
it_behaves_like 'returns repositories for allowed users', :reporter, 'group' do
let(:object) { group }
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 38abedde7da..2c7e2ecff85 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -728,16 +728,16 @@ RSpec.describe API::Groups do
end
it 'avoids N+1 queries with project links' do
- get api("/groups/#{group1.id}", admin)
+ get api("/groups/#{group1.id}", user1)
control_count = ActiveRecord::QueryRecorder.new do
- get api("/groups/#{group1.id}", admin)
+ get api("/groups/#{group1.id}", user1)
end.count
create(:project, namespace: group1)
expect do
- get api("/groups/#{group1.id}", admin)
+ get api("/groups/#{group1.id}", user1)
end.not_to exceed_query_limit(control_count)
end
@@ -746,7 +746,7 @@ RSpec.describe API::Groups do
create(:group_group_link, shared_group: group1, shared_with_group: create(:group))
control_count = ActiveRecord::QueryRecorder.new do
- get api("/groups/#{group1.id}", admin)
+ get api("/groups/#{group1.id}", user1)
end.count
# setup "n" more shared groups
@@ -755,7 +755,7 @@ RSpec.describe API::Groups do
# test that no of queries for 1 shared group is same as for n shared groups
expect do
- get api("/groups/#{group1.id}", admin)
+ get api("/groups/#{group1.id}", user1)
end.not_to exceed_query_limit(control_count)
end
end
@@ -1179,6 +1179,20 @@ RSpec.describe API::Groups do
expect(json_response.length).to eq(1)
expect(json_response.first['name']).to eq(project1.name)
end
+
+ it 'avoids N+1 queries' do
+ get api("/groups/#{group1.id}/projects", user1)
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ get api("/groups/#{group1.id}/projects", user1)
+ end.count
+
+ create(:project, namespace: group1)
+
+ expect do
+ get api("/groups/#{group1.id}/projects", user1)
+ end.not_to exceed_query_limit(control_count)
+ end
end
context "when authenticated as admin" do
@@ -1196,20 +1210,6 @@ RSpec.describe API::Groups do
expect(response).to have_gitlab_http_status(:not_found)
end
-
- it 'avoids N+1 queries' do
- get api("/groups/#{group1.id}/projects", admin)
-
- control_count = ActiveRecord::QueryRecorder.new do
- get api("/groups/#{group1.id}/projects", admin)
- end.count
-
- create(:project, namespace: group1)
-
- expect do
- get api("/groups/#{group1.id}/projects", admin)
- end.not_to exceed_query_limit(control_count)
- end
end
context 'when using group path in URL' do
diff --git a/spec/requests/api/helm_packages_spec.rb b/spec/requests/api/helm_packages_spec.rb
index 3236857c5fc..5212e225351 100644
--- a/spec/requests/api/helm_packages_spec.rb
+++ b/spec/requests/api/helm_packages_spec.rb
@@ -18,11 +18,11 @@ RSpec.describe API::HelmPackages do
let_it_be(:other_package) { create(:npm_package, project: project) }
describe 'GET /api/v4/projects/:id/packages/helm/:channel/index.yaml' do
- let(:url) { "/projects/#{project_id}/packages/helm/stable/index.yaml" }
+ let(:project_id) { project.id }
+ let(:channel) { 'stable' }
+ let(:url) { "/projects/#{project_id}/packages/helm/#{channel}/index.yaml" }
context 'with a project id' do
- let(:project_id) { project.id }
-
it_behaves_like 'handling helm chart index requests'
end
@@ -31,6 +31,18 @@ RSpec.describe API::HelmPackages do
it_behaves_like 'handling helm chart index requests'
end
+
+ context 'with dot in channel' do
+ let(:channel) { 'with.dot' }
+
+ subject { get api(url) }
+
+ before do
+ project.update!(visibility: 'public')
+ end
+
+ it_behaves_like 'returning response status', :success
+ end
end
describe 'GET /api/v4/projects/:id/packages/helm/:channel/charts/:file_name.tgz' do
diff --git a/spec/requests/api/integrations_spec.rb b/spec/requests/api/integrations_spec.rb
new file mode 100644
index 00000000000..649647804c0
--- /dev/null
+++ b/spec/requests/api/integrations_spec.rb
@@ -0,0 +1,363 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe API::Integrations do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+
+ let_it_be(:project, reload: true) do
+ create(:project, creator_id: user.id, namespace: user.namespace)
+ end
+
+ %w[integrations services].each do |endpoint|
+ describe "GET /projects/:id/#{endpoint}" do
+ it 'returns authentication error when unauthenticated' do
+ get api("/projects/#{project.id}/#{endpoint}")
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+
+ it "returns error when authenticated but user is not a project owner" do
+ project.add_developer(user2)
+ get api("/projects/#{project.id}/#{endpoint}", user2)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'with integrations' do
+ let!(:active_integration) { create(:emails_on_push_integration, project: project, active: true) }
+ let!(:integration) { create(:custom_issue_tracker_integration, project: project, active: false) }
+
+ it "returns a list of all active integrations" do
+ get api("/projects/#{project.id}/#{endpoint}", user)
+
+ aggregate_failures 'expect successful response with all active integrations' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.count).to eq(1)
+ expect(json_response.first['slug']).to eq('emails-on-push')
+ expect(response).to match_response_schema('public_api/v4/integrations')
+ end
+ end
+ end
+ end
+
+ Integration.available_integration_names.each do |integration|
+ describe "PUT /projects/:id/#{endpoint}/#{integration.dasherize}" do
+ include_context integration
+
+ it "updates #{integration} settings" do
+ put api("/projects/#{project.id}/#{endpoint}/#{dashed_integration}", user), params: integration_attrs
+
+ expect(response).to have_gitlab_http_status(:ok)
+
+ current_integration = project.integrations.first
+ events = current_integration.event_names.empty? ? ["foo"].freeze : current_integration.event_names
+ query_strings = []
+ events.each do |event|
+ query_strings << "#{event}=#{!current_integration[event]}"
+ end
+ query_strings = query_strings.join('&')
+
+ put api("/projects/#{project.id}/#{endpoint}/#{dashed_integration}?#{query_strings}", user), params: integration_attrs
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['slug']).to eq(dashed_integration)
+ events.each do |event|
+ next if event == "foo"
+
+ expect(project.integrations.first[event]).not_to eq(current_integration[event]),
+ "expected #{!current_integration[event]} for event #{event} for #{endpoint} #{current_integration.title}, got #{current_integration[event]}"
+ end
+ end
+
+ it "returns if required fields missing" do
+ required_attributes = integration_attrs_list.select do |attr|
+ integration_klass.validators_on(attr).any? do |v|
+ v.instance_of?(ActiveRecord::Validations::PresenceValidator) &&
+ # exclude presence validators with conditional since those are not really required
+ ![:if, :unless].any? { |cond| v.options.include?(cond) }
+ end
+ end
+
+ if required_attributes.empty?
+ expected_code = :ok
+ else
+ integration_attrs.delete(required_attributes.sample)
+ expected_code = :bad_request
+ end
+
+ put api("/projects/#{project.id}/#{endpoint}/#{dashed_integration}", user), params: integration_attrs
+
+ expect(response).to have_gitlab_http_status(expected_code)
+ end
+ end
+
+ describe "DELETE /projects/:id/#{endpoint}/#{integration.dasherize}" do
+ include_context integration
+
+ before do
+ initialize_integration(integration)
+ end
+
+ it "deletes #{integration}" do
+ delete api("/projects/#{project.id}/#{endpoint}/#{dashed_integration}", user)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ project.send(integration_method).reload
+ expect(project.send(integration_method).activated?).to be_falsey
+ end
+ end
+
+ describe "GET /projects/:id/#{endpoint}/#{integration.dasherize}" do
+ include_context integration
+
+ let!(:initialized_integration) { initialize_integration(integration, active: true) }
+
+ let_it_be(:project2) do
+ create(:project, creator_id: user.id, namespace: user.namespace)
+ end
+
+ def deactive_integration!
+ return initialized_integration.update!(active: false) unless initialized_integration.is_a?(::Integrations::Prometheus)
+
+ # Integrations::Prometheus sets `#active` itself within a `before_save`:
+ initialized_integration.manual_configuration = false
+ initialized_integration.save!
+ end
+
+ it 'returns authentication error when unauthenticated' do
+ get api("/projects/#{project.id}/#{endpoint}/#{dashed_integration}")
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+
+ it "returns all properties of active integration #{integration}" do
+ get api("/projects/#{project.id}/#{endpoint}/#{dashed_integration}", user)
+
+ expect(initialized_integration).to be_active
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['properties'].keys).to match_array(integration_instance.api_field_names)
+ end
+
+ it "returns all properties of inactive integration #{integration}" do
+ deactive_integration!
+
+ get api("/projects/#{project.id}/#{endpoint}/#{dashed_integration}", user)
+
+ expect(initialized_integration).not_to be_active
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['properties'].keys).to match_array(integration_instance.api_field_names)
+ end
+
+ it "returns not found if integration does not exist" do
+ get api("/projects/#{project2.id}/#{endpoint}/#{dashed_integration}", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Integration Not Found')
+ end
+
+ it "returns not found if integration exists but is in `Project#disabled_integrations`" do
+ expect_next_found_instance_of(Project) do |project|
+ expect(project).to receive(:disabled_integrations).at_least(:once).and_return([integration])
+ end
+
+ get api("/projects/#{project.id}/#{endpoint}/#{dashed_integration}", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Integration Not Found')
+ end
+
+ it "returns error when authenticated but not a project owner" do
+ project.add_developer(user2)
+ get api("/projects/#{project.id}/#{endpoint}/#{dashed_integration}", user2)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ describe "POST /projects/:id/#{endpoint}/:slug/trigger" do
+ describe 'Mattermost integration' do
+ let(:integration_name) { 'mattermost_slash_commands' }
+
+ context 'when no integration is available' do
+ it 'returns a not found message' do
+ post api("/projects/#{project.id}/#{endpoint}/idonotexist/trigger")
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response["error"]).to eq("404 Not Found")
+ end
+ end
+
+ context 'when the integration exists' do
+ let(:params) { { token: 'token' } }
+
+ context 'when the integration is not active' do
+ before do
+ project.create_mattermost_slash_commands_integration(
+ active: false,
+ properties: params
+ )
+ end
+
+ it 'when the integration is inactive' do
+ post api("/projects/#{project.id}/#{endpoint}/#{integration_name}/trigger"), params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when the integration is active' do
+ before do
+ project.create_mattermost_slash_commands_integration(
+ active: true,
+ properties: params
+ )
+ end
+
+ it 'returns status 200' do
+ post api("/projects/#{project.id}/#{endpoint}/#{integration_name}/trigger"), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when the project can not be found' do
+ it 'returns a generic 404' do
+ post api("/projects/404/#{endpoint}/#{integration_name}/trigger"), params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response["message"]).to eq("404 Integration Not Found")
+ end
+ end
+ end
+ end
+
+ describe 'Slack Integration' do
+ let(:integration_name) { 'slack_slash_commands' }
+
+ before do
+ project.create_slack_slash_commands_integration(
+ active: true,
+ properties: { token: 'token' }
+ )
+ end
+
+ it 'returns status 200' do
+ post api("/projects/#{project.id}/#{endpoint}/#{integration_name}/trigger"), params: { token: 'token', text: 'help' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['response_type']).to eq("ephemeral")
+ end
+ end
+ end
+
+ describe 'Mattermost integration' do
+ let(:integration_name) { 'mattermost' }
+ let(:params) do
+ { webhook: 'https://hook.example.com', username: 'username' }
+ end
+
+ before do
+ project.create_mattermost_integration(
+ active: true,
+ properties: params
+ )
+ end
+
+ it 'accepts a username for update' do
+ put api("/projects/#{project.id}/#{endpoint}/#{integration_name}", user), params: params.merge(username: 'new_username')
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['properties']['username']).to eq('new_username')
+ end
+ end
+
+ describe 'Microsoft Teams integration' do
+ let(:integration_name) { 'microsoft-teams' }
+ let(:params) do
+ {
+ webhook: 'https://hook.example.com',
+ branches_to_be_notified: 'default',
+ notify_only_broken_pipelines: false
+ }
+ end
+
+ before do
+ project.create_microsoft_teams_integration(
+ active: true,
+ properties: params
+ )
+ end
+
+ it 'accepts branches_to_be_notified for update' do
+ put api("/projects/#{project.id}/#{endpoint}/#{integration_name}", user),
+ params: params.merge(branches_to_be_notified: 'all')
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['properties']['branches_to_be_notified']).to eq('all')
+ end
+
+ it 'accepts notify_only_broken_pipelines for update' do
+ put api("/projects/#{project.id}/#{endpoint}/#{integration_name}", user),
+ params: params.merge(notify_only_broken_pipelines: true)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['properties']['notify_only_broken_pipelines']).to eq(true)
+ end
+ end
+
+ describe 'Hangouts Chat integration' do
+ let(:integration_name) { 'hangouts-chat' }
+ let(:params) do
+ {
+ webhook: 'https://hook.example.com',
+ branches_to_be_notified: 'default'
+ }
+ end
+
+ before do
+ project.create_hangouts_chat_integration(
+ active: true,
+ properties: params
+ )
+ end
+
+ it 'accepts branches_to_be_notified for update', :aggregate_failures do
+ put api("/projects/#{project.id}/#{endpoint}/#{integration_name}", user), params: params.merge(branches_to_be_notified: 'all')
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['properties']['branches_to_be_notified']).to eq('all')
+ end
+
+ it 'only requires the webhook param' do
+ put api("/projects/#{project.id}/#{endpoint}/#{integration_name}", user), params: { webhook: 'https://hook.example.com' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ describe 'Pipelines Email Integration' do
+ let(:integration_name) { 'pipelines-email' }
+
+ context 'notify_only_broken_pipelines property was saved as a string' do
+ before do
+ project.create_pipelines_email_integration(
+ active: false,
+ properties: {
+ "notify_only_broken_pipelines": "true",
+ "branches_to_be_notified": "default"
+ }
+ )
+ end
+
+ it 'returns boolean values for notify_only_broken_pipelines' do
+ get api("/projects/#{project.id}/#{endpoint}/#{integration_name}", user)
+
+ expect(json_response['properties']['notify_only_broken_pipelines']).to eq(true)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index 49756df61c6..aeca4e435f4 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -609,7 +609,7 @@ RSpec.describe API::Internal::Base do
end
context 'with Project' do
- it_behaves_like 'storing arguments in the application context' do
+ it_behaves_like 'storing arguments in the application context for the API' do
let(:expected_params) { { user: key.user.username, project: project.full_path, caller_id: "POST /api/:version/internal/allowed" } }
subject { push(key, project) }
@@ -617,7 +617,7 @@ RSpec.describe API::Internal::Base do
end
context 'with PersonalSnippet' do
- it_behaves_like 'storing arguments in the application context' do
+ it_behaves_like 'storing arguments in the application context for the API' do
let(:expected_params) { { user: key.user.username, caller_id: "POST /api/:version/internal/allowed" } }
subject { push(key, personal_snippet) }
@@ -625,7 +625,7 @@ RSpec.describe API::Internal::Base do
end
context 'with ProjectSnippet' do
- it_behaves_like 'storing arguments in the application context' do
+ it_behaves_like 'storing arguments in the application context for the API' do
let(:expected_params) { { user: key.user.username, project: project_snippet.project.full_path, caller_id: "POST /api/:version/internal/allowed" } }
subject { push(key, project_snippet) }
@@ -1197,7 +1197,7 @@ RSpec.describe API::Internal::Base do
subject
end
- it_behaves_like 'storing arguments in the application context' do
+ it_behaves_like 'storing arguments in the application context for the API' do
let(:expected_params) { expected_context }
end
end
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
index 24422f7b0dd..245e4e6ba15 100644
--- a/spec/requests/api/internal/kubernetes_spec.rb
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -177,94 +177,4 @@ RSpec.describe API::Internal::Kubernetes do
end
end
end
-
- describe 'GET /internal/kubernetes/project_info' do
- def send_request(headers: {}, params: {})
- get api('/internal/kubernetes/project_info'), params: params, headers: headers.reverse_merge(jwt_auth_headers)
- end
-
- include_examples 'authorization'
- include_examples 'agent authentication'
-
- context 'an agent is found' do
- let_it_be(:agent_token) { create(:cluster_agent_token) }
-
- shared_examples 'agent token tracking'
-
- context 'project is public' do
- let(:project) { create(:project, :public) }
-
- it 'returns expected data', :aggregate_failures do
- send_request(params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
-
- expect(response).to have_gitlab_http_status(:success)
-
- expect(json_response).to match(
- a_hash_including(
- 'project_id' => project.id,
- 'gitaly_info' => a_hash_including(
- 'address' => match(/\.socket$/),
- 'token' => 'secret',
- 'features' => {}
- ),
- 'gitaly_repository' => a_hash_including(
- 'storage_name' => project.repository_storage,
- 'relative_path' => project.disk_path + '.git',
- 'gl_repository' => "project-#{project.id}",
- 'gl_project_path' => project.full_path
- )
- )
- )
- end
-
- context 'repository is for project members only' do
- let(:project) { create(:project, :public, :repository_private) }
-
- it 'returns 404' do
- send_request(params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- context 'project is private' do
- let(:project) { create(:project, :private) }
-
- it 'returns 404' do
- send_request(params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
-
- context 'and agent belongs to project' do
- let(:agent_token) { create(:cluster_agent_token, agent: create(:cluster_agent, project: project)) }
-
- it 'returns 200' do
- send_request(params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
-
- expect(response).to have_gitlab_http_status(:success)
- end
- end
- end
-
- context 'project is internal' do
- let(:project) { create(:project, :internal) }
-
- it 'returns 404' do
- send_request(params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'project does not exist' do
- it 'returns 404' do
- send_request(params: { id: non_existing_record_id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
- end
end
diff --git a/spec/requests/api/issues/issues_spec.rb b/spec/requests/api/issues/issues_spec.rb
index 8a33e63b80b..9204ee4d7f0 100644
--- a/spec/requests/api/issues/issues_spec.rb
+++ b/spec/requests/api/issues/issues_spec.rb
@@ -138,6 +138,12 @@ RSpec.describe API::Issues do
expect(json_response).to be_an Array
end
+ it_behaves_like 'issuable anonymous search' do
+ let(:url) { '/issues' }
+ let(:issuable) { issue }
+ let(:result) { issuable.id }
+ end
+
it 'returns authentication error without any scope' do
get api('/issues')
@@ -256,6 +262,38 @@ RSpec.describe API::Issues do
it_behaves_like 'issues statistics'
end
+
+ context 'with search param' do
+ let(:params) { { scope: 'all', search: 'foo' } }
+ let(:counts) { { all: 1, closed: 0, opened: 1 } }
+
+ it_behaves_like 'issues statistics'
+
+ context 'with anonymous user' do
+ let(:user) { nil }
+
+ context 'with disable_anonymous_search disabled' do
+ before do
+ stub_feature_flags(disable_anonymous_search: false)
+ end
+
+ it_behaves_like 'issues statistics'
+ end
+
+ context 'with disable_anonymous_search enabled' do
+ before do
+ stub_feature_flags(disable_anonymous_search: true)
+ end
+
+ it 'returns a unprocessable entity 422' do
+ get api("/issues_statistics"), params: params
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']).to include('User must be authenticated to use search')
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/requests/api/issues/post_projects_issues_spec.rb b/spec/requests/api/issues/post_projects_issues_spec.rb
index 9d3bd26a200..82692366589 100644
--- a/spec/requests/api/issues/post_projects_issues_spec.rb
+++ b/spec/requests/api/issues/post_projects_issues_spec.rb
@@ -8,15 +8,15 @@ RSpec.describe API::Issues do
create(:project, :public, creator_id: user.id, namespace: user.namespace)
end
- let(:user2) { create(:user) }
- let(:non_member) { create(:user) }
- let_it_be(:guest) { create(:user) }
- let_it_be(:author) { create(:author) }
- let_it_be(:assignee) { create(:assignee) }
- let(:admin) { create(:user, :admin) }
- let(:issue_title) { 'foo' }
- let(:issue_description) { 'closed' }
- let!(:closed_issue) do
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:non_member) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:author) { create(:author) }
+ let_it_be(:milestone) { create(:milestone, title: '1.0.0', project: project) }
+ let_it_be(:assignee) { create(:assignee) }
+ let_it_be(:admin) { create(:user, :admin) }
+
+ let_it_be(:closed_issue) do
create :closed_issue,
author: user,
assignees: [user],
@@ -28,7 +28,7 @@ RSpec.describe API::Issues do
closed_at: 1.hour.ago
end
- let!(:confidential_issue) do
+ let_it_be(:confidential_issue) do
create :issue,
:confidential,
project: project,
@@ -38,7 +38,7 @@ RSpec.describe API::Issues do
updated_at: 2.hours.ago
end
- let!(:issue) do
+ let_it_be(:issue) do
create :issue,
author: user,
assignees: [user],
@@ -46,22 +46,21 @@ RSpec.describe API::Issues do
milestone: milestone,
created_at: generate(:past_time),
updated_at: 1.hour.ago,
- title: issue_title,
- description: issue_description
+ title: 'foo',
+ description: 'closed'
end
+ let_it_be(:note) { create(:note_on_issue, author: user, project: project, noteable: issue) }
+
let_it_be(:label) do
create(:label, title: 'label', color: '#FFAABB', project: project)
end
let!(:label_link) { create(:label_link, label: label, target: issue) }
- let(:milestone) { create(:milestone, title: '1.0.0', project: project) }
let_it_be(:empty_milestone) do
create(:milestone, title: '2.0.0', project: project)
end
- let!(:note) { create(:note_on_issue, author: user, project: project, noteable: issue) }
-
let(:no_milestone_title) { 'None' }
let(:any_milestone_title) { 'Any' }
@@ -400,16 +399,15 @@ RSpec.describe API::Issues do
end
context 'when request exceeds the rate limit' do
- before do
+ it 'prevents users from creating more issues' do
allow(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
- end
- it 'prevents users from creating more issues' do
post api("/projects/#{project.id}/issues", user),
params: { title: 'new issue', labels: 'label, label2', weight: 3, assignee_ids: [user2.id] }
- expect(response).to have_gitlab_http_status(:too_many_requests)
expect(json_response['message']['error']).to eq('This endpoint has been requested too many times. Try again later.')
+
+ expect(response).to have_gitlab_http_status(:too_many_requests)
end
end
end
@@ -517,7 +515,7 @@ RSpec.describe API::Issues do
end
context 'when using the issue ID instead of iid' do
- it 'returns 404 when trying to move an issue' do
+ it 'returns 404 when trying to move an issue', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341520' do
post api("/projects/#{project.id}/issues/#{issue.id}/move", user),
params: { to_project_id: target_project.id }
@@ -556,6 +554,114 @@ RSpec.describe API::Issues do
end
end
+ describe '/projects/:id/issues/:issue_iid/clone' do
+ let_it_be(:valid_target_project) { create(:project) }
+ let_it_be(:invalid_target_project) { create(:project) }
+
+ before_all do
+ valid_target_project.add_maintainer(user)
+ end
+
+ context 'when user can admin the issue' do
+ context 'when the user can admin the target project' do
+ it 'clones the issue' do
+ expect do
+ post_clone_issue(user, issue, valid_target_project)
+ end.to change { valid_target_project.issues.count }.by(1)
+
+ cloned_issue = Issue.last
+
+ expect(cloned_issue.notes.count).to eq(2)
+ expect(cloned_issue.notes.pluck(:note)).not_to include(issue.notes.first.note)
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['id']).to eq(cloned_issue.id)
+ expect(json_response['project_id']).to eq(valid_target_project.id)
+ end
+
+ context 'when target project is the same source project' do
+ it 'clones the issue' do
+ expect do
+ post_clone_issue(user, issue, issue.project)
+ end.to change { issue.reset.project.issues.count }.by(1)
+
+ cloned_issue = Issue.last
+
+ expect(cloned_issue.notes.count).to eq(2)
+ expect(cloned_issue.notes.pluck(:note)).not_to include(issue.notes.first.note)
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['id']).to eq(cloned_issue.id)
+ expect(json_response['project_id']).to eq(issue.project.id)
+ end
+ end
+ end
+ end
+
+ context 'when the user does not have the permission to clone issues' do
+ it 'returns 400' do
+ post api("/projects/#{project.id}/issues/#{issue.iid}/clone", user),
+ params: { to_project_id: invalid_target_project.id }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq(s_('CloneIssue|Cannot clone issue due to insufficient permissions!'))
+ end
+ end
+
+ context 'when using the issue ID instead of iid' do
+ it 'returns 404', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341520' do
+ post api("/projects/#{project.id}/issues/#{issue.id}/clone", user),
+ params: { to_project_id: valid_target_project.id }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Issue Not Found')
+ end
+ end
+
+ context 'when issue does not exist' do
+ it 'returns 404' do
+ post api("/projects/#{project.id}/issues/12300/clone", user),
+ params: { to_project_id: valid_target_project.id }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Issue Not Found')
+ end
+ end
+
+ context 'when source project does not exist' do
+ it 'returns 404' do
+ post api("/projects/0/issues/#{issue.iid}/clone", user),
+ params: { to_project_id: valid_target_project.id }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Project Not Found')
+ end
+ end
+
+ context 'when target project does not exist' do
+ it 'returns 404' do
+ post api("/projects/#{project.id}/issues/#{issue.iid}/clone", user),
+ params: { to_project_id: 0 }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Project Not Found')
+ end
+ end
+
+ it 'clones the issue with notes when with_notes is true' do
+ expect do
+ post api("/projects/#{project.id}/issues/#{issue.iid}/clone", user),
+ params: { to_project_id: valid_target_project.id, with_notes: true }
+ end.to change { valid_target_project.issues.count }.by(1)
+
+ cloned_issue = Issue.last
+
+ expect(cloned_issue.notes.count).to eq(3)
+ expect(cloned_issue.notes.pluck(:note)).to include(issue.notes.first.note)
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['id']).to eq(cloned_issue.id)
+ expect(json_response['project_id']).to eq(valid_target_project.id)
+ end
+ end
+
describe 'POST :id/issues/:issue_iid/subscribe' do
it 'subscribes to an issue' do
post api("/projects/#{project.id}/issues/#{issue.iid}/subscribe", user2)
@@ -576,7 +682,7 @@ RSpec.describe API::Issues do
expect(response).to have_gitlab_http_status(:not_found)
end
- it 'returns 404 if the issue ID is used instead of the iid' do
+ it 'returns 404 if the issue ID is used instead of the iid', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341520' do
post api("/projects/#{project.id}/issues/#{issue.id}/subscribe", user)
expect(response).to have_gitlab_http_status(:not_found)
@@ -609,7 +715,7 @@ RSpec.describe API::Issues do
expect(response).to have_gitlab_http_status(:not_found)
end
- it 'returns 404 if using the issue ID instead of iid' do
+ it 'returns 404 if using the issue ID instead of iid', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341520' do
post api("/projects/#{project.id}/issues/#{issue.id}/unsubscribe", user)
expect(response).to have_gitlab_http_status(:not_found)
@@ -621,4 +727,9 @@ RSpec.describe API::Issues do
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ def post_clone_issue(current_user, issue, target_project)
+ post api("/projects/#{issue.project.id}/issues/#{issue.iid}/clone", current_user),
+ params: { to_project_id: target_project.id }
+ end
end
diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb
index 07111dd1d62..5a682ee8532 100644
--- a/spec/requests/api/maven_packages_spec.rb
+++ b/spec/requests/api/maven_packages_spec.rb
@@ -798,8 +798,6 @@ RSpec.describe API::MavenPackages do
end
describe 'PUT /api/v4/projects/:id/packages/maven/*path/:file_name' do
- include_context 'workhorse headers'
-
let(:send_rewritten_field) { true }
let(:file_upload) { fixture_file_upload('spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.jar') }
@@ -833,6 +831,8 @@ RSpec.describe API::MavenPackages do
context 'when params from workhorse are correct' do
let(:params) { { file: file_upload } }
+ subject { upload_file_with_token(params: params) }
+
context 'file size is too large' do
it 'rejects the request' do
allow_next_instance_of(UploadedFile) do |uploaded_file|
@@ -851,18 +851,20 @@ RSpec.describe API::MavenPackages do
expect(response).to have_gitlab_http_status(:bad_request)
end
- context 'without workhorse header' do
- let(:workhorse_headers) { {} }
-
- subject { upload_file_with_token(params: params) }
-
- it_behaves_like 'package workhorse uploads'
- end
+ it_behaves_like 'package workhorse uploads'
context 'event tracking' do
- subject { upload_file_with_token(params: params) }
-
it_behaves_like 'a package tracking event', described_class.name, 'push_package'
+
+ context 'when the package file fails to be created' do
+ before do
+ allow_next_instance_of(::Packages::CreatePackageFileService) do |create_package_file_service|
+ allow(create_package_file_service).to receive(:execute).and_raise(StandardError)
+ end
+ end
+
+ it_behaves_like 'not a package tracking event'
+ end
end
it 'creates package and stores package file' do
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 7a587e82683..bdbc73a59d8 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -49,6 +49,12 @@ RSpec.describe API::MergeRequests do
expect_successful_response_with_paginated_array
end
+
+ it_behaves_like 'issuable anonymous search' do
+ let(:url) { endpoint_path }
+ let(:issuable) { merge_request }
+ let(:result) { [merge_request_merged.id, merge_request_locked.id, merge_request_closed.id, merge_request.id] }
+ end
end
context 'when authenticated' do
@@ -613,6 +619,12 @@ RSpec.describe API::MergeRequests do
)
end
+ it_behaves_like 'issuable anonymous search' do
+ let(:url) { '/merge_requests' }
+ let(:issuable) { merge_request }
+ let(:result) { [merge_request_merged.id, merge_request_locked.id, merge_request_closed.id, merge_request.id] }
+ end
+
it "returns authentication error without any scope" do
get api("/merge_requests")
diff --git a/spec/requests/api/package_files_spec.rb b/spec/requests/api/package_files_spec.rb
index 137ded050c5..eb1f04d193e 100644
--- a/spec/requests/api/package_files_spec.rb
+++ b/spec/requests/api/package_files_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe API::PackageFiles do
expect(response).to have_gitlab_http_status(:not_found)
end
- it 'returns 404 for a user without access to the project' do
+ it 'returns 404 for a user without access to the project', :sidekiq_inline do
project.team.truncate
get api(url, user)
diff --git a/spec/requests/api/project_container_repositories_spec.rb b/spec/requests/api/project_container_repositories_spec.rb
index 1170a9ba6cb..196b0395ec0 100644
--- a/spec/requests/api/project_container_repositories_spec.rb
+++ b/spec/requests/api/project_container_repositories_spec.rb
@@ -52,6 +52,7 @@ RSpec.describe API::ProjectContainerRepositories do
test_repository
stub_container_registry_config(enabled: true)
+ stub_container_registry_info
end
shared_context 'using API user' do
@@ -105,6 +106,9 @@ RSpec.describe API::ProjectContainerRepositories do
it_behaves_like 'rejected container repository access', :guest, :forbidden unless context == 'using job token'
it_behaves_like 'rejected container repository access', :anonymous, :not_found
it_behaves_like 'a package tracking event', described_class.name, 'list_repositories'
+ it_behaves_like 'handling network errors with the container registry' do
+ let(:params) { { tags: true } }
+ end
it_behaves_like 'returns repositories for allowed users', :reporter, 'project' do
let(:object) { project }
@@ -154,6 +158,7 @@ RSpec.describe API::ProjectContainerRepositories do
it_behaves_like 'rejected container repository access', :guest, :forbidden unless context == 'using job token'
it_behaves_like 'rejected container repository access', :anonymous, :not_found
+ it_behaves_like 'handling network errors with the container registry'
context 'for reporter' do
let(:api_user) { reporter }
diff --git a/spec/requests/api/project_export_spec.rb b/spec/requests/api/project_export_spec.rb
index 06f4475ef79..b9c458373a8 100644
--- a/spec/requests/api/project_export_spec.rb
+++ b/spec/requests/api/project_export_spec.rb
@@ -457,4 +457,143 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache do
end
end
end
+
+ describe 'export relations' do
+ let(:relation) { 'labels' }
+ let(:download_path) { "/projects/#{project.id}/export_relations/download?relation=#{relation}" }
+ let(:path) { "/projects/#{project.id}/export_relations" }
+
+ let_it_be(:status_path) { "/projects/#{project.id}/export_relations/status" }
+
+ context 'when user is a maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ describe 'POST /projects/:id/export_relations' do
+ it 'accepts the request' do
+ post api(path, user)
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+
+ context 'when response is not success' do
+ it 'returns api error' do
+ allow_next_instance_of(BulkImports::ExportService) do |service|
+ allow(service).to receive(:execute).and_return(ServiceResponse.error(message: 'error', http_status: :error))
+ end
+
+ post api(path, user)
+
+ expect(response).to have_gitlab_http_status(:error)
+ end
+ end
+ end
+
+ describe 'GET /projects/:id/export_relations/download' do
+ let_it_be(:export) { create(:bulk_import_export, project: project, relation: 'labels') }
+ let_it_be(:upload) { create(:bulk_import_export_upload, export: export) }
+
+ context 'when export file exists' do
+ it 'downloads exported project relation archive' do
+ upload.update!(export_file: fixture_file_upload('spec/fixtures/bulk_imports/gz/labels.ndjson.gz'))
+
+ get api(download_path, user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.header['Content-Disposition']).to eq("attachment; filename=\"labels.ndjson.gz\"; filename*=UTF-8''labels.ndjson.gz")
+ end
+ end
+
+ context 'when relation is not portable' do
+ let(:relation) { ::BulkImports::FileTransfer::ProjectConfig.new(project).skipped_relations.first }
+
+ it_behaves_like '400 response' do
+ let(:request) { get api(download_path, user) }
+ end
+ end
+
+ context 'when export file does not exist' do
+ it 'returns 404' do
+ allow(upload).to receive(:export_file).and_return(nil)
+
+ get api(download_path, user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'GET /projects/:id/export_relations/status' do
+ it 'returns a list of relation export statuses' do
+ create(:bulk_import_export, :started, project: project, relation: 'labels')
+ create(:bulk_import_export, :finished, project: project, relation: 'milestones')
+ create(:bulk_import_export, :failed, project: project, relation: 'project_badges')
+
+ get api(status_path, user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.pluck('relation')).to contain_exactly('labels', 'milestones', 'project_badges')
+ expect(json_response.pluck('status')).to contain_exactly(-1, 0, 1)
+ end
+ end
+
+ context 'with bulk_import FF disabled' do
+ before do
+ stub_feature_flags(bulk_import: false)
+ end
+
+ describe 'POST /projects/:id/export_relations' do
+ it_behaves_like '404 response' do
+ let(:request) { post api(path, user) }
+ end
+ end
+
+ describe 'GET /projects/:id/export_relations/download' do
+ let_it_be(:export) { create(:bulk_import_export, project: project, relation: 'labels') }
+ let_it_be(:upload) { create(:bulk_import_export_upload, export: export) }
+
+ before do
+ upload.update!(export_file: fixture_file_upload('spec/fixtures/bulk_imports/gz/labels.ndjson.gz'))
+ end
+
+ it_behaves_like '404 response' do
+ let(:request) { post api(path, user) }
+ end
+ end
+
+ describe 'GET /projects/:id/export_relations/status' do
+ it_behaves_like '404 response' do
+ let(:request) { get api(status_path, user) }
+ end
+ end
+ end
+ end
+
+ context 'when user is a developer' do
+ let_it_be(:developer) { create(:user) }
+
+ before do
+ project.add_developer(developer)
+ end
+
+ describe 'POST /projects/:id/export_relations' do
+ it_behaves_like '403 response' do
+ let(:request) { post api(path, developer) }
+ end
+ end
+
+ describe 'GET /projects/:id/export_relations/download' do
+ it_behaves_like '403 response' do
+ let(:request) { get api(download_path, developer) }
+ end
+ end
+
+ describe 'GET /projects/:id/export_relations/status' do
+ it_behaves_like '403 response' do
+ let(:request) { get api(status_path, developer) }
+ end
+ end
+ end
+ end
end
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index be8a6c7bdcf..b5d3dcee804 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -2591,7 +2591,7 @@ RSpec.describe API::Projects do
end
end
- it_behaves_like 'storing arguments in the application context' do
+ it_behaves_like 'storing arguments in the application context for the API' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public) }
let(:expected_params) { { user: user.username, project: project.full_path } }
@@ -2684,26 +2684,9 @@ RSpec.describe API::Projects do
context 'when authenticated' do
context 'valid request' do
- context 'when sort_by_project_authorizations_user_id FF is off' do
- before do
- stub_feature_flags(sort_by_project_users_by_project_authorizations_user_id: false)
- end
-
- it_behaves_like 'project users response' do
- let(:project) { project4 }
- let(:current_user) { user4 }
- end
- end
-
- context 'when sort_by_project_authorizations_user_id FF is on' do
- before do
- stub_feature_flags(sort_by_project_users_by_project_authorizations_user_id: true)
- end
-
- it_behaves_like 'project users response' do
- let(:project) { project4 }
- let(:current_user) { user4 }
- end
+ it_behaves_like 'project users response' do
+ let(:project) { project4 }
+ let(:current_user) { user4 }
end
end
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index a576e1ab1ee..f05f125c974 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -305,6 +305,18 @@ RSpec.describe API::Repositories do
end
end
+ it 'returns only a part of the repository with path set' do
+ path = 'bar'
+ get api("#{route}?path=#{path}", current_user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+
+ type, params = workhorse_send_data
+
+ expect(type).to eq('git-archive')
+ expect(params['ArchivePath']).to match(/#{project.path}\-[^\.]+\-#{path}\.tar.gz/)
+ end
+
it 'rate limits user when thresholds hit' do
allow(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
diff --git a/spec/requests/api/services_spec.rb b/spec/requests/api/services_spec.rb
deleted file mode 100644
index e550132e776..00000000000
--- a/spec/requests/api/services_spec.rb
+++ /dev/null
@@ -1,361 +0,0 @@
-# frozen_string_literal: true
-
-require "spec_helper"
-
-RSpec.describe API::Services do
- let_it_be(:user) { create(:user) }
- let_it_be(:user2) { create(:user) }
-
- let_it_be(:project, reload: true) do
- create(:project, creator_id: user.id, namespace: user.namespace)
- end
-
- describe "GET /projects/:id/services" do
- it 'returns authentication error when unauthenticated' do
- get api("/projects/#{project.id}/services")
-
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
-
- it "returns error when authenticated but user is not a project owner" do
- project.add_developer(user2)
- get api("/projects/#{project.id}/services", user2)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
-
- context 'with integrations' do
- let!(:active_integration) { create(:emails_on_push_integration, project: project, active: true) }
- let!(:integration) { create(:custom_issue_tracker_integration, project: project, active: false) }
-
- it "returns a list of all active integrations" do
- get api("/projects/#{project.id}/services", user)
-
- aggregate_failures 'expect successful response with all active integrations' do
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to be_an Array
- expect(json_response.count).to eq(1)
- expect(json_response.first['slug']).to eq('emails-on-push')
- expect(response).to match_response_schema('public_api/v4/services')
- end
- end
- end
- end
-
- Integration.available_integration_names.each do |integration|
- describe "PUT /projects/:id/services/#{integration.dasherize}" do
- include_context integration
-
- it "updates #{integration} settings" do
- put api("/projects/#{project.id}/services/#{dashed_integration}", user), params: integration_attrs
-
- expect(response).to have_gitlab_http_status(:ok)
-
- current_integration = project.integrations.first
- events = current_integration.event_names.empty? ? ["foo"].freeze : current_integration.event_names
- query_strings = []
- events.each do |event|
- query_strings << "#{event}=#{!current_integration[event]}"
- end
- query_strings = query_strings.join('&')
-
- put api("/projects/#{project.id}/services/#{dashed_integration}?#{query_strings}", user), params: integration_attrs
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['slug']).to eq(dashed_integration)
- events.each do |event|
- next if event == "foo"
-
- expect(project.integrations.first[event]).not_to eq(current_integration[event]),
- "expected #{!current_integration[event]} for event #{event} for service #{current_integration.title}, got #{current_integration[event]}"
- end
- end
-
- it "returns if required fields missing" do
- required_attributes = integration_attrs_list.select do |attr|
- integration_klass.validators_on(attr).any? do |v|
- v.instance_of?(ActiveRecord::Validations::PresenceValidator) &&
- # exclude presence validators with conditional since those are not really required
- ![:if, :unless].any? { |cond| v.options.include?(cond) }
- end
- end
-
- if required_attributes.empty?
- expected_code = :ok
- else
- integration_attrs.delete(required_attributes.sample)
- expected_code = :bad_request
- end
-
- put api("/projects/#{project.id}/services/#{dashed_integration}", user), params: integration_attrs
-
- expect(response).to have_gitlab_http_status(expected_code)
- end
- end
-
- describe "DELETE /projects/:id/services/#{integration.dasherize}" do
- include_context integration
-
- before do
- initialize_integration(integration)
- end
-
- it "deletes #{integration}" do
- delete api("/projects/#{project.id}/services/#{dashed_integration}", user)
-
- expect(response).to have_gitlab_http_status(:no_content)
- project.send(integration_method).reload
- expect(project.send(integration_method).activated?).to be_falsey
- end
- end
-
- describe "GET /projects/:id/services/#{integration.dasherize}" do
- include_context integration
-
- let!(:initialized_integration) { initialize_integration(integration, active: true) }
-
- let_it_be(:project2) do
- create(:project, creator_id: user.id, namespace: user.namespace)
- end
-
- def deactive_integration!
- return initialized_integration.update!(active: false) unless initialized_integration.is_a?(::Integrations::Prometheus)
-
- # Integrations::Prometheus sets `#active` itself within a `before_save`:
- initialized_integration.manual_configuration = false
- initialized_integration.save!
- end
-
- it 'returns authentication error when unauthenticated' do
- get api("/projects/#{project.id}/services/#{dashed_integration}")
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
-
- it "returns all properties of active service #{integration}" do
- get api("/projects/#{project.id}/services/#{dashed_integration}", user)
-
- expect(initialized_integration).to be_active
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['properties'].keys).to match_array(integration_instance.api_field_names)
- end
-
- it "returns all properties of inactive integration #{integration}" do
- deactive_integration!
-
- get api("/projects/#{project.id}/services/#{dashed_integration}", user)
-
- expect(initialized_integration).not_to be_active
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['properties'].keys).to match_array(integration_instance.api_field_names)
- end
-
- it "returns not found if integration does not exist" do
- get api("/projects/#{project2.id}/services/#{dashed_integration}", user)
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response['message']).to eq('404 Service Not Found')
- end
-
- it "returns not found if service exists but is in `Project#disabled_integrations`" do
- expect_next_found_instance_of(Project) do |project|
- expect(project).to receive(:disabled_integrations).at_least(:once).and_return([integration])
- end
-
- get api("/projects/#{project.id}/services/#{dashed_integration}", user)
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response['message']).to eq('404 Service Not Found')
- end
-
- it "returns error when authenticated but not a project owner" do
- project.add_developer(user2)
- get api("/projects/#{project.id}/services/#{dashed_integration}", user2)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
- end
-
- describe 'POST /projects/:id/services/:slug/trigger' do
- describe 'Mattermost integration' do
- let(:integration_name) { 'mattermost_slash_commands' }
-
- context 'when no integration is available' do
- it 'returns a not found message' do
- post api("/projects/#{project.id}/services/idonotexist/trigger")
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response["error"]).to eq("404 Not Found")
- end
- end
-
- context 'when the integration exists' do
- let(:params) { { token: 'token' } }
-
- context 'when the integration is not active' do
- before do
- project.create_mattermost_slash_commands_integration(
- active: false,
- properties: params
- )
- end
-
- it 'when the integration is inactive' do
- post api("/projects/#{project.id}/services/#{integration_name}/trigger"), params: params
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when the integration is active' do
- before do
- project.create_mattermost_slash_commands_integration(
- active: true,
- properties: params
- )
- end
-
- it 'returns status 200' do
- post api("/projects/#{project.id}/services/#{integration_name}/trigger"), params: params
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- context 'when the project can not be found' do
- it 'returns a generic 404' do
- post api("/projects/404/services/#{integration_name}/trigger"), params: params
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response["message"]).to eq("404 Service Not Found")
- end
- end
- end
- end
-
- describe 'Slack Integration' do
- let(:integration_name) { 'slack_slash_commands' }
-
- before do
- project.create_slack_slash_commands_integration(
- active: true,
- properties: { token: 'token' }
- )
- end
-
- it 'returns status 200' do
- post api("/projects/#{project.id}/services/#{integration_name}/trigger"), params: { token: 'token', text: 'help' }
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['response_type']).to eq("ephemeral")
- end
- end
- end
-
- describe 'Mattermost integration' do
- let(:integration_name) { 'mattermost' }
- let(:params) do
- { webhook: 'https://hook.example.com', username: 'username' }
- end
-
- before do
- project.create_mattermost_integration(
- active: true,
- properties: params
- )
- end
-
- it 'accepts a username for update' do
- put api("/projects/#{project.id}/services/#{integration_name}", user), params: params.merge(username: 'new_username')
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['properties']['username']).to eq('new_username')
- end
- end
-
- describe 'Microsoft Teams integration' do
- let(:integration_name) { 'microsoft-teams' }
- let(:params) do
- {
- webhook: 'https://hook.example.com',
- branches_to_be_notified: 'default',
- notify_only_broken_pipelines: false
- }
- end
-
- before do
- project.create_microsoft_teams_integration(
- active: true,
- properties: params
- )
- end
-
- it 'accepts branches_to_be_notified for update' do
- put api("/projects/#{project.id}/services/#{integration_name}", user),
- params: params.merge(branches_to_be_notified: 'all')
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['properties']['branches_to_be_notified']).to eq('all')
- end
-
- it 'accepts notify_only_broken_pipelines for update' do
- put api("/projects/#{project.id}/services/#{integration_name}", user),
- params: params.merge(notify_only_broken_pipelines: true)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['properties']['notify_only_broken_pipelines']).to eq(true)
- end
- end
-
- describe 'Hangouts Chat integration' do
- let(:integration_name) { 'hangouts-chat' }
- let(:params) do
- {
- webhook: 'https://hook.example.com',
- branches_to_be_notified: 'default'
- }
- end
-
- before do
- project.create_hangouts_chat_integration(
- active: true,
- properties: params
- )
- end
-
- it 'accepts branches_to_be_notified for update', :aggregate_failures do
- put api("/projects/#{project.id}/services/#{integration_name}", user), params: params.merge(branches_to_be_notified: 'all')
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['properties']['branches_to_be_notified']).to eq('all')
- end
-
- it 'only requires the webhook param' do
- put api("/projects/#{project.id}/services/#{integration_name}", user), params: { webhook: 'https://hook.example.com' }
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- describe 'Pipelines Email Integration' do
- let(:integration_name) { 'pipelines-email' }
-
- context 'notify_only_broken_pipelines property was saved as a string' do
- before do
- project.create_pipelines_email_integration(
- active: false,
- properties: {
- "notify_only_broken_pipelines": "true",
- "branches_to_be_notified": "default"
- }
- )
- end
-
- it 'returns boolean values for notify_only_broken_pipelines' do
- get api("/projects/#{project.id}/services/#{integration_name}", user)
-
- expect(json_response['properties']['notify_only_broken_pipelines']).to eq(true)
- end
- end
- end
-end
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index f5d261ba4c6..423e19c3971 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -48,6 +48,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
expect(json_response['admin_mode']).to be(false)
expect(json_response['whats_new_variant']).to eq('all_tiers')
expect(json_response['user_deactivation_emails_enabled']).to be(true)
+ expect(json_response['suggest_pipeline_enabled']).to be(true)
end
end
@@ -135,7 +136,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
wiki_page_max_content_bytes: 12345,
personal_access_token_prefix: "GL-",
user_deactivation_emails_enabled: false,
- admin_mode: true
+ admin_mode: true,
+ suggest_pipeline_enabled: false
}
expect(response).to have_gitlab_http_status(:ok)
@@ -187,6 +189,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
expect(json_response['personal_access_token_prefix']).to eq("GL-")
expect(json_response['admin_mode']).to be(true)
expect(json_response['user_deactivation_emails_enabled']).to be(false)
+ expect(json_response['suggest_pipeline_enabled']).to be(false)
end
end
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index ee1911b0a26..fb01845b63a 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -1457,10 +1457,20 @@ RSpec.describe API::Users do
describe "PUT /user/:id/credit_card_validation" do
let(:credit_card_validated_time) { Time.utc(2020, 1, 1) }
+ let(:expiration_year) { Date.today.year + 10 }
+ let(:params) do
+ {
+ credit_card_validated_at: credit_card_validated_time,
+ credit_card_expiration_year: expiration_year,
+ credit_card_expiration_month: 1,
+ credit_card_holder_name: 'John Smith',
+ credit_card_mask_number: '1111'
+ }
+ end
context 'when unauthenticated' do
it 'returns authentication error' do
- put api("/user/#{user.id}/credit_card_validation"), params: { credit_card_validated_at: credit_card_validated_time }
+ put api("/user/#{user.id}/credit_card_validation"), params: {}
expect(response).to have_gitlab_http_status(:unauthorized)
end
@@ -1468,7 +1478,7 @@ RSpec.describe API::Users do
context 'when authenticated as non-admin' do
it "does not allow updating user's credit card validation", :aggregate_failures do
- put api("/user/#{user.id}/credit_card_validation", user), params: { credit_card_validated_at: credit_card_validated_time }
+ put api("/user/#{user.id}/credit_card_validation", user), params: params
expect(response).to have_gitlab_http_status(:forbidden)
end
@@ -1476,10 +1486,17 @@ RSpec.describe API::Users do
context 'when authenticated as admin' do
it "updates user's credit card validation", :aggregate_failures do
- put api("/user/#{user.id}/credit_card_validation", admin), params: { credit_card_validated_at: credit_card_validated_time }
+ put api("/user/#{user.id}/credit_card_validation", admin), params: params
+
+ user.reload
expect(response).to have_gitlab_http_status(:ok)
- expect(user.reload.credit_card_validated_at).to eq(credit_card_validated_time)
+ expect(user.credit_card_validation).to have_attributes(
+ credit_card_validated_at: credit_card_validated_time,
+ expiration_date: Date.new(expiration_year, 1, 31),
+ last_digits: 1111,
+ holder_name: 'John Smith'
+ )
end
it "returns 400 error if credit_card_validated_at is missing" do
@@ -1489,7 +1506,7 @@ RSpec.describe API::Users do
end
it 'returns 404 error if user not found' do
- put api("/user/#{non_existing_record_id}/credit_card_validation", admin), params: { credit_card_validated_at: credit_card_validated_time }
+ put api("/user/#{non_existing_record_id}/credit_card_validation", admin), params: params
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
diff --git a/spec/requests/groups/registry/repositories_controller_spec.rb b/spec/requests/groups/registry/repositories_controller_spec.rb
index 89cbd3e4100..0699f48c2be 100644
--- a/spec/requests/groups/registry/repositories_controller_spec.rb
+++ b/spec/requests/groups/registry/repositories_controller_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Groups::Registry::RepositoriesController do
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: [])
+ stub_container_registry_info
group.add_reporter(user)
login_as(user)
end
diff --git a/spec/requests/import/url_controller_spec.rb b/spec/requests/import/url_controller_spec.rb
new file mode 100644
index 00000000000..63af5e8b469
--- /dev/null
+++ b/spec/requests/import/url_controller_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::UrlController do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ login_as(user)
+ end
+
+ describe 'POST #validate' do
+ it 'reports success when service reports success status' do
+ allow_next_instance_of(Import::ValidateRemoteGitEndpointService) do |validate_endpoint_service|
+ allow(validate_endpoint_service).to receive(:execute).and_return(ServiceResponse.success)
+ end
+
+ post import_url_validate_path, params: { url: 'https://fake.repo' }
+
+ expect(json_response).to eq({ 'success' => true })
+ end
+
+ it 'exposes error message when service reports error' do
+ expect_next_instance_of(Import::ValidateRemoteGitEndpointService) do |validate_endpoint_service|
+ expect(validate_endpoint_service).to receive(:execute).and_return(ServiceResponse.error(message: 'foobar'))
+ end
+
+ post import_url_validate_path, params: { url: 'https://fake.repo' }
+
+ expect(json_response).to eq({ 'success' => false, 'message' => 'foobar' })
+ end
+
+ context 'with an anonymous user' do
+ before do
+ sign_out(user)
+ end
+
+ it 'redirects to sign-in page' do
+ post import_url_validate_path
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/cluster_agents_controller_spec.rb b/spec/requests/projects/cluster_agents_controller_spec.rb
new file mode 100644
index 00000000000..e4c4f537699
--- /dev/null
+++ b/spec/requests/projects/cluster_agents_controller_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ClusterAgentsController do
+ let_it_be(:cluster_agent) { create(:cluster_agent) }
+
+ let(:project) { cluster_agent.project }
+
+ describe 'GET #show' do
+ subject { get project_cluster_agent_path(project, cluster_agent.name) }
+
+ context 'when user is unauthorized' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ subject
+ end
+
+ it 'shows 404' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when user is authorized' do
+ let(:user) { project.creator }
+
+ before do
+ sign_in(user)
+ subject
+ end
+
+ it 'renders content' do
+ expect(response).to be_successful
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/google_cloud_controller_spec.rb b/spec/requests/projects/google_cloud_controller_spec.rb
new file mode 100644
index 00000000000..3b43f0d1dfb
--- /dev/null
+++ b/spec/requests/projects/google_cloud_controller_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::GoogleCloudController do
+ let_it_be(:project) { create(:project, :public) }
+
+ describe 'GET index' do
+ let_it_be(:url) { "#{project_google_cloud_index_path(project)}" }
+
+ let(:subject) { get url }
+
+ context 'when user is authorized' do
+ let(:user) { project.creator }
+
+ before do
+ sign_in(user)
+ subject
+ end
+
+ it 'renders content' do
+ expect(response).to be_successful
+ end
+ end
+
+ context 'when user is unauthorized' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_guest(user)
+ sign_in(user)
+ subject
+ end
+
+ it 'shows 404' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when no user is present' do
+ before do
+ subject
+ end
+
+ it 'shows 404' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/merge_requests_discussions_spec.rb b/spec/requests/projects/merge_requests_discussions_spec.rb
index 8057a091bba..4921a43ab8b 100644
--- a/spec/requests/projects/merge_requests_discussions_spec.rb
+++ b/spec/requests/projects/merge_requests_discussions_spec.rb
@@ -5,11 +5,13 @@ require 'spec_helper'
RSpec.describe 'merge requests discussions' do
# Further tests can be found at merge_requests_controller_spec.rb
describe 'GET /:namespace/:project/-/merge_requests/:iid/discussions' do
- let(:project) { create(:project, :repository) }
- let(:user) { project.owner }
+ let(:project) { create(:project, :repository, :public) }
+ let(:owner) { project.owner }
+ let(:user) { create(:user) }
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
before do
+ project.add_maintainer(owner)
project.add_developer(user)
login_as(user)
end
@@ -232,7 +234,7 @@ RSpec.describe 'merge requests discussions' do
context 'when author role changes' do
before do
- Members::UpdateService.new(user, access_level: Gitlab::Access::GUEST).execute(author_membership)
+ Members::UpdateService.new(owner, access_level: Gitlab::Access::GUEST).execute(author_membership)
end
it_behaves_like 'cache miss' do
@@ -240,9 +242,9 @@ RSpec.describe 'merge requests discussions' do
end
end
- context 'when merge_request_discussion_cache is disabled' do
+ context 'when current_user role changes' do
before do
- stub_feature_flags(merge_request_discussion_cache: false)
+ Members::UpdateService.new(owner, access_level: Gitlab::Access::GUEST).execute(project.project_member(user))
end
it_behaves_like 'cache miss' do
diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb
index be942f6ae86..35ce942ed7e 100644
--- a/spec/requests/rack_attack_global_spec.rb
+++ b/spec/requests/rack_attack_global_spec.rb
@@ -30,7 +30,11 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac
throttle_unauthenticated_files_api_requests_per_period: 100,
throttle_unauthenticated_files_api_period_in_seconds: 1,
throttle_authenticated_files_api_requests_per_period: 100,
- throttle_authenticated_files_api_period_in_seconds: 1
+ throttle_authenticated_files_api_period_in_seconds: 1,
+ throttle_unauthenticated_deprecated_api_requests_per_period: 100,
+ throttle_unauthenticated_deprecated_api_period_in_seconds: 1,
+ throttle_authenticated_deprecated_api_requests_per_period: 100,
+ throttle_authenticated_deprecated_api_period_in_seconds: 1
}
end
@@ -479,6 +483,67 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac
end
end
+ describe 'dependency proxy' do
+ include DependencyProxyHelpers
+
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be_with_reload(:other_group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:other_user) { create(:user) }
+
+ let(:throttle_setting_prefix) { 'throttle_authenticated_web' }
+ let(:jwt_token) { build_jwt(user) }
+ let(:other_jwt_token) { build_jwt(other_user) }
+ let(:request_args) { [path, headers: jwt_token_authorization_headers(jwt_token)] }
+ let(:other_user_request_args) { [other_path, headers: jwt_token_authorization_headers(other_jwt_token)] }
+
+ before do
+ group.add_owner(user)
+ group.create_dependency_proxy_setting!(enabled: true)
+ other_group.add_owner(other_user)
+ other_group.create_dependency_proxy_setting!(enabled: true)
+
+ allow(Gitlab.config.dependency_proxy)
+ .to receive(:enabled).and_return(true)
+ token_response = { status: :success, token: 'abcd1234' }
+ allow_next_instance_of(DependencyProxy::RequestTokenService) do |instance|
+ allow(instance).to receive(:execute).and_return(token_response)
+ end
+ end
+
+ context 'getting a manifest' do
+ let_it_be(:manifest) { create(:dependency_proxy_manifest) }
+
+ let(:path) { "/v2/#{group.path}/dependency_proxy/containers/alpine/manifests/latest" }
+ let(:other_path) { "/v2/#{other_group.path}/dependency_proxy/containers/alpine/manifests/latest" }
+ let(:pull_response) { { status: :success, manifest: manifest, from_cache: false } }
+
+ before do
+ allow_next_instance_of(DependencyProxy::FindOrCreateManifestService) do |instance|
+ allow(instance).to receive(:execute).and_return(pull_response)
+ end
+ end
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
+
+ context 'getting a blob' do
+ let_it_be(:blob) { create(:dependency_proxy_blob) }
+
+ let(:path) { "/v2/#{group.path}/dependency_proxy/containers/alpine/blobs/sha256:a0d0a0d46f8b52473982a3c466318f479767577551a53ffc9074c9fa7035982e" }
+ let(:other_path) { "/v2/#{other_group.path}/dependency_proxy/containers/alpine/blobs/sha256:a0d0a0d46f8b52473982a3c466318f479767577551a53ffc9074c9fa7035982e" }
+ let(:blob_response) { { status: :success, blob: blob, from_cache: false } }
+
+ before do
+ allow_next_instance_of(DependencyProxy::FindOrCreateBlobService) do |instance|
+ allow(instance).to receive(:execute).and_return(blob_response)
+ end
+ end
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
+ end
+
describe 'authenticated git lfs requests', :api do
let_it_be(:project) { create(:project, :internal) }
let_it_be(:user) { create(:user) }
@@ -790,6 +855,213 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac
end
end
+ describe 'Deprecated API', :api do
+ let_it_be(:group) { create(:group, :public) }
+
+ let(:request_method) { 'GET' }
+ let(:path) { "/groups/#{group.id}" }
+ let(:params) { {} }
+
+ context 'unauthenticated' do
+ let(:throttle_setting_prefix) { 'throttle_unauthenticated_deprecated_api' }
+
+ def do_request
+ get(api(path), params: params)
+ end
+
+ before do
+ settings_to_set[:throttle_unauthenticated_deprecated_api_requests_per_period] = requests_per_period
+ settings_to_set[:throttle_unauthenticated_deprecated_api_period_in_seconds] = period_in_seconds
+ end
+
+ context 'when unauthenticated deprecated api throttle is disabled' do
+ before do
+ settings_to_set[:throttle_unauthenticated_deprecated_api_enabled] = false
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'allows requests over the rate limit' do
+ (1 + requests_per_period).times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when unauthenticated api throttle is enabled' do
+ before do
+ settings_to_set[:throttle_unauthenticated_api_requests_per_period] = requests_per_period
+ settings_to_set[:throttle_unauthenticated_api_period_in_seconds] = period_in_seconds
+ settings_to_set[:throttle_unauthenticated_api_enabled] = true
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'rejects requests over the unauthenticated api rate limit' do
+ requests_per_period.times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ expect_rejection { do_request }
+ end
+ end
+
+ context 'when unauthenticated web throttle is enabled' do
+ before do
+ settings_to_set[:throttle_unauthenticated_web_requests_per_period] = requests_per_period
+ settings_to_set[:throttle_unauthenticated_web_period_in_seconds] = period_in_seconds
+ settings_to_set[:throttle_unauthenticated_web_enabled] = true
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'ignores unauthenticated web throttle' do
+ (1 + requests_per_period).times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+ end
+
+ context 'when unauthenticated deprecated api throttle is enabled' do
+ before do
+ settings_to_set[:throttle_unauthenticated_deprecated_api_requests_per_period] = requests_per_period # 1
+ settings_to_set[:throttle_unauthenticated_deprecated_api_period_in_seconds] = period_in_seconds # 10_000
+ settings_to_set[:throttle_unauthenticated_deprecated_api_enabled] = true
+ stub_application_setting(settings_to_set)
+ end
+
+ context 'when group endpoint is given with_projects=false' do
+ let(:params) { { with_projects: false } }
+
+ it 'permits requests over the rate limit' do
+ (1 + requests_per_period).times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
+ it 'rejects requests over the rate limit' do
+ requests_per_period.times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ expect_rejection { do_request }
+ end
+
+ context 'when unauthenticated api throttle is lower' do
+ before do
+ settings_to_set[:throttle_unauthenticated_api_requests_per_period] = 0
+ settings_to_set[:throttle_unauthenticated_api_period_in_seconds] = period_in_seconds
+ settings_to_set[:throttle_unauthenticated_api_enabled] = true
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'ignores unauthenticated api throttle' do
+ requests_per_period.times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ expect_rejection { do_request }
+ end
+ end
+
+ it_behaves_like 'tracking when dry-run mode is set' do
+ let(:throttle_name) { 'throttle_unauthenticated_deprecated_api' }
+ end
+ end
+ end
+
+ context 'authenticated' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:member) { group.add_owner(user) }
+ let_it_be(:token) { create(:personal_access_token, user: user) }
+ let_it_be(:other_user) { create(:user) }
+ let_it_be(:other_user_token) { create(:personal_access_token, user: other_user) }
+
+ let(:throttle_setting_prefix) { 'throttle_authenticated_deprecated_api' }
+
+ before do
+ stub_application_setting(settings_to_set)
+ end
+
+ context 'with the token in the query string' do
+ let(:request_args) { [api(path, personal_access_token: token), {}] }
+ let(:other_user_request_args) { [api(path, personal_access_token: other_user_token), {}] }
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
+
+ context 'with the token in the headers' do
+ let(:request_args) { api_get_args_with_token_headers(path, personal_access_token_headers(token)) }
+ let(:other_user_request_args) { api_get_args_with_token_headers(path, personal_access_token_headers(other_user_token)) }
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
+
+ context 'precedence over authenticated api throttle' do
+ before do
+ settings_to_set[:throttle_authenticated_deprecated_api_requests_per_period] = requests_per_period
+ settings_to_set[:throttle_authenticated_deprecated_api_period_in_seconds] = period_in_seconds
+ end
+
+ def do_request
+ get(api(path, personal_access_token: token), params: params)
+ end
+
+ context 'when authenticated deprecated api throttle is enabled' do
+ before do
+ settings_to_set[:throttle_authenticated_deprecated_api_enabled] = true
+ end
+
+ context 'when authenticated api throttle is lower' do
+ before do
+ settings_to_set[:throttle_authenticated_api_requests_per_period] = 0
+ settings_to_set[:throttle_authenticated_api_period_in_seconds] = period_in_seconds
+ settings_to_set[:throttle_authenticated_api_enabled] = true
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'ignores authenticated api throttle' do
+ requests_per_period.times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ expect_rejection { do_request }
+ end
+ end
+ end
+
+ context 'when authenticated deprecated api throttle is disabled' do
+ before do
+ settings_to_set[:throttle_authenticated_deprecated_api_enabled] = false
+ end
+
+ context 'when authenticated api throttle is enabled' do
+ before do
+ settings_to_set[:throttle_authenticated_api_requests_per_period] = requests_per_period
+ settings_to_set[:throttle_authenticated_api_period_in_seconds] = period_in_seconds
+ settings_to_set[:throttle_authenticated_api_enabled] = true
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'rejects requests over the authenticated api rate limit' do
+ requests_per_period.times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ expect_rejection { do_request }
+ end
+ end
+ end
+ end
+ end
+ end
+
describe 'throttle bypass header' do
let(:headers) { {} }
let(:bypass_header) { 'gitlab-bypass-rate-limiting' }
diff --git a/spec/routing/admin/serverless/domains_controller_routing_spec.rb b/spec/routing/admin/serverless/domains_controller_routing_spec.rb
deleted file mode 100644
index 60b60809f4d..00000000000
--- a/spec/routing/admin/serverless/domains_controller_routing_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Admin::Serverless::DomainsController do
- it 'routes to #index' do
- expect(get: '/admin/serverless/domains').to route_to('admin/serverless/domains#index')
- end
-
- it 'routes to #create' do
- expect(post: '/admin/serverless/domains/').to route_to('admin/serverless/domains#create')
- end
-
- it 'routes to #update' do
- expect(put: '/admin/serverless/domains/1').to route_to(controller: 'admin/serverless/domains', action: 'update', id: '1')
- expect(patch: '/admin/serverless/domains/1').to route_to(controller: 'admin/serverless/domains', action: 'update', id: '1')
- end
-
- it 'routes #verify' do
- expect(post: '/admin/serverless/domains/1/verify').to route_to(controller: 'admin/serverless/domains', action: 'verify', id: '1')
- end
-end
diff --git a/spec/serializers/member_entity_spec.rb b/spec/serializers/member_entity_spec.rb
index dc7aa4611f2..370fa14b1e8 100644
--- a/spec/serializers/member_entity_spec.rb
+++ b/spec/serializers/member_entity_spec.rb
@@ -39,6 +39,10 @@ RSpec.describe MemberEntity do
expect(entity_hash[:invite][:can_resend]).to be(true)
end
+
+ it 'exposes `invite.user_state` as empty string' do
+ expect(entity_hash[:invite][:user_state]).to eq('')
+ end
end
shared_examples 'is_direct_member' do
@@ -59,6 +63,12 @@ RSpec.describe MemberEntity do
end
end
+ shared_examples 'user state is blocked_pending_approval' do
+ it 'displays proper user state' do
+ expect(entity_hash[:invite][:user_state]).to eq('blocked_pending_approval')
+ end
+ end
+
context 'group member' do
let(:group) { create(:group) }
let(:source) { group }
@@ -79,6 +89,14 @@ RSpec.describe MemberEntity do
it_behaves_like 'is_direct_member'
end
+
+ context 'new member user state is blocked_pending_approval' do
+ let(:user) { create(:user, :blocked_pending_approval) }
+ let(:group_member) { create(:group_member, :invited, group: group, invite_email: user.email) }
+ let(:member) { GroupMemberPresenter.new(GroupMember.with_invited_user_state.find(group_member.id), current_user: current_user) }
+
+ it_behaves_like 'user state is blocked_pending_approval'
+ end
end
context 'project member' do
@@ -102,5 +120,13 @@ RSpec.describe MemberEntity do
it_behaves_like 'is_direct_member'
end
+
+ context 'new members user state is blocked_pending_approval' do
+ let(:user) { create(:user, :blocked_pending_approval) }
+ let(:project_member) { create(:project_member, :invited, project: project, invite_email: user.email) }
+ let(:member) { ProjectMemberPresenter.new(ProjectMember.with_invited_user_state.find(project_member.id), current_user: current_user) }
+
+ it_behaves_like 'user state is blocked_pending_approval'
+ end
end
end
diff --git a/spec/serializers/merge_request_metrics_helper_spec.rb b/spec/serializers/merge_request_metrics_helper_spec.rb
new file mode 100644
index 00000000000..8f683df1faa
--- /dev/null
+++ b/spec/serializers/merge_request_metrics_helper_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequestMetricsHelper do
+ let_it_be(:user) { create(:user) }
+
+ let(:merge_request) { create(:merge_request) }
+ let(:helper) { Class.new.include(described_class).new }
+
+ describe '#build_metrics' do
+ subject do
+ helper.build_metrics(merge_request)
+ end
+
+ shared_examples 'does not rebuild the metrics' do
+ it 'does not call the merge request metrics class' do
+ expect(MergeRequest::Metrics).not_to receive(:new)
+
+ subject
+ end
+
+ it 'returns the metrics for the given merge request' do
+ expect(subject).to be_kind_of(MergeRequest::Metrics)
+ expect(subject[:merge_request_id]).to eq(merge_request.id)
+ end
+ end
+
+ context 'when closed and metrics exists' do
+ before do
+ merge_request.close!
+ merge_request.metrics.update!(latest_closed_by: user)
+ end
+
+ include_examples 'does not rebuild the metrics'
+ end
+
+ context 'when merged and metrics exists' do
+ before do
+ merge_request.mark_as_merged!
+ merge_request.metrics.update!(merged_by: user)
+ end
+
+ include_examples 'does not rebuild the metrics'
+ end
+
+ context 'when merged and metrics do not exist' do
+ before do
+ merge_request.mark_as_merged!
+ merge_request.metrics.destroy!
+ merge_request.reload
+ end
+
+ it 'rebuilds the merge request metrics' do
+ closed_event = merge_request.closed_event
+ merge_event = merge_request.merge_event
+
+ expect(MergeRequest::Metrics).to receive(:new)
+ .with(latest_closed_at: closed_event&.updated_at,
+ latest_closed_by: closed_event&.author,
+ merged_at: merge_event&.updated_at,
+ merged_by: merge_event&.author)
+ .and_call_original
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
index 5f4b734fcea..ecc93219b53 100644
--- a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
@@ -275,7 +275,7 @@ RSpec.describe MergeRequestPollCachedWidgetEntity do
expect(subject[:merge_pipeline]).to be_nil
end
- context 'when is merged' do
+ context 'when is merged', :sidekiq_inline do
let(:resource) { create(:merged_merge_request, source_project: project, merge_commit_sha: project.commit.id) }
let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.target_branch, sha: resource.merge_commit_sha) }
diff --git a/spec/serializers/merge_request_widget_entity_spec.rb b/spec/serializers/merge_request_widget_entity_spec.rb
index 35846b0d4ea..fcfdbfc0967 100644
--- a/spec/serializers/merge_request_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_widget_entity_spec.rb
@@ -283,28 +283,6 @@ RSpec.describe MergeRequestWidgetEntity do
it 'provides a valid value for suggest pipeline feature id' do
expect(subject[:suggest_pipeline_feature_id]).to eq described_class::SUGGEST_PIPELINE
end
-
- it 'provides a valid value for if it is dismissed' do
- expect(subject[:is_dismissed_suggest_pipeline]).to be(false)
- end
-
- context 'when the suggest pipeline has been dismissed' do
- before do
- create(:user_callout, user: user, feature_name: described_class::SUGGEST_PIPELINE)
- end
-
- it 'indicates suggest pipeline has been dismissed' do
- expect(subject[:is_dismissed_suggest_pipeline]).to be(true)
- end
- end
-
- context 'when user is not logged in' do
- let(:request) { double('request', current_user: nil, project: project) }
-
- it 'returns a blank is dismissed value' do
- expect(subject[:is_dismissed_suggest_pipeline]).to be_nil
- end
- end
end
it 'has human access' do
@@ -395,4 +373,46 @@ RSpec.describe MergeRequestWidgetEntity do
end
end
end
+
+ describe 'is_dismissed_suggest_pipeline' do
+ context 'when user is logged in' do
+ context 'when the suggest pipeline feature is enabled' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:suggest_pipeline_enabled?).and_return(true)
+ end
+
+ it 'is false' do
+ expect(subject[:is_dismissed_suggest_pipeline]).to be(false)
+ end
+
+ context 'when suggest pipeline has been dismissed' do
+ before do
+ create(:user_callout, user: user, feature_name: described_class::SUGGEST_PIPELINE)
+ end
+
+ it 'is true' do
+ expect(subject[:is_dismissed_suggest_pipeline]).to be(true)
+ end
+ end
+ end
+
+ context 'when the suggest pipeline feature is disabled' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:suggest_pipeline_enabled?).and_return(false)
+ end
+
+ it 'is true' do
+ expect(subject[:is_dismissed_suggest_pipeline]).to be(true)
+ end
+ end
+ end
+
+ context 'when user is not logged in' do
+ let(:request) { double('request', current_user: nil, project: project) }
+
+ it 'is true' do
+ expect(subject[:is_dismissed_suggest_pipeline]).to be(true)
+ end
+ end
+ end
end
diff --git a/spec/services/application_settings/update_service_spec.rb b/spec/services/application_settings/update_service_spec.rb
index a1fd89bcad7..5c9d2c5e680 100644
--- a/spec/services/application_settings/update_service_spec.rb
+++ b/spec/services/application_settings/update_service_spec.rb
@@ -413,6 +413,32 @@ RSpec.describe ApplicationSettings::UpdateService do
end
end
+ context 'when deprecated API rate limits are passed' do
+ let(:params) do
+ {
+ throttle_unauthenticated_deprecated_api_enabled: 1,
+ throttle_unauthenticated_deprecated_api_period_in_seconds: 500,
+ throttle_unauthenticated_deprecated_api_requests_per_period: 20,
+ throttle_authenticated_deprecated_api_enabled: 1,
+ throttle_authenticated_deprecated_api_period_in_seconds: 600,
+ throttle_authenticated_deprecated_api_requests_per_period: 10
+ }
+ end
+
+ it 'updates deprecated API throttle settings' do
+ subject.execute
+
+ application_settings.reload
+
+ expect(application_settings.throttle_unauthenticated_deprecated_api_enabled).to be_truthy
+ expect(application_settings.throttle_unauthenticated_deprecated_api_period_in_seconds).to eq(500)
+ expect(application_settings.throttle_unauthenticated_deprecated_api_requests_per_period).to eq(20)
+ expect(application_settings.throttle_authenticated_deprecated_api_enabled).to be_truthy
+ expect(application_settings.throttle_authenticated_deprecated_api_period_in_seconds).to eq(600)
+ expect(application_settings.throttle_authenticated_deprecated_api_requests_per_period).to eq(10)
+ end
+ end
+
context 'when git lfs rate limits are passed' do
let(:params) do
{
diff --git a/spec/services/boards/issues/list_service_spec.rb b/spec/services/boards/issues/list_service_spec.rb
index d1f854f72bc..72027911e51 100644
--- a/spec/services/boards/issues/list_service_spec.rb
+++ b/spec/services/boards/issues/list_service_spec.rb
@@ -56,12 +56,23 @@ RSpec.describe Boards::Issues::ListService do
it_behaves_like 'issues list service'
end
- context 'when filtering by type' do
- it 'only returns the specified type' do
- issue = create(:labeled_issue, project: project, milestone: m1, labels: [development, p1], issue_type: 'incident')
- params = { board_id: board.id, id: list1.id, issue_types: 'incident' }
+ context 'when filtering' do
+ let_it_be(:incident) { create(:labeled_issue, project: project, milestone: m1, labels: [development, p1], issue_type: 'incident') }
- expect(described_class.new(parent, user, params).execute).to eq [issue]
+ context 'when filtering by type' do
+ it 'only returns the specified type' do
+ params = { board_id: board.id, id: list1.id, issue_types: 'incident' }
+
+ expect(described_class.new(parent, user, params).execute).to eq [incident]
+ end
+ end
+
+ context 'when filtering by negated type' do
+ it 'does not return issues of the negated type' do
+ params = { board_id: board.id, id: list1.id, not: { issue_types: ['issue'] } }
+
+ expect(described_class.new(parent, user, params).execute).to contain_exactly(incident)
+ end
end
end
end
diff --git a/spec/services/bulk_import_service_spec.rb b/spec/services/bulk_imports/create_service_spec.rb
index 1b60a5cb0f8..67ec6fee1ae 100644
--- a/spec/services/bulk_import_service_spec.rb
+++ b/spec/services/bulk_imports/create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImportService do
+RSpec.describe BulkImports::CreateService do
let(:user) { create(:user) }
let(:credentials) { { url: 'http://gitlab.example', access_token: 'token' } }
let(:params) do
@@ -31,8 +31,25 @@ RSpec.describe BulkImportService do
subject { described_class.new(user, params, credentials) }
describe '#execute' do
+ let_it_be(:source_version) do
+ Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION,
+ ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)
+ end
+
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
+ allow(instance).to receive(:instance_version).and_return(source_version)
+ end
+ end
+
it 'creates bulk import' do
expect { subject.execute }.to change { BulkImport.count }.by(1)
+
+ last_bulk_import = BulkImport.last
+
+ expect(last_bulk_import.user).to eq(user)
+ expect(last_bulk_import.source_version).to eq(source_version.to_s)
+ expect(last_bulk_import).to be_persisted
end
it 'creates bulk import entities' do
diff --git a/spec/services/bulk_imports/file_export_service_spec.rb b/spec/services/bulk_imports/file_export_service_spec.rb
new file mode 100644
index 00000000000..0d129c75384
--- /dev/null
+++ b/spec/services/bulk_imports/file_export_service_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::FileExportService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:export_path) { Dir.mktmpdir }
+ let_it_be(:relation) { 'uploads' }
+
+ subject(:service) { described_class.new(project, export_path, relation) }
+
+ describe '#execute' do
+ it 'executes export service and archives exported data' do
+ expect_next_instance_of(BulkImports::UploadsExportService) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ expect(subject).to receive(:tar_cf).with(archive: File.join(export_path, 'uploads.tar'), dir: export_path)
+
+ subject.execute
+ end
+
+ context 'when unsupported relation is passed' do
+ it 'raises an error' do
+ service = described_class.new(project, export_path, 'unsupported')
+
+ expect { service.execute }.to raise_error(BulkImports::Error, 'Unsupported relation export type')
+ end
+ end
+ end
+
+ describe '#exported_filename' do
+ it 'returns filename of the exported file' do
+ expect(subject.exported_filename).to eq('uploads.tar')
+ end
+ end
+end
diff --git a/spec/services/bulk_imports/get_importable_data_service_spec.rb b/spec/services/bulk_imports/get_importable_data_service_spec.rb
new file mode 100644
index 00000000000..eccd3e5f49d
--- /dev/null
+++ b/spec/services/bulk_imports/get_importable_data_service_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::GetImportableDataService do
+ describe '#execute' do
+ include_context 'bulk imports requests context', 'https://gitlab.example.com'
+
+ let_it_be(:params) { { per_page: 20, page: 1 } }
+ let_it_be(:query_params) { { top_level_only: true, min_access_level: 50, search: '' } }
+ let_it_be(:credentials) { { url: 'https://gitlab.example.com', access_token: 'demo-pat' } }
+ let_it_be(:expected_version_validation) do
+ {
+ features: {
+ project_migration: {
+ available: true,
+ min_version: BulkImport.min_gl_version_for_project_migration.to_s
+ },
+ 'source_instance_version': BulkImport.min_gl_version_for_project_migration.to_s
+ }
+ }
+ end
+
+ let_it_be(:expected_parsed_response) do
+ [
+ {
+ 'id' => 2595438,
+ 'web_url' => 'https://gitlab.com/groups/auto-breakfast',
+ 'name' => 'Stub',
+ 'path' => 'stub-group',
+ 'full_name' => 'Stub',
+ 'full_path' => 'stub-group'
+ }
+ ]
+ end
+
+ subject do
+ described_class.new(params, query_params, credentials).execute
+ end
+
+ it 'returns version_validation and a response' do
+ expect(subject[:version_validation]).to eq(expected_version_validation)
+ expect(subject[:response].parsed_response).to eq(expected_parsed_response)
+ end
+ end
+end
diff --git a/spec/services/bulk_imports/relation_export_service_spec.rb b/spec/services/bulk_imports/relation_export_service_spec.rb
index 333cd9201d8..27a6ca60515 100644
--- a/spec/services/bulk_imports/relation_export_service_spec.rb
+++ b/spec/services/bulk_imports/relation_export_service_spec.rb
@@ -7,12 +7,14 @@ RSpec.describe BulkImports::RelationExportService do
let_it_be(:relation) { 'labels' }
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project) }
let_it_be(:label) { create(:group_label, group: group) }
let_it_be(:export_path) { "#{Dir.tmpdir}/relation_export_service_spec/tree" }
let_it_be_with_reload(:export) { create(:bulk_import_export, group: group, relation: relation) }
before do
group.add_owner(user)
+ project.add_maintainer(user)
allow(export).to receive(:export_path).and_return(export_path)
end
@@ -25,6 +27,10 @@ RSpec.describe BulkImports::RelationExportService do
describe '#execute' do
it 'exports specified relation and marks export as finished' do
+ expect_next_instance_of(BulkImports::TreeExportService) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
+
subject.execute
expect(export.reload.upload.export_file).to be_present
@@ -43,6 +49,18 @@ RSpec.describe BulkImports::RelationExportService do
expect(export.upload.export_file).to be_present
end
+ context 'when exporting a file relation' do
+ it 'uses file export service' do
+ service = described_class.new(user, project, 'uploads', jid)
+
+ expect_next_instance_of(BulkImports::FileExportService) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ service.execute
+ end
+ end
+
context 'when export record does not exist' do
let(:another_group) { create(:group) }
diff --git a/spec/services/bulk_imports/tree_export_service_spec.rb b/spec/services/bulk_imports/tree_export_service_spec.rb
new file mode 100644
index 00000000000..f2ed747b64e
--- /dev/null
+++ b/spec/services/bulk_imports/tree_export_service_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::TreeExportService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:export_path) { Dir.mktmpdir }
+ let_it_be(:relation) { 'issues' }
+
+ subject(:service) { described_class.new(project, export_path, relation) }
+
+ describe '#execute' do
+ it 'executes export service and archives exported data' do
+ expect_next_instance_of(Gitlab::ImportExport::Json::StreamingSerializer) do |serializer|
+ expect(serializer).to receive(:serialize_relation)
+ end
+
+ subject.execute
+ end
+
+ context 'when unsupported relation is passed' do
+ it 'raises an error' do
+ service = described_class.new(project, export_path, 'unsupported')
+
+ expect { service.execute }.to raise_error(BulkImports::Error, 'Unsupported relation export type')
+ end
+ end
+ end
+
+ describe '#exported_filename' do
+ it 'returns filename of the exported file' do
+ expect(subject.exported_filename).to eq('issues.ndjson')
+ end
+ end
+end
diff --git a/spec/services/ci/archive_trace_service_spec.rb b/spec/services/ci/archive_trace_service_spec.rb
index 071b5c3b2f9..b08ba6fd5e5 100644
--- a/spec/services/ci/archive_trace_service_spec.rb
+++ b/spec/services/ci/archive_trace_service_spec.rb
@@ -88,6 +88,32 @@ RSpec.describe Ci::ArchiveTraceService, '#execute' do
subject
end
+
+ context 'job has archive and chunks' do
+ let(:job) { create(:ci_build, :success, :trace_artifact) }
+
+ before do
+ create(:ci_build_trace_chunk, build: job, chunk_index: 0)
+ end
+
+ context 'archive is not completed' do
+ before do
+ job.job_artifacts_trace.file.remove!
+ end
+
+ it 'cleanups any stale archive data' do
+ expect(job.job_artifacts_trace).to be_present
+
+ subject
+
+ expect(job.reload.job_artifacts_trace).to be_nil
+ end
+ end
+
+ it 'removes trace chunks' do
+ expect { subject }.to change { job.trace_chunks.count }.to(0)
+ end
+ end
end
context 'when the archival process is backed off' do
diff --git a/spec/services/ci/create_pipeline_service/include_spec.rb b/spec/services/ci/create_pipeline_service/include_spec.rb
index 46271ee36c0..5e7dace8e15 100644
--- a/spec/services/ci/create_pipeline_service/include_spec.rb
+++ b/spec/services/ci/create_pipeline_service/include_spec.rb
@@ -58,17 +58,6 @@ RSpec.describe Ci::CreatePipelineService do
expect(pipeline).to be_created_successfully
expect(pipeline.processables.pluck(:name)).to contain_exactly('job', 'rspec')
end
-
- context 'when the FF ci_include_rules is disabled' do
- before do
- stub_feature_flags(ci_include_rules: false)
- end
-
- it 'includes the job in the file' do
- expect(pipeline).to be_created_successfully
- expect(pipeline.processables.pluck(:name)).to contain_exactly('job', 'rspec')
- end
- end
end
context 'when the rules does not match' do
@@ -78,17 +67,6 @@ RSpec.describe Ci::CreatePipelineService do
expect(pipeline).to be_created_successfully
expect(pipeline.processables.pluck(:name)).to contain_exactly('job')
end
-
- context 'when the FF ci_include_rules is disabled' do
- before do
- stub_feature_flags(ci_include_rules: false)
- end
-
- it 'includes the job in the file' do
- expect(pipeline).to be_created_successfully
- expect(pipeline.processables.pluck(:name)).to contain_exactly('job', 'rspec')
- end
- end
end
end
end
diff --git a/spec/services/ci/drop_pipeline_service_spec.rb b/spec/services/ci/drop_pipeline_service_spec.rb
index c6a118c6083..ddb53712d9c 100644
--- a/spec/services/ci/drop_pipeline_service_spec.rb
+++ b/spec/services/ci/drop_pipeline_service_spec.rb
@@ -50,13 +50,14 @@ RSpec.describe Ci::DropPipelineService do
end.count
writes_per_build = 2
+ load_balancer_queries = 3
expected_reads_count = control_count - writes_per_build
create_list(:ci_build, 5, :running, pipeline: cancelable_pipeline)
expect do
drop_pipeline!(cancelable_pipeline)
- end.not_to exceed_query_limit(expected_reads_count + (5 * writes_per_build))
+ end.not_to exceed_query_limit(expected_reads_count + (5 * writes_per_build) + load_balancer_queries)
end
end
end
diff --git a/spec/services/ci/pipelines/add_job_service_spec.rb b/spec/services/ci/pipelines/add_job_service_spec.rb
index 3a77d26dd9e..709a840c644 100644
--- a/spec/services/ci/pipelines/add_job_service_spec.rb
+++ b/spec/services/ci/pipelines/add_job_service_spec.rb
@@ -77,19 +77,6 @@ RSpec.describe Ci::Pipelines::AddJobService do
expect(execute).to be_success
expect(execute.payload[:job]).to eq(job)
end
-
- context 'when the FF ci_pipeline_add_job_with_lock is disabled' do
- before do
- stub_feature_flags(ci_pipeline_add_job_with_lock: false)
- end
-
- it 'does not use exclusive lock' do
- expect(Gitlab::ExclusiveLease).not_to receive(:new).with(lock_key, timeout: lock_timeout)
-
- expect(execute).to be_success
- expect(execute.payload[:job]).to eq(job)
- end
- end
end
end
end
diff --git a/spec/services/ci/pipelines/hook_service_spec.rb b/spec/services/ci/pipelines/hook_service_spec.rb
new file mode 100644
index 00000000000..0e1ef6afd0d
--- /dev/null
+++ b/spec/services/ci/pipelines/hook_service_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Pipelines::HookService do
+ describe '#execute_hooks' do
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:project) { create(:project, :repository, namespace: namespace) }
+ let_it_be(:pipeline) { create(:ci_empty_pipeline, :created, project: project) }
+
+ let(:hook_enabled) { true }
+ let!(:hook) { create(:project_hook, project: project, pipeline_events: hook_enabled) }
+ let(:hook_data) { double }
+
+ subject(:service) { described_class.new(pipeline) }
+
+ describe 'HOOK_NAME' do
+ specify { expect(described_class::HOOK_NAME).to eq(:pipeline_hooks) }
+ end
+
+ context 'with pipeline hooks enabled' do
+ before do
+ allow(Gitlab::DataBuilder::Pipeline).to receive(:build).with(pipeline).once.and_return(hook_data)
+ end
+
+ it 'calls pipeline.project.execute_hooks and pipeline.project.execute_integrations' do
+ create(:pipelines_email_integration, project: project)
+
+ expect(pipeline.project).to receive(:execute_hooks).with(hook_data, described_class::HOOK_NAME)
+ expect(pipeline.project).to receive(:execute_integrations).with(hook_data, described_class::HOOK_NAME)
+
+ service.execute
+ end
+ end
+
+ context 'with pipeline hooks and integrations disabled' do
+ let(:hook_enabled) { false }
+
+ it 'does not call pipeline.project.execute_hooks and pipeline.project.execute_integrations' do
+ expect(pipeline.project).not_to receive(:execute_hooks)
+ expect(pipeline.project).not_to receive(:execute_integrations)
+
+ service.execute
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/play_bridge_service_spec.rb b/spec/services/ci/play_bridge_service_spec.rb
index 3f97bfdf5ae..56b1615a56d 100644
--- a/spec/services/ci/play_bridge_service_spec.rb
+++ b/spec/services/ci/play_bridge_service_spec.rb
@@ -23,18 +23,18 @@ RSpec.describe Ci::PlayBridgeService, '#execute' do
expect(bridge.reload).to be_pending
end
- it 'enqueues Ci::CreateCrossProjectPipelineWorker' do
- expect(::Ci::CreateCrossProjectPipelineWorker).to receive(:perform_async).with(bridge.id)
-
- execute_service
- end
-
it "updates bridge's user" do
execute_service
expect(bridge.reload.user).to eq(user)
end
+ it 'enqueues Ci::CreateDownstreamPipelineWorker' do
+ expect(::Ci::CreateDownstreamPipelineWorker).to receive(:perform_async).with(bridge.id)
+
+ execute_service
+ end
+
context 'when a subsequent job is skipped' do
let!(:job) { create(:ci_build, :skipped, pipeline: pipeline, stage_idx: bridge.stage_idx + 1) }
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index b5bf0adadaf..404e1bf7c87 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -10,11 +10,9 @@ RSpec.describe Ci::ProcessPipelineService do
end
let(:pipeline_processing_events_counter) { double(increment: true) }
- let(:legacy_update_jobs_counter) { double(increment: true) }
let(:metrics) do
- double(pipeline_processing_events_counter: pipeline_processing_events_counter,
- legacy_update_jobs_counter: legacy_update_jobs_counter)
+ double(pipeline_processing_events_counter: pipeline_processing_events_counter)
end
subject { described_class.new(pipeline) }
@@ -33,68 +31,4 @@ RSpec.describe Ci::ProcessPipelineService do
subject.execute
end
end
-
- describe 'updating a list of retried builds' do
- let!(:build_retried) { create_build('build') }
- let!(:build) { create_build('build') }
- let!(:test) { create_build('test') }
-
- context 'when FF ci_remove_update_retried_from_process_pipeline is enabled' do
- it 'does not update older builds as retried' do
- subject.execute
-
- expect(all_builds.latest).to contain_exactly(build, build_retried, test)
- expect(all_builds.retried).to be_empty
- end
- end
-
- context 'when FF ci_remove_update_retried_from_process_pipeline is disabled' do
- before do
- stub_feature_flags(ci_remove_update_retried_from_process_pipeline: false)
- end
-
- it 'returns unique statuses' do
- subject.execute
-
- expect(all_builds.latest).to contain_exactly(build, test)
- expect(all_builds.retried).to contain_exactly(build_retried)
- end
-
- it 'increments the counter' do
- expect(legacy_update_jobs_counter).to receive(:increment)
-
- subject.execute
- end
-
- it 'logs the project and pipeline id' do
- expect(Gitlab::AppJsonLogger).to receive(:info).with(event: 'update_retried_is_used',
- project_id: project.id,
- pipeline_id: pipeline.id)
-
- subject.execute
- end
-
- context 'when the previous build has already retried column true' do
- before do
- build_retried.update_columns(retried: true)
- end
-
- it 'does not increment the counter' do
- expect(legacy_update_jobs_counter).not_to receive(:increment)
-
- subject.execute
- end
- end
- end
-
- private
-
- def create_build(name, **opts)
- create(:ci_build, :created, pipeline: pipeline, name: name, **opts)
- end
-
- def all_builds
- pipeline.builds.order(:stage_idx, :id)
- end
- end
end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 73ff15ec393..650353eb751 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -14,7 +14,7 @@ module Ci
let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
describe '#execute' do
- context 'checks database loadbalancing stickiness', :db_load_balancing do
+ context 'checks database loadbalancing stickiness' do
subject { described_class.new(shared_runner).execute }
before do
@@ -22,14 +22,14 @@ module Ci
end
it 'result is valid if replica did caught-up' do
- expect(Gitlab::Database::LoadBalancing::Sticking).to receive(:all_caught_up?)
+ expect(ApplicationRecord.sticking).to receive(:all_caught_up?)
.with(:runner, shared_runner.id) { true }
expect(subject).to be_valid
end
it 'result is invalid if replica did not caught-up' do
- expect(Gitlab::Database::LoadBalancing::Sticking).to receive(:all_caught_up?)
+ expect(ApplicationRecord.sticking).to receive(:all_caught_up?)
.with(:runner, shared_runner.id) { false }
expect(subject).not_to be_valid
@@ -87,19 +87,25 @@ module Ci
end
context 'for specific runner' do
- context 'with FF disabled' do
+ context 'with tables decoupling disabled' do
before do
stub_feature_flags(
ci_pending_builds_project_runners_decoupling: false,
ci_queueing_builds_enabled_checks: false)
end
+ around do |example|
+ allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
+ example.run
+ end
+ end
+
it 'does not pick a build' do
expect(execute(specific_runner)).to be_nil
end
end
- context 'with FF enabled' do
+ context 'with tables decoupling enabled' do
before do
stub_feature_flags(
ci_pending_builds_project_runners_decoupling: true,
@@ -266,17 +272,23 @@ module Ci
context 'and uses project runner' do
let(:build) { execute(specific_runner) }
- context 'with FF disabled' do
+ context 'with tables decoupling disabled' do
before do
stub_feature_flags(
ci_pending_builds_project_runners_decoupling: false,
ci_queueing_builds_enabled_checks: false)
end
+ around do |example|
+ allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
+ example.run
+ end
+ end
+
it { expect(build).to be_nil }
end
- context 'with FF enabled' do
+ context 'with tables decoupling enabled' do
before do
stub_feature_flags(
ci_pending_builds_project_runners_decoupling: true,
@@ -791,6 +803,12 @@ module Ci
stub_feature_flags(ci_queueing_denormalize_shared_runners_information: false)
end
+ around do |example|
+ allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
+ example.run
+ end
+ end
+
include_examples 'handles runner assignment'
end
@@ -807,6 +825,12 @@ module Ci
stub_feature_flags(ci_queueing_denormalize_tags_information: false)
end
+ around do |example|
+ allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
+ example.run
+ end
+ end
+
include_examples 'handles runner assignment'
end
@@ -815,6 +839,12 @@ module Ci
stub_feature_flags(ci_queueing_denormalize_namespace_traversal_ids: false)
end
+ around do |example|
+ allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
+ example.run
+ end
+ end
+
include_examples 'handles runner assignment'
end
end
@@ -824,6 +854,12 @@ module Ci
stub_feature_flags(ci_pending_builds_queue_source: false)
end
+ around do |example|
+ allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
+ example.run
+ end
+ end
+
include_examples 'handles runner assignment'
end
end
diff --git a/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb b/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
index 53aa842bc28..194203a422c 100644
--- a/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
+++ b/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
@@ -48,6 +48,92 @@ RSpec.describe Ci::ResourceGroups::AssignResourceFromResourceGroupService do
expect(build).to be_pending
end
end
+
+ context 'when process mode is oldest_first' do
+ let(:resource_group) { create(:ci_resource_group, process_mode: :oldest_first, project: project) }
+
+ it 'requests resource' do
+ subject
+
+ expect(build.reload).to be_pending
+ expect(build.resource).to be_present
+ end
+
+ context 'when the other job exists in the newer pipeline' do
+ let!(:build_2) { create(:ci_build, :waiting_for_resource, project: project, user: user, resource_group: resource_group) }
+
+ it 'requests resource for the job in the oldest pipeline' do
+ subject
+
+ expect(build.reload).to be_pending
+ expect(build.resource).to be_present
+ expect(build_2.reload).to be_waiting_for_resource
+ expect(build_2.resource).to be_nil
+ end
+ end
+
+ context 'when build is not `waiting_for_resource` state' do
+ let!(:build) { create(:ci_build, :created, project: project, user: user, resource_group: resource_group) }
+
+ it 'attempts to request a resource' do
+ expect_next_found_instance_of(Ci::Build) do |job|
+ expect(job).to receive(:enqueue_waiting_for_resource).and_call_original
+ end
+
+ subject
+ end
+
+ it 'does not change the job status' do
+ subject
+
+ expect(build.reload).to be_created
+ expect(build.resource).to be_nil
+ end
+ end
+ end
+
+ context 'when process mode is newest_first' do
+ let(:resource_group) { create(:ci_resource_group, process_mode: :newest_first, project: project) }
+
+ it 'requests resource' do
+ subject
+
+ expect(build.reload).to be_pending
+ expect(build.resource).to be_present
+ end
+
+ context 'when the other job exists in the newer pipeline' do
+ let!(:build_2) { create(:ci_build, :waiting_for_resource, project: project, user: user, resource_group: resource_group) }
+
+ it 'requests resource for the job in the newest pipeline' do
+ subject
+
+ expect(build.reload).to be_waiting_for_resource
+ expect(build.resource).to be_nil
+ expect(build_2.reload).to be_pending
+ expect(build_2.resource).to be_present
+ end
+ end
+
+ context 'when build is not `waiting_for_resource` state' do
+ let!(:build) { create(:ci_build, :created, project: project, user: user, resource_group: resource_group) }
+
+ it 'attempts to request a resource' do
+ expect_next_found_instance_of(Ci::Build) do |job|
+ expect(job).to receive(:enqueue_waiting_for_resource).and_call_original
+ end
+
+ subject
+ end
+
+ it 'does not change the job status' do
+ subject
+
+ expect(build.reload).to be_created
+ expect(build.resource).to be_nil
+ end
+ end
+ end
end
context 'when there are no available resources' do
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index ce2e6ba5e15..15c88c9f657 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe Ci::RetryBuildService do
job_artifacts_network_referee job_artifacts_dotenv
job_artifacts_cobertura needs job_artifacts_accessibility
job_artifacts_requirements job_artifacts_coverage_fuzzing
- job_artifacts_api_fuzzing].freeze
+ job_artifacts_api_fuzzing terraform_state_versions].freeze
ignore_accessors =
%i[type lock_version target_url base_tags trace_sections
@@ -88,6 +88,7 @@ RSpec.describe Ci::RetryBuildService do
create(:ci_job_variable, job: build)
create(:ci_build_need, build: build)
+ create(:terraform_state_version, build: build)
end
describe 'clone accessors' do
@@ -276,13 +277,17 @@ RSpec.describe Ci::RetryBuildService do
end
end
- describe '#reprocess' do
+ describe '#clone!' do
let(:new_build) do
travel_to(1.second.from_now) do
- service.reprocess!(build)
+ service.clone!(build)
end
end
+ it 'raises an error when an unexpected class is passed' do
+ expect { service.clone!(create(:ci_build).present) }.to raise_error(TypeError)
+ end
+
context 'when user has ability to execute build' do
before do
stub_not_protect_default_branch
@@ -338,7 +343,7 @@ RSpec.describe Ci::RetryBuildService do
let(:user) { reporter }
it 'raises an error' do
- expect { service.reprocess!(build) }
+ expect { service.clone!(build) }
.to raise_error Gitlab::Access::AccessDeniedError
end
end
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index 3e2e9f07723..12106b70969 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -316,6 +316,26 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
expect(bridge.reload).to be_pending
end
end
+
+ context 'when there are skipped jobs in later stages' do
+ before do
+ create_build('build 1', :success, 0)
+ create_build('test 2', :failed, 1)
+ create_build('report 3', :skipped, 2)
+ create_bridge('deploy 4', :skipped, 2)
+ end
+
+ it 'retries failed jobs and processes skipped jobs' do
+ service.execute(pipeline)
+
+ expect(build('build 1')).to be_success
+ expect(build('test 2')).to be_pending
+ expect(build('report 3')).to be_created
+ expect(build('deploy 4')).to be_created
+
+ expect(pipeline.reload).to be_running
+ end
+ end
end
context 'when user is not allowed to retry pipeline' do
@@ -390,16 +410,25 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
pipeline.reload.statuses
end
+ # The method name can be confusing because this can actually return both Ci::Build and Ci::Bridge
def build(name)
statuses.latest.find_by(name: name)
end
def create_build(name, status, stage_num, **opts)
- create(:ci_build, name: name,
- status: status,
- stage: "stage_#{stage_num}",
- stage_idx: stage_num,
- pipeline: pipeline, **opts) do |build|
+ create_processable(:ci_build, name, status, stage_num, **opts)
+ end
+
+ def create_bridge(name, status, stage_num, **opts)
+ create_processable(:ci_bridge, name, status, stage_num, **opts)
+ end
+
+ def create_processable(type, name, status, stage_num, **opts)
+ create(type, name: name,
+ status: status,
+ stage: "stage_#{stage_num}",
+ stage_idx: stage_num,
+ pipeline: pipeline, **opts) do |_job|
::Ci::ProcessPipelineService.new(pipeline).execute
end
end
diff --git a/spec/services/ci/stuck_builds/drop_service_spec.rb b/spec/services/ci/stuck_builds/drop_pending_service_spec.rb
index 8dfd1bc1b3d..aa0526edf57 100644
--- a/spec/services/ci/stuck_builds/drop_service_spec.rb
+++ b/spec/services/ci/stuck_builds/drop_pending_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::StuckBuilds::DropService do
+RSpec.describe Ci::StuckBuilds::DropPendingService do
let!(:runner) { create :ci_runner }
let!(:job) { create :ci_build, runner: runner }
let(:created_at) { }
@@ -17,48 +17,6 @@ RSpec.describe Ci::StuckBuilds::DropService do
job.update!(job_attributes)
end
- shared_examples 'job is dropped' do
- it 'changes status' do
- expect(service).to receive(:drop).exactly(3).times.and_call_original
- expect(service).to receive(:drop_stuck).exactly(:once).and_call_original
-
- service.execute
- job.reload
-
- expect(job).to be_failed
- expect(job).to be_stuck_or_timeout_failure
- end
-
- context 'when job have data integrity problem' do
- it "does drop the job and logs the reason" do
- job.update_columns(yaml_variables: '[{"key" => "value"}]')
-
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(anything, a_hash_including(build_id: job.id))
- .once
- .and_call_original
-
- service.execute
- job.reload
-
- expect(job).to be_failed
- expect(job).to be_data_integrity_failure
- end
- end
- end
-
- shared_examples 'job is unchanged' do
- it 'does not change status' do
- expect(service).to receive(:drop).exactly(3).times.and_call_original
- expect(service).to receive(:drop_stuck).exactly(:once).and_call_original
-
- service.execute
- job.reload
-
- expect(job.status).to eq(status)
- end
- end
-
context 'when job is pending' do
let(:status) { 'pending' }
@@ -75,13 +33,13 @@ RSpec.describe Ci::StuckBuilds::DropService do
context 'when created_at is the same as updated_at' do
let(:created_at) { 1.5.days.ago }
- it_behaves_like 'job is dropped'
+ it_behaves_like 'job is dropped with failure reason', 'stuck_or_timeout_failure'
end
context 'when created_at is before updated_at' do
let(:created_at) { 3.days.ago }
- it_behaves_like 'job is dropped'
+ it_behaves_like 'job is dropped with failure reason', 'stuck_or_timeout_failure'
end
context 'when created_at is outside lookback window' do
@@ -149,13 +107,13 @@ RSpec.describe Ci::StuckBuilds::DropService do
context 'when created_at is the same as updated_at' do
let(:created_at) { 1.5.hours.ago }
- it_behaves_like 'job is dropped'
+ it_behaves_like 'job is dropped with failure reason', 'stuck_or_timeout_failure'
end
context 'when created_at is before updated_at' do
let(:created_at) { 3.days.ago }
- it_behaves_like 'job is dropped'
+ it_behaves_like 'job is dropped with failure reason', 'stuck_or_timeout_failure'
end
context 'when created_at is outside lookback window' do
@@ -195,7 +153,7 @@ RSpec.describe Ci::StuckBuilds::DropService do
context 'when job was updated_at more than an hour ago' do
let(:updated_at) { 2.hours.ago }
- it_behaves_like 'job is dropped'
+ it_behaves_like 'job is unchanged'
end
context 'when job was updated in less than 1 hour ago' do
@@ -238,47 +196,6 @@ RSpec.describe Ci::StuckBuilds::DropService do
job.project.update!(pending_delete: true)
end
- it_behaves_like 'job is dropped'
- end
-
- describe 'drop stale scheduled builds' do
- let(:status) { 'scheduled' }
- let(:updated_at) { }
-
- context 'when scheduled at 2 hours ago but it is not executed yet' do
- let!(:job) { create(:ci_build, :scheduled, scheduled_at: 2.hours.ago) }
-
- it 'drops the stale scheduled build' do
- expect(Ci::Build.scheduled.count).to eq(1)
- expect(job).to be_scheduled
-
- service.execute
- job.reload
-
- expect(Ci::Build.scheduled.count).to eq(0)
- expect(job).to be_failed
- expect(job).to be_stale_schedule
- end
- end
-
- context 'when scheduled at 30 minutes ago but it is not executed yet' do
- let!(:job) { create(:ci_build, :scheduled, scheduled_at: 30.minutes.ago) }
-
- it 'does not drop the stale scheduled build yet' do
- expect(Ci::Build.scheduled.count).to eq(1)
- expect(job).to be_scheduled
-
- service.execute
-
- expect(Ci::Build.scheduled.count).to eq(1)
- expect(job).to be_scheduled
- end
- end
-
- context 'when there are no stale scheduled builds' do
- it 'does not drop the stale scheduled build yet' do
- expect { service.execute }.not_to raise_error
- end
- end
+ it_behaves_like 'job is unchanged'
end
end
diff --git a/spec/services/ci/stuck_builds/drop_running_service_spec.rb b/spec/services/ci/stuck_builds/drop_running_service_spec.rb
new file mode 100644
index 00000000000..c1c92c2b8e2
--- /dev/null
+++ b/spec/services/ci/stuck_builds/drop_running_service_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::StuckBuilds::DropRunningService do
+ let!(:runner) { create :ci_runner }
+ let!(:job) { create(:ci_build, runner: runner, created_at: created_at, updated_at: updated_at, status: status) }
+
+ subject(:service) { described_class.new }
+
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ shared_examples 'running builds' do
+ context 'when job is running' do
+ let(:status) { 'running' }
+ let(:outdated_time) { described_class::BUILD_RUNNING_OUTDATED_TIMEOUT.ago - 30.minutes }
+ let(:fresh_time) { described_class::BUILD_RUNNING_OUTDATED_TIMEOUT.ago + 30.minutes }
+
+ context 'when job is outdated' do
+ let(:created_at) { outdated_time }
+ let(:updated_at) { outdated_time }
+
+ it_behaves_like 'job is dropped with failure reason', 'stuck_or_timeout_failure'
+ end
+
+ context 'when job is fresh' do
+ let(:created_at) { fresh_time }
+ let(:updated_at) { fresh_time }
+
+ it_behaves_like 'job is unchanged'
+ end
+
+ context 'when job freshly updated' do
+ let(:created_at) { outdated_time }
+ let(:updated_at) { fresh_time }
+
+ it_behaves_like 'job is unchanged'
+ end
+ end
+ end
+
+ include_examples 'running builds'
+
+ context 'when new query flag is disabled' do
+ before do
+ stub_feature_flags(ci_new_query_for_running_stuck_jobs: false)
+ end
+
+ include_examples 'running builds'
+ end
+
+ %w(success skipped failed canceled scheduled pending).each do |status|
+ context "when job is #{status}" do
+ let(:status) { status }
+ let(:updated_at) { 2.days.ago }
+
+ context 'when created_at is the same as updated_at' do
+ let(:created_at) { 2.days.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+
+ context 'when created_at is before updated_at' do
+ let(:created_at) { 3.days.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb b/spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb
new file mode 100644
index 00000000000..1416fab3d25
--- /dev/null
+++ b/spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::StuckBuilds::DropScheduledService do
+ let_it_be(:runner) { create :ci_runner }
+
+ let!(:job) { create :ci_build, :scheduled, scheduled_at: scheduled_at, runner: runner }
+
+ subject(:service) { described_class.new }
+
+ context 'when job is scheduled' do
+ context 'for more than an hour ago' do
+ let(:scheduled_at) { 2.hours.ago }
+
+ it_behaves_like 'job is dropped with failure reason', 'stale_schedule'
+ end
+
+ context 'for less than 1 hour ago' do
+ let(:scheduled_at) { 30.minutes.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+ end
+
+ %w(success skipped failed canceled running pending).each do |status|
+ context "when job is #{status}" do
+ before do
+ job.update!(status: status)
+ end
+
+ context 'and scheduled for more than an hour ago' do
+ let(:scheduled_at) { 2.hours.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+
+ context 'and scheduled for less than 1 hour ago' do
+ let(:scheduled_at) { 30.minutes.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+ end
+ end
+
+ context 'when there are no stale scheduled builds' do
+ let(:job) { }
+
+ it 'does not drop the stale scheduled build yet' do
+ expect { service.execute }.not_to raise_error
+ end
+ end
+end
diff --git a/spec/services/ci/update_build_state_service_spec.rb b/spec/services/ci/update_build_state_service_spec.rb
index 5bb3843da8f..e4dd3d0500f 100644
--- a/spec/services/ci/update_build_state_service_spec.rb
+++ b/spec/services/ci/update_build_state_service_spec.rb
@@ -112,6 +112,14 @@ RSpec.describe Ci::UpdateBuildStateService do
.not_to have_received(:increment_trace_operation)
.with(operation: :invalid)
end
+
+ it 'does not increment chunks_invalid_checksum trace metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_checksum)
+ end
end
context 'when build trace has been migrated' do
@@ -174,6 +182,14 @@ RSpec.describe Ci::UpdateBuildStateService do
.to have_received(:increment_trace_operation)
.with(operation: :invalid)
end
+
+ it 'increments chunks_invalid_checksum trace metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_checksum)
+ end
end
context 'when trace checksum is valid' do
@@ -191,6 +207,14 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.not_to have_received(:increment_trace_operation)
.with(operation: :corrupted)
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_checksum)
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_size)
end
context 'when using deprecated parameters' do
@@ -208,6 +232,14 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.not_to have_received(:increment_trace_operation)
.with(operation: :corrupted)
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_checksum)
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_size)
end
end
end
@@ -227,6 +259,14 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.to have_received(:increment_trace_operation)
.with(operation: :corrupted)
+
+ expect(metrics)
+ .to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_checksum)
+
+ expect(metrics)
+ .to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_size)
end
end
@@ -243,8 +283,16 @@ RSpec.describe Ci::UpdateBuildStateService do
.with(operation: :invalid)
expect(metrics)
+ .to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_checksum)
+
+ expect(metrics)
.not_to have_received(:increment_trace_operation)
.with(operation: :corrupted)
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_size)
end
end
@@ -325,6 +373,10 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.not_to have_received(:increment_trace_operation)
.with(operation: :invalid)
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_checksum)
end
context 'when build pending state is outdated' do
diff --git a/spec/services/ci/update_pending_build_service_spec.rb b/spec/services/ci/update_pending_build_service_spec.rb
index d842042de40..d36564938c8 100644
--- a/spec/services/ci/update_pending_build_service_spec.rb
+++ b/spec/services/ci/update_pending_build_service_spec.rb
@@ -3,21 +3,23 @@
require 'spec_helper'
RSpec.describe Ci::UpdatePendingBuildService do
- describe '#execute' do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, namespace: group) }
- let_it_be(:pending_build_1) { create(:ci_pending_build, project: project, instance_runners_enabled: false) }
- let_it_be(:pending_build_2) { create(:ci_pending_build, project: project, instance_runners_enabled: true) }
- let_it_be(:update_params) { { instance_runners_enabled: true } }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be_with_reload(:pending_build_1) { create(:ci_pending_build, project: project, instance_runners_enabled: false) }
+ let_it_be_with_reload(:pending_build_2) { create(:ci_pending_build, project: project, instance_runners_enabled: true) }
+ let_it_be(:update_params) { { instance_runners_enabled: true } }
+
+ let(:service) { described_class.new(model, update_params) }
- subject(:service) { described_class.new(model, update_params).execute }
+ describe '#execute' do
+ subject(:update_pending_builds) { service.execute }
context 'validations' do
context 'when model is invalid' do
let(:model) { pending_build_1 }
it 'raises an error' do
- expect { service }.to raise_error(described_class::InvalidModelError)
+ expect { update_pending_builds }.to raise_error(described_class::InvalidModelError)
end
end
@@ -26,7 +28,7 @@ RSpec.describe Ci::UpdatePendingBuildService do
let(:update_params) { { minutes_exceeded: true } }
it 'raises an error' do
- expect { service }.to raise_error(described_class::InvalidParamsError)
+ expect { update_pending_builds }.to raise_error(described_class::InvalidParamsError)
end
end
end
@@ -35,10 +37,10 @@ RSpec.describe Ci::UpdatePendingBuildService do
let(:model) { group }
it 'updates all pending builds', :aggregate_failures do
- service
+ update_pending_builds
- expect(pending_build_1.reload.instance_runners_enabled).to be_truthy
- expect(pending_build_2.reload.instance_runners_enabled).to be_truthy
+ expect(pending_build_1.instance_runners_enabled).to be_truthy
+ expect(pending_build_2.instance_runners_enabled).to be_truthy
end
context 'when ci_pending_builds_maintain_shared_runners_data is disabled' do
@@ -47,10 +49,10 @@ RSpec.describe Ci::UpdatePendingBuildService do
end
it 'does not update all pending builds', :aggregate_failures do
- service
+ update_pending_builds
- expect(pending_build_1.reload.instance_runners_enabled).to be_falsey
- expect(pending_build_2.reload.instance_runners_enabled).to be_truthy
+ expect(pending_build_1.instance_runners_enabled).to be_falsey
+ expect(pending_build_2.instance_runners_enabled).to be_truthy
end
end
end
@@ -59,10 +61,10 @@ RSpec.describe Ci::UpdatePendingBuildService do
let(:model) { project }
it 'updates all pending builds', :aggregate_failures do
- service
+ update_pending_builds
- expect(pending_build_1.reload.instance_runners_enabled).to be_truthy
- expect(pending_build_2.reload.instance_runners_enabled).to be_truthy
+ expect(pending_build_1.instance_runners_enabled).to be_truthy
+ expect(pending_build_2.instance_runners_enabled).to be_truthy
end
context 'when ci_pending_builds_maintain_shared_runners_data is disabled' do
@@ -71,10 +73,10 @@ RSpec.describe Ci::UpdatePendingBuildService do
end
it 'does not update all pending builds', :aggregate_failures do
- service
+ update_pending_builds
- expect(pending_build_1.reload.instance_runners_enabled).to be_falsey
- expect(pending_build_2.reload.instance_runners_enabled).to be_truthy
+ expect(pending_build_1.instance_runners_enabled).to be_falsey
+ expect(pending_build_2.instance_runners_enabled).to be_truthy
end
end
end
diff --git a/spec/services/clusters/agent_tokens/create_service_spec.rb b/spec/services/clusters/agent_tokens/create_service_spec.rb
new file mode 100644
index 00000000000..92629af06c8
--- /dev/null
+++ b/spec/services/clusters/agent_tokens/create_service_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::AgentTokens::CreateService do
+ subject(:service) { described_class.new(container: project, current_user: user, params: params) }
+
+ let_it_be(:user) { create(:user) }
+
+ let(:cluster_agent) { create(:cluster_agent) }
+ let(:project) { cluster_agent.project }
+ let(:params) { { agent_id: cluster_agent.id, description: 'token description', name: 'token name' } }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ it 'does not create a new token due to user permissions' do
+ expect { subject }.not_to change(::Clusters::AgentToken, :count)
+ end
+
+ it 'returns permission errors', :aggregate_failures do
+ expect(subject.status).to eq(:error)
+ expect(subject.message).to eq('User has insufficient permissions to create a token for this project')
+ end
+
+ context 'with user permissions' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'creates a new token' do
+ expect { subject }.to change { ::Clusters::AgentToken.count }.by(1)
+ end
+
+ it 'returns success status', :aggregate_failures do
+ expect(subject.status).to eq(:success)
+ expect(subject.message).to be_nil
+ end
+
+ it 'returns token information', :aggregate_failures do
+ token = subject.payload[:token]
+
+ expect(subject.payload[:secret]).not_to be_nil
+
+ expect(token.created_by_user).to eq(user)
+ expect(token.description).to eq(params[:description])
+ expect(token.name).to eq(params[:name])
+ end
+
+ context 'when params are invalid' do
+ let(:params) { { agent_id: 'bad_id' } }
+
+ it 'does not create a new token' do
+ expect { subject }.not_to change(::Clusters::AgentToken, :count)
+ end
+
+ it 'returns validation errors', :aggregate_failures do
+ expect(subject.status).to eq(:error)
+ expect(subject.message).to eq(["Agent must exist", "Name can't be blank"])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/clusters/agents/create_service_spec.rb b/spec/services/clusters/agents/create_service_spec.rb
new file mode 100644
index 00000000000..2b3bbcae13c
--- /dev/null
+++ b/spec/services/clusters/agents/create_service_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::Agents::CreateService do
+ subject(:service) { described_class.new(project, user) }
+
+ let(:project) { create(:project, :public, :repository) }
+ let(:user) { create(:user) }
+
+ describe '#execute' do
+ context 'without user permissions' do
+ it 'returns errors when user does not have permissions' do
+ expect(service.execute(name: 'missing-permissions')).to eq({
+ status: :error,
+ message: 'You have insufficient permissions to create a cluster agent for this project'
+ })
+ end
+ end
+
+ context 'with user permissions' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'creates a new clusters_agent' do
+ expect { service.execute(name: 'with-user') }.to change { ::Clusters::Agent.count }.by(1)
+ end
+
+ it 'returns success status', :aggregate_failures do
+ result = service.execute(name: 'success')
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:message]).to be_nil
+ end
+
+ it 'returns agent values', :aggregate_failures do
+ new_agent = service.execute(name: 'new-agent')[:cluster_agent]
+
+ expect(new_agent.name).to eq('new-agent')
+ expect(new_agent.created_by_user).to eq(user)
+ end
+
+ it 'generates an error message when name is invalid' do
+ expect(service.execute(name: '@bad_agent_name!')).to eq({
+ status: :error,
+ message: ["Name can contain only lowercase letters, digits, and '-', but cannot start or end with '-'"]
+ })
+ end
+ end
+ end
+end
diff --git a/spec/services/clusters/agents/delete_service_spec.rb b/spec/services/clusters/agents/delete_service_spec.rb
new file mode 100644
index 00000000000..1d6bc9618dd
--- /dev/null
+++ b/spec/services/clusters/agents/delete_service_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::Agents::DeleteService do
+ subject(:service) { described_class.new(container: project, current_user: user) }
+
+ let(:cluster_agent) { create(:cluster_agent) }
+ let(:project) { cluster_agent.project }
+ let(:user) { create(:user) }
+
+ describe '#execute' do
+ context 'without user permissions' do
+ it 'fails to delete when the user has no permissions', :aggregate_failures do
+ response = service.execute(cluster_agent)
+
+ expect(response.status).to eq(:error)
+ expect(response.message).to eq('You have insufficient permissions to delete this cluster agent')
+
+ expect { cluster_agent.reload }.not_to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'with user permissions' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'deletes a cluster agent', :aggregate_failures do
+ expect { service.execute(cluster_agent) }.to change { ::Clusters::Agent.count }.by(-1)
+ expect { cluster_agent.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+ end
+end
diff --git a/spec/services/concerns/rate_limited_service_spec.rb b/spec/services/concerns/rate_limited_service_spec.rb
new file mode 100644
index 00000000000..f73871b7e44
--- /dev/null
+++ b/spec/services/concerns/rate_limited_service_spec.rb
@@ -0,0 +1,196 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe RateLimitedService do
+ let(:key) { :issues_create }
+ let(:scope) { [:project, :current_user] }
+ let(:opts) { { scope: scope, users_allowlist: -> { [User.support_bot.username] } } }
+ let(:rate_limiter_klass) { ::Gitlab::ApplicationRateLimiter }
+ let(:rate_limiter_instance) { rate_limiter_klass.new(key, **opts) }
+
+ describe 'RateLimitedError' do
+ subject { described_class::RateLimitedError.new(key: key, rate_limiter: rate_limiter_instance) }
+
+ describe '#headers' do
+ it 'returns a Hash of HTTP headers' do
+ # TODO: This will be fleshed out in https://gitlab.com/gitlab-org/gitlab/-/issues/342370
+ expected_headers = {}
+
+ expect(subject.headers).to eq(expected_headers)
+ end
+ end
+
+ describe '#log_request' do
+ it 'logs the request' do
+ request = instance_double(Grape::Request)
+ user = instance_double(User)
+
+ expect(rate_limiter_klass).to receive(:log_request).with(request, "#{key}_request_limit".to_sym, user)
+
+ subject.log_request(request, user)
+ end
+ end
+ end
+
+ describe 'RateLimiterScopedAndKeyed' do
+ subject { described_class::RateLimiterScopedAndKeyed.new(key: key, opts: opts, rate_limiter_klass: rate_limiter_klass) }
+
+ describe '#rate_limit!' do
+ let(:project_with_feature_enabled) { create(:project) }
+ let(:project_without_feature_enabled) { create(:project) }
+
+ let(:project) { nil }
+
+ let(:current_user) { create(:user) }
+ let(:service) { instance_double(Issues::CreateService, project: project, current_user: current_user) }
+ let(:evaluated_scope) { [project, current_user] }
+ let(:evaluated_opts) { { scope: evaluated_scope, users_allowlist: %w[support-bot] } }
+ let(:rate_limited_service_issues_create_feature_enabled) { nil }
+
+ before do
+ allow(rate_limiter_klass).to receive(:new).with(key, **evaluated_opts).and_return(rate_limiter_instance)
+ stub_feature_flags(rate_limited_service_issues_create: rate_limited_service_issues_create_feature_enabled)
+ end
+
+ shared_examples 'a service that does not attempt to throttle' do
+ it 'does not attempt to throttle' do
+ expect(rate_limiter_instance).not_to receive(:throttled?)
+
+ expect(subject.rate_limit!(service)).to be_nil
+ end
+ end
+
+ shared_examples 'a service that does attempt to throttle' do
+ before do
+ allow(rate_limiter_instance).to receive(:throttled?).and_return(throttled)
+ end
+
+ context 'when rate limiting is not in effect' do
+ let(:throttled) { false }
+
+ it 'does not raise an exception' do
+ expect(subject.rate_limit!(service)).to be_nil
+ end
+ end
+
+ context 'when rate limiting is in effect' do
+ let(:throttled) { true }
+
+ it 'raises a RateLimitedError exception' do
+ expect { subject.rate_limit!(service) }.to raise_error(described_class::RateLimitedError, 'This endpoint has been requested too many times. Try again later.')
+ end
+ end
+ end
+
+ context 'when :rate_limited_service_issues_create feature is globally disabled' do
+ let(:rate_limited_service_issues_create_feature_enabled) { false }
+
+ it_behaves_like 'a service that does not attempt to throttle'
+ end
+
+ context 'when :rate_limited_service_issues_create feature is globally enabled' do
+ let(:throttled) { nil }
+ let(:rate_limited_service_issues_create_feature_enabled) { true }
+ let(:project) { project_without_feature_enabled }
+
+ it_behaves_like 'a service that does attempt to throttle'
+ end
+
+ context 'when :rate_limited_service_issues_create feature is enabled for project_with_feature_enabled' do
+ let(:throttled) { nil }
+ let(:rate_limited_service_issues_create_feature_enabled) { project_with_feature_enabled }
+
+ context 'for project_without_feature_enabled' do
+ let(:project) { project_without_feature_enabled }
+
+ it_behaves_like 'a service that does not attempt to throttle'
+ end
+
+ context 'for project_with_feature_enabled' do
+ let(:project) { project_with_feature_enabled }
+
+ it_behaves_like 'a service that does attempt to throttle'
+ end
+ end
+ end
+ end
+
+ describe '#execute_without_rate_limiting' do
+ let(:rate_limiter_scoped_and_keyed) { instance_double(RateLimitedService::RateLimiterScopedAndKeyed) }
+ let(:subject) do
+ local_key = key
+ local_opts = opts
+
+ Class.new do
+ prepend RateLimitedService
+
+ rate_limit key: local_key, opts: local_opts
+
+ def execute(*args, **kwargs)
+ 'main logic here'
+ end
+ end.new
+ end
+
+ before do
+ allow(RateLimitedService::RateLimiterScopedAndKeyed).to receive(:new).with(key: key, opts: opts, rate_limiter_klass: rate_limiter_klass).and_return(rate_limiter_scoped_and_keyed)
+ end
+
+ context 'bypasses rate limiting' do
+ it 'calls super' do
+ expect(rate_limiter_scoped_and_keyed).not_to receive(:rate_limit!).with(subject)
+
+ expect(subject.execute_without_rate_limiting).to eq('main logic here')
+ end
+ end
+ end
+
+ describe '#execute' do
+ context 'when rate_limit has not been called' do
+ let(:subject) { Class.new { prepend RateLimitedService }.new }
+
+ it 'raises a RateLimitedNotSetupError exception' do
+ expect { subject.execute }.to raise_error(described_class::RateLimitedNotSetupError)
+ end
+ end
+
+ context 'when rate_limit has been called' do
+ let(:rate_limiter_scoped_and_keyed) { instance_double(RateLimitedService::RateLimiterScopedAndKeyed) }
+ let(:subject) do
+ local_key = key
+ local_opts = opts
+
+ Class.new do
+ prepend RateLimitedService
+
+ rate_limit key: local_key, opts: local_opts
+
+ def execute(*args, **kwargs)
+ 'main logic here'
+ end
+ end.new
+ end
+
+ before do
+ allow(RateLimitedService::RateLimiterScopedAndKeyed).to receive(:new).with(key: key, opts: opts, rate_limiter_klass: rate_limiter_klass).and_return(rate_limiter_scoped_and_keyed)
+ end
+
+ context 'and applies rate limiting' do
+ it 'raises a RateLimitedService::RateLimitedError exception' do
+ expect(rate_limiter_scoped_and_keyed).to receive(:rate_limit!).with(subject).and_raise(RateLimitedService::RateLimitedError.new(key: key, rate_limiter: rate_limiter_instance))
+
+ expect { subject.execute }.to raise_error(RateLimitedService::RateLimitedError)
+ end
+ end
+
+ context 'but does not apply rate limiting' do
+ it 'calls super' do
+ expect(rate_limiter_scoped_and_keyed).to receive(:rate_limit!).with(subject).and_return(nil)
+
+ expect(subject.execute).to eq('main logic here')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/container_expiration_policies/cleanup_service_spec.rb b/spec/services/container_expiration_policies/cleanup_service_spec.rb
index 5f284b9dd8b..a1f76e5e5dd 100644
--- a/spec/services/container_expiration_policies/cleanup_service_spec.rb
+++ b/spec/services/container_expiration_policies/cleanup_service_spec.rb
@@ -24,8 +24,8 @@ RSpec.describe ContainerExpirationPolicies::CleanupService do
it 'completely clean up the repository' do
expect(Projects::ContainerRepository::CleanupTagsService)
- .to receive(:new).with(project, nil, cleanup_tags_service_params).and_return(cleanup_tags_service)
- expect(cleanup_tags_service).to receive(:execute).with(repository).and_return(status: :success)
+ .to receive(:new).with(repository, nil, cleanup_tags_service_params).and_return(cleanup_tags_service)
+ expect(cleanup_tags_service).to receive(:execute).and_return(status: :success)
response = subject
@@ -69,6 +69,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupService do
before_truncate_size: 800,
after_truncate_size: 200,
before_delete_size: 100,
+ cached_tags_count: 0,
deleted_size: 100
}
end
@@ -86,6 +87,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupService do
cleanup_tags_service_before_truncate_size: 800,
cleanup_tags_service_after_truncate_size: 200,
cleanup_tags_service_before_delete_size: 100,
+ cleanup_tags_service_cached_tags_count: 0,
cleanup_tags_service_deleted_size: 100
)
expect(ContainerRepository.waiting_for_cleanup.count).to eq(1)
diff --git a/spec/services/customer_relations/contacts/create_service_spec.rb b/spec/services/customer_relations/contacts/create_service_spec.rb
new file mode 100644
index 00000000000..71eb447055e
--- /dev/null
+++ b/spec/services/customer_relations/contacts/create_service_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CustomerRelations::Contacts::CreateService do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:not_found_or_does_not_belong) { 'The specified organization was not found or does not belong to this group' }
+
+ let(:params) { attributes_for(:contact, group: group) }
+
+ subject(:response) { described_class.new(group: group, current_user: user, params: params).execute }
+
+ context 'when user does not have permission' do
+ let_it_be(:group) { create(:group) }
+
+ before_all do
+ group.add_reporter(user)
+ end
+
+ it 'returns an error' do
+ expect(response).to be_error
+ expect(response.message).to match_array(['You have insufficient permissions to create a contact for this group'])
+ end
+ end
+
+ context 'when user has permission' do
+ let_it_be(:group) { create(:group) }
+
+ before_all do
+ group.add_developer(user)
+ end
+
+ it 'creates a contact' do
+ expect(response).to be_success
+ end
+
+ it 'returns an error when the contact is not persisted' do
+ params[:last_name] = nil
+
+ expect(response).to be_error
+ expect(response.message).to match_array(["Last name can't be blank"])
+ end
+
+ it 'returns an error when the organization_id is invalid' do
+ params[:organization_id] = non_existing_record_id
+
+ expect(response).to be_error
+ expect(response.message).to match_array([not_found_or_does_not_belong])
+ end
+
+ it 'returns an error when the organization belongs to a different group' do
+ organization = create(:organization)
+ params[:organization_id] = organization.id
+
+ expect(response).to be_error
+ expect(response.message).to match_array([not_found_or_does_not_belong])
+ end
+ end
+ end
+end
diff --git a/spec/services/customer_relations/contacts/update_service_spec.rb b/spec/services/customer_relations/contacts/update_service_spec.rb
new file mode 100644
index 00000000000..7c5fbabb600
--- /dev/null
+++ b/spec/services/customer_relations/contacts/update_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CustomerRelations::Contacts::UpdateService do
+ let_it_be(:user) { create(:user) }
+
+ let(:contact) { create(:contact, first_name: 'Mark', group: group) }
+
+ subject(:update) { described_class.new(group: group, current_user: user, params: params).execute(contact) }
+
+ describe '#execute' do
+ context 'when the user has no permission' do
+ let_it_be(:group) { create(:group) }
+
+ let(:params) { { first_name: 'Gary' } }
+
+ it 'returns an error' do
+ response = update
+
+ expect(response).to be_error
+ expect(response.message).to match_array(['You have insufficient permissions to update a contact for this group'])
+ end
+ end
+
+ context 'when user has permission' do
+ let_it_be(:group) { create(:group) }
+
+ before_all do
+ group.add_developer(user)
+ end
+
+ context 'when first_name is changed' do
+ let(:params) { { first_name: 'Gary' } }
+
+ it 'updates the contact' do
+ response = update
+
+ expect(response).to be_success
+ expect(response.payload.first_name).to eq('Gary')
+ end
+ end
+
+ context 'when the contact is invalid' do
+ let(:params) { { first_name: nil } }
+
+ it 'returns an error' do
+ response = update
+
+ expect(response).to be_error
+ expect(response.message).to match_array(["First name can't be blank"])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/customer_relations/organizations/create_service_spec.rb b/spec/services/customer_relations/organizations/create_service_spec.rb
index b4764f6b97a..d8985d8d90b 100644
--- a/spec/services/customer_relations/organizations/create_service_spec.rb
+++ b/spec/services/customer_relations/organizations/create_service_spec.rb
@@ -12,22 +12,24 @@ RSpec.describe CustomerRelations::Organizations::CreateService do
subject(:response) { described_class.new(group: group, current_user: user, params: params).execute }
it 'creates an organization' do
- group.add_reporter(user)
+ group.add_developer(user)
expect(response).to be_success
end
it 'returns an error when user does not have permission' do
+ group.add_reporter(user)
+
expect(response).to be_error
- expect(response.message).to eq('You have insufficient permissions to create an organization for this group')
+ expect(response.message).to match_array(['You have insufficient permissions to create an organization for this group'])
end
it 'returns an error when the organization is not persisted' do
- group.add_reporter(user)
+ group.add_developer(user)
params[:name] = nil
expect(response).to be_error
- expect(response.message).to eq(["Name can't be blank"])
+ expect(response.message).to match_array(["Name can't be blank"])
end
end
end
diff --git a/spec/services/customer_relations/organizations/update_service_spec.rb b/spec/services/customer_relations/organizations/update_service_spec.rb
index eb253540863..bc40cb3e8e7 100644
--- a/spec/services/customer_relations/organizations/update_service_spec.rb
+++ b/spec/services/customer_relations/organizations/update_service_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe CustomerRelations::Organizations::UpdateService do
response = update
expect(response).to be_error
- expect(response.message).to eq('You have insufficient permissions to update an organization for this group')
+ expect(response.message).to eq(['You have insufficient permissions to update an organization for this group'])
end
end
@@ -27,7 +27,7 @@ RSpec.describe CustomerRelations::Organizations::UpdateService do
let_it_be(:group) { create(:group) }
before_all do
- group.add_reporter(user)
+ group.add_developer(user)
end
context 'when name is changed' do
diff --git a/spec/services/dependency_proxy/auth_token_service_spec.rb b/spec/services/dependency_proxy/auth_token_service_spec.rb
index 6214d75dfa0..c686f57c5cb 100644
--- a/spec/services/dependency_proxy/auth_token_service_spec.rb
+++ b/spec/services/dependency_proxy/auth_token_service_spec.rb
@@ -4,47 +4,72 @@ require 'spec_helper'
RSpec.describe DependencyProxy::AuthTokenService do
include DependencyProxyHelpers
- describe '.decoded_token_payload' do
- let_it_be(:user) { create(:user) }
- let_it_be(:token) { build_jwt(user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:deploy_token) { create(:deploy_token) }
- subject { described_class.decoded_token_payload(token.encoded) }
+ describe '.user_or_deploy_token_from_jwt' do
+ subject { described_class.user_or_deploy_token_from_jwt(token.encoded) }
- it 'returns the user' do
- result = subject
+ shared_examples 'handling token errors' do
+ context 'with a decoding error' do
+ before do
+ allow(JWT).to receive(:decode).and_raise(JWT::DecodeError)
+ end
- expect(result['user_id']).to eq(user.id)
- expect(result['deploy_token']).to be_nil
- end
+ it { is_expected.to eq(nil) }
+ end
- context 'with a deploy token' do
- let_it_be(:deploy_token) { create(:deploy_token) }
- let_it_be(:token) { build_jwt(deploy_token) }
+ context 'with an immature signature error' do
+ before do
+ allow(JWT).to receive(:decode).and_raise(JWT::ImmatureSignature)
+ end
- it 'returns the deploy token' do
- result = subject
+ it { is_expected.to eq(nil) }
+ end
- expect(result['deploy_token']).to eq(deploy_token.token)
- expect(result['user_id']).to be_nil
+ context 'with an expired signature error' do
+ it 'returns nil' do
+ travel_to(Time.zone.now + Auth::DependencyProxyAuthenticationService.token_expire_at + 1.minute) do
+ expect(subject).to eq(nil)
+ end
+ end
end
end
- it 'raises an error if the token is expired' do
- travel_to(Time.zone.now + Auth::DependencyProxyAuthenticationService.token_expire_at + 1.minute) do
- expect { subject }.to raise_error(JWT::ExpiredSignature)
+ context 'with a user' do
+ let_it_be(:token) { build_jwt(user) }
+
+ it { is_expected.to eq(user) }
+
+ context 'with an invalid user id' do
+ let_it_be(:token) { build_jwt { |jwt| jwt['user_id'] = 'this_is_not_a_user_id' } }
+
+ it 'raises a not found error' do
+ expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
+ end
end
+
+ it_behaves_like 'handling token errors'
end
- it 'raises an error if decoding fails' do
- allow(JWT).to receive(:decode).and_raise(JWT::DecodeError)
+ context 'with a deploy token' do
+ let_it_be(:token) { build_jwt(deploy_token) }
+
+ it { is_expected.to eq(deploy_token) }
+
+ context 'with an invalid token' do
+ let_it_be(:token) { build_jwt { |jwt| jwt['deploy_token'] = 'this_is_not_a_token' } }
+
+ it { is_expected.to eq(nil) }
+ end
- expect { subject }.to raise_error(JWT::DecodeError)
+ it_behaves_like 'handling token errors'
end
- it 'raises an error if signature is immature' do
- allow(JWT).to receive(:decode).and_raise(JWT::ImmatureSignature)
+ context 'with an empty token payload' do
+ let_it_be(:token) { build_jwt(nil) }
- expect { subject }.to raise_error(JWT::ImmatureSignature)
+ it { is_expected.to eq(nil) }
end
end
end
diff --git a/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb b/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb
index 3fac749be29..20b0546effa 100644
--- a/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb
+++ b/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
RSpec.describe DependencyProxy::FindOrCreateBlobService do
include DependencyProxyHelpers
- let(:blob) { create(:dependency_proxy_blob) }
+ let_it_be_with_reload(:blob) { create(:dependency_proxy_blob) }
+
let(:group) { blob.group }
let(:image) { 'alpine' }
let(:tag) { '3.9' }
@@ -17,11 +18,7 @@ RSpec.describe DependencyProxy::FindOrCreateBlobService do
stub_registry_auth(image, token)
end
- context 'no cache' do
- before do
- stub_blob_download(image, blob_sha)
- end
-
+ shared_examples 'downloads the remote blob' do
it 'downloads blob from remote registry if there is no cached one' do
expect(subject[:status]).to eq(:success)
expect(subject[:blob]).to be_a(DependencyProxy::Blob)
@@ -30,15 +27,34 @@ RSpec.describe DependencyProxy::FindOrCreateBlobService do
end
end
+ context 'no cache' do
+ before do
+ stub_blob_download(image, blob_sha)
+ end
+
+ it_behaves_like 'downloads the remote blob'
+ end
+
context 'cached blob' do
let(:blob_sha) { blob.file_name.sub('.gz', '') }
it 'uses cached blob instead of downloading one' do
+ expect { subject }.to change { blob.reload.updated_at }
+
expect(subject[:status]).to eq(:success)
expect(subject[:blob]).to be_a(DependencyProxy::Blob)
expect(subject[:blob]).to eq(blob)
expect(subject[:from_cache]).to eq true
end
+
+ context 'when the cached blob is expired' do
+ before do
+ blob.update_column(:status, DependencyProxy::Blob.statuses[:expired])
+ stub_blob_download(image, blob_sha)
+ end
+
+ it_behaves_like 'downloads the remote blob'
+ end
end
context 'no such blob exists remotely' do
diff --git a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb b/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
index 5896aa255f0..b3f88f91289 100644
--- a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
+++ b/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
@@ -21,19 +21,19 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
describe '#execute' do
subject { described_class.new(group, image, tag, token).execute }
+ shared_examples 'downloading the manifest' do
+ it 'downloads manifest from remote registry if there is no cached one', :aggregate_failures do
+ expect { subject }.to change { group.dependency_proxy_manifests.count }.by(1)
+ expect(subject[:status]).to eq(:success)
+ expect(subject[:manifest]).to be_a(DependencyProxy::Manifest)
+ expect(subject[:manifest]).to be_persisted
+ expect(subject[:from_cache]).to eq false
+ end
+ end
+
context 'when no manifest exists' do
let_it_be(:image) { 'new-image' }
- shared_examples 'downloading the manifest' do
- it 'downloads manifest from remote registry if there is no cached one', :aggregate_failures do
- expect { subject }.to change { group.dependency_proxy_manifests.count }.by(1)
- expect(subject[:status]).to eq(:success)
- expect(subject[:manifest]).to be_a(DependencyProxy::Manifest)
- expect(subject[:manifest]).to be_persisted
- expect(subject[:from_cache]).to eq false
- end
- end
-
context 'successful head request' do
before do
stub_manifest_head(image, tag, headers: headers)
@@ -60,6 +60,8 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
shared_examples 'using the cached manifest' do
it 'uses cached manifest instead of downloading one', :aggregate_failures do
+ expect { subject }.to change { dependency_proxy_manifest.reload.updated_at }
+
expect(subject[:status]).to eq(:success)
expect(subject[:manifest]).to be_a(DependencyProxy::Manifest)
expect(subject[:manifest]).to eq(dependency_proxy_manifest)
@@ -87,6 +89,16 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
end
end
+ context 'when the cached manifest is expired' do
+ before do
+ dependency_proxy_manifest.update_column(:status, DependencyProxy::Manifest.statuses[:expired])
+ stub_manifest_head(image, tag, headers: headers)
+ stub_manifest_download(image, tag, headers: headers)
+ end
+
+ it_behaves_like 'downloading the manifest'
+ end
+
context 'failed connection' do
before do
expect(DependencyProxy::HeadManifestService).to receive(:new).and_raise(Net::OpenTimeout)
diff --git a/spec/services/dependency_proxy/group_settings/update_service_spec.rb b/spec/services/dependency_proxy/group_settings/update_service_spec.rb
new file mode 100644
index 00000000000..6f8c55daa8d
--- /dev/null
+++ b/spec/services/dependency_proxy/group_settings/update_service_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::DependencyProxy::GroupSettings::UpdateService do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be_with_reload(:group_settings) { create(:dependency_proxy_group_setting, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:params) { { enabled: false } }
+
+ describe '#execute' do
+ subject { described_class.new(container: group, current_user: user, params: params).execute }
+
+ shared_examples 'updating the dependency proxy group settings' do
+ it_behaves_like 'updating the dependency proxy group settings attributes',
+ from: { enabled: true },
+ to: { enabled: false }
+
+ it 'returns a success' do
+ result = subject
+
+ expect(result.payload[:dependency_proxy_setting]).to be_present
+ expect(result).to be_success
+ end
+ end
+
+ shared_examples 'denying access to dependency proxy group settings' do
+ context 'with existing dependency proxy group settings' do
+ it 'returns an error' do
+ result = subject
+
+ expect(result).to have_attributes(
+ message: 'Access Denied',
+ status: :error,
+ http_status: 403
+ )
+ end
+ end
+ end
+
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'updating the dependency proxy group settings'
+ :developer | 'updating the dependency proxy group settings'
+ :reporter | 'denying access to dependency proxy group settings'
+ :guest | 'denying access to dependency proxy group settings'
+ :anonymous | 'denying access to dependency proxy group settings'
+ end
+
+ with_them do
+ before do
+ stub_config(dependency_proxy: { enabled: true })
+ group.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+end
diff --git a/spec/services/deployments/older_deployments_drop_service_spec.rb b/spec/services/deployments/older_deployments_drop_service_spec.rb
index 6152a95cc3c..e6fd6725d7d 100644
--- a/spec/services/deployments/older_deployments_drop_service_spec.rb
+++ b/spec/services/deployments/older_deployments_drop_service_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe Deployments::OlderDeploymentsDropService do
it 'does not drop an older deployment and tracks the exception' do
expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(kind_of(RuntimeError), subject_id: deployment.id, deployment_id: older_deployment.id)
+ .with(kind_of(RuntimeError), subject_id: deployment.id, build_id: older_deployment.deployable_id)
expect { subject }.not_to change { Ci::Build.failed.count }
end
diff --git a/spec/services/deployments/update_service_spec.rb b/spec/services/deployments/update_service_spec.rb
index 16b24d0dee8..d3840189ba4 100644
--- a/spec/services/deployments/update_service_spec.rb
+++ b/spec/services/deployments/update_service_spec.rb
@@ -34,9 +34,11 @@ RSpec.describe Deployments::UpdateService do
expect(deploy).to be_canceled
end
- it 'raises ArgumentError if the status is invalid' do
- expect { described_class.new(deploy, status: 'kittens').execute }
- .to raise_error(ArgumentError)
+ it 'does not change the state if the status is invalid' do
+ expect(described_class.new(deploy, status: 'kittens').execute)
+ .to be_falsy
+
+ expect(deploy).to be_created
end
it 'links merge requests when changing the status to success', :sidekiq_inline do
diff --git a/spec/services/error_tracking/list_issues_service_spec.rb b/spec/services/error_tracking/list_issues_service_spec.rb
index b49095ab8b9..a7bd6c75df5 100644
--- a/spec/services/error_tracking/list_issues_service_spec.rb
+++ b/spec/services/error_tracking/list_issues_service_spec.rb
@@ -5,56 +5,71 @@ require 'spec_helper'
RSpec.describe ErrorTracking::ListIssuesService do
include_context 'sentry error tracking context'
- let(:params) { { search_term: 'something', sort: 'last_seen', cursor: 'some-cursor' } }
- let(:list_sentry_issues_args) do
- {
- issue_status: 'unresolved',
- limit: 20,
- search_term: 'something',
- sort: 'last_seen',
- cursor: 'some-cursor'
- }
- end
+ let(:params) { {} }
subject { described_class.new(project, user, params) }
describe '#execute' do
- context 'with authorized user' do
- let(:issues) { [] }
+ context 'Sentry backend' do
+ let(:params) { { search_term: 'something', sort: 'last_seen', cursor: 'some-cursor' } }
+
+ let(:list_sentry_issues_args) do
+ {
+ issue_status: 'unresolved',
+ limit: 20,
+ search_term: 'something',
+ sort: 'last_seen',
+ cursor: 'some-cursor'
+ }
+ end
+
+ context 'with authorized user' do
+ let(:issues) { [] }
+
+ described_class::ISSUE_STATUS_VALUES.each do |status|
+ it "returns the issues with #{status} issue_status" do
+ params[:issue_status] = status
+ list_sentry_issues_args[:issue_status] = status
+ expect_list_sentry_issues_with(list_sentry_issues_args)
+
+ expect(result).to eq(status: :success, pagination: {}, issues: issues)
+ end
+ end
- described_class::ISSUE_STATUS_VALUES.each do |status|
- it "returns the issues with #{status} issue_status" do
- params[:issue_status] = status
- list_sentry_issues_args[:issue_status] = status
+ it 'returns the issues with no issue_status' do
expect_list_sentry_issues_with(list_sentry_issues_args)
expect(result).to eq(status: :success, pagination: {}, issues: issues)
end
- end
- it 'returns the issues with no issue_status' do
- expect_list_sentry_issues_with(list_sentry_issues_args)
+ it 'returns bad request for an issue_status not on the whitelist' do
+ params[:issue_status] = 'assigned'
+
+ expect(error_tracking_setting).not_to receive(:list_sentry_issues)
+ expect(result).to eq(message: "Bad Request: Invalid issue_status", status: :error, http_status: :bad_request)
+ end
- expect(result).to eq(status: :success, pagination: {}, issues: issues)
+ include_examples 'error tracking service data not ready', :list_sentry_issues
+ include_examples 'error tracking service sentry error handling', :list_sentry_issues
+ include_examples 'error tracking service http status handling', :list_sentry_issues
end
- it 'returns bad request for an issue_status not on the whitelist' do
- params[:issue_status] = 'assigned'
+ include_examples 'error tracking service unauthorized user'
+ include_examples 'error tracking service disabled'
- expect(error_tracking_setting).not_to receive(:list_sentry_issues)
- expect(result).to eq(message: "Bad Request: Invalid issue_status", status: :error, http_status: :bad_request)
+ def expect_list_sentry_issues_with(list_sentry_issues_args)
+ expect(error_tracking_setting)
+ .to receive(:list_sentry_issues)
+ .with(list_sentry_issues_args)
+ .and_return(issues: [], pagination: {})
end
-
- include_examples 'error tracking service data not ready', :list_sentry_issues
- include_examples 'error tracking service sentry error handling', :list_sentry_issues
- include_examples 'error tracking service http status handling', :list_sentry_issues
end
- include_examples 'error tracking service unauthorized user'
- include_examples 'error tracking service disabled'
+ context 'GitLab backend' do
+ let_it_be(:error1) { create(:error_tracking_error, name: 'foo', project: project) }
+ let_it_be(:error2) { create(:error_tracking_error, name: 'bar', project: project) }
- context 'integrated error tracking' do
- let_it_be(:error) { create(:error_tracking_error, project: project) }
+ let(:params) { { limit: '1' } }
before do
error_tracking_setting.update!(integrated: true)
@@ -63,7 +78,9 @@ RSpec.describe ErrorTracking::ListIssuesService do
it 'returns the error in expected format' do
expect(result[:status]).to eq(:success)
expect(result[:issues].size).to eq(1)
- expect(result[:issues].first.to_json).to eq(error.to_sentry_error.to_json)
+ expect(result[:issues].first.to_json).to eq(error2.to_sentry_error.to_json)
+ expect(result[:pagination][:next][:cursor]).to be_present
+ expect(result[:pagination][:previous]).to be_nil
end
end
end
@@ -76,10 +93,3 @@ RSpec.describe ErrorTracking::ListIssuesService do
end
end
end
-
-def expect_list_sentry_issues_with(list_sentry_issues_args)
- expect(error_tracking_setting)
- .to receive(:list_sentry_issues)
- .with(list_sentry_issues_args)
- .and_return(issues: [], pagination: {})
-end
diff --git a/spec/services/feature_flags/hook_service_spec.rb b/spec/services/feature_flags/hook_service_spec.rb
new file mode 100644
index 00000000000..02cdbbd86ac
--- /dev/null
+++ b/spec/services/feature_flags/hook_service_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlags::HookService do
+ describe '#execute_hooks' do
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:project) { create(:project, :repository, namespace: namespace) }
+ let_it_be(:feature_flag) { create(:operations_feature_flag, project: project) }
+ let_it_be(:user) { namespace.owner }
+
+ let!(:hook) { create(:project_hook, project: project) }
+ let(:hook_data) { double }
+
+ subject(:service) { described_class.new(feature_flag, user) }
+
+ describe 'HOOK_NAME' do
+ specify { expect(described_class::HOOK_NAME).to eq(:feature_flag_hooks) }
+ end
+
+ before do
+ allow(Gitlab::DataBuilder::FeatureFlag).to receive(:build).with(feature_flag, user).once.and_return(hook_data)
+ end
+
+ it 'calls feature_flag.project.execute_hooks' do
+ expect(feature_flag.project).to receive(:execute_hooks).with(hook_data, described_class::HOOK_NAME)
+
+ service.execute
+ end
+ end
+end
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index 889b5551746..ee38c0fbb44 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -3,6 +3,19 @@
require 'spec_helper'
RSpec.describe Groups::TransferService do
+ shared_examples 'project namespace path is in sync with project path' do
+ it 'keeps project and project namespace attributes in sync' do
+ projects_with_project_namespace.each do |project|
+ project.reload
+
+ expect(project.full_path).to eq("#{group_full_path}/#{project.path}")
+ expect(project.project_namespace.full_path).to eq(project.full_path)
+ expect(project.project_namespace.parent).to eq(project.namespace)
+ expect(project.project_namespace.visibility_level).to eq(project.visibility_level)
+ end
+ end
+ end
+
let_it_be(:user) { create(:user) }
let_it_be(:new_parent_group) { create(:group, :public) }
@@ -169,6 +182,18 @@ RSpec.describe Groups::TransferService do
expect(project.full_path).to eq("#{group.path}/#{project.path}")
end
end
+
+ context 'when projects have project namespaces' do
+ let_it_be(:project1) { create(:project, :private, namespace: group) }
+ let_it_be(:project_namespace1) { create(:project_namespace, project: project1) }
+ let_it_be(:project2) { create(:project, :private, namespace: group) }
+ let_it_be(:project_namespace2) { create(:project_namespace, project: project2) }
+
+ it_behaves_like 'project namespace path is in sync with project path' do
+ let(:group_full_path) { "#{group.path}" }
+ let(:projects_with_project_namespace) { [project1, project2] }
+ end
+ end
end
end
@@ -222,10 +247,10 @@ RSpec.describe Groups::TransferService do
context 'when the parent group has a project with the same path' do
let_it_be_with_reload(:group) { create(:group, :public, :nested, path: 'foo') }
+ let_it_be(:membership) { create(:group_member, :owner, group: new_parent_group, user: user) }
+ let_it_be(:project) { create(:project, path: 'foo', namespace: new_parent_group) }
before do
- create(:group_member, :owner, group: new_parent_group, user: user)
- create(:project, path: 'foo', namespace: new_parent_group)
group.update_attribute(:path, 'foo')
end
@@ -237,6 +262,19 @@ RSpec.describe Groups::TransferService do
transfer_service.execute(new_parent_group)
expect(transfer_service.error).to eq('Transfer failed: Validation failed: Group URL has already been taken')
end
+
+ context 'when projects have project namespaces' do
+ let!(:project_namespace) { create(:project_namespace, project: project) }
+
+ before do
+ transfer_service.execute(new_parent_group)
+ end
+
+ it_behaves_like 'project namespace path is in sync with project path' do
+ let(:group_full_path) { "#{new_parent_group.full_path}" }
+ let(:projects_with_project_namespace) { [project] }
+ end
+ end
end
context 'when the group is allowed to be transferred' do
@@ -324,7 +362,7 @@ RSpec.describe Groups::TransferService do
let(:new_parent_group) { create(:group, shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: true) }
it 'calls update service' do
- expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: 'disabled_with_override' }).and_call_original
+ expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: Namespace::SR_DISABLED_WITH_OVERRIDE }).and_call_original
transfer_service.execute(new_parent_group)
end
@@ -334,7 +372,7 @@ RSpec.describe Groups::TransferService do
let(:new_parent_group) { create(:group, shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: false) }
it 'calls update service' do
- expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: 'disabled_and_unoverridable' }).and_call_original
+ expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: Namespace::SR_DISABLED_AND_UNOVERRIDABLE }).and_call_original
transfer_service.execute(new_parent_group)
end
@@ -407,6 +445,8 @@ RSpec.describe Groups::TransferService do
context 'when transferring a group with project descendants' do
let!(:project1) { create(:project, :repository, :private, namespace: group) }
let!(:project2) { create(:project, :repository, :internal, namespace: group) }
+ let!(:project_namespace1) { create(:project_namespace, project: project1) }
+ let!(:project_namespace2) { create(:project_namespace, project: project2) }
before do
TestEnv.clean_test_path
@@ -432,18 +472,30 @@ RSpec.describe Groups::TransferService do
expect(project1.private?).to be_truthy
expect(project2.internal?).to be_truthy
end
+
+ it_behaves_like 'project namespace path is in sync with project path' do
+ let(:group_full_path) { "#{new_parent_group.path}/#{group.path}" }
+ let(:projects_with_project_namespace) { [project1, project2] }
+ end
end
context 'when the new parent has a lower visibility than the projects' do
let!(:project1) { create(:project, :repository, :public, namespace: group) }
let!(:project2) { create(:project, :repository, :public, namespace: group) }
- let(:new_parent_group) { create(:group, :private) }
+ let!(:new_parent_group) { create(:group, :private) }
+ let!(:project_namespace1) { create(:project_namespace, project: project1) }
+ let!(:project_namespace2) { create(:project_namespace, project: project2) }
it 'updates projects visibility to match the new parent' do
group.projects.each do |project|
expect(project.private?).to be_truthy
end
end
+
+ it_behaves_like 'project namespace path is in sync with project path' do
+ let(:group_full_path) { "#{new_parent_group.path}/#{group.path}" }
+ let(:projects_with_project_namespace) { [project1, project2] }
+ end
end
end
@@ -452,6 +504,8 @@ RSpec.describe Groups::TransferService do
let!(:project2) { create(:project, :repository, :internal, namespace: group) }
let!(:subgroup1) { create(:group, :private, parent: group) }
let!(:subgroup2) { create(:group, :internal, parent: group) }
+ let!(:project_namespace1) { create(:project_namespace, project: project1) }
+ let!(:project_namespace2) { create(:project_namespace, project: project2) }
before do
TestEnv.clean_test_path
@@ -480,6 +534,11 @@ RSpec.describe Groups::TransferService do
expect(project1.redirect_routes.count).to eq(1)
expect(project2.redirect_routes.count).to eq(1)
end
+
+ it_behaves_like 'project namespace path is in sync with project path' do
+ let(:group_full_path) { "#{new_parent_group.path}/#{group.path}" }
+ let(:projects_with_project_namespace) { [project1, project2] }
+ end
end
context 'when transferring a group with nested groups and projects' do
@@ -651,6 +710,30 @@ RSpec.describe Groups::TransferService do
expect(project1.public?).to be_truthy
end
end
+
+ context 'when group has pending builds' do
+ let_it_be(:project) { create(:project, :public, namespace: group.reload) }
+ let_it_be(:other_project) { create(:project) }
+ let_it_be(:pending_build) { create(:ci_pending_build, project: project) }
+ let_it_be(:unrelated_pending_build) { create(:ci_pending_build, project: other_project) }
+
+ before do
+ group.add_owner(user)
+ new_parent_group.add_owner(user)
+ end
+
+ it 'updates pending builds for the group', :aggregate_failures do
+ transfer_service.execute(new_parent_group)
+
+ pending_build.reload
+ unrelated_pending_build.reload
+
+ expect(pending_build.namespace_id).to eq(group.id)
+ expect(pending_build.namespace_traversal_ids).to eq(group.traversal_ids)
+ expect(unrelated_pending_build.namespace_id).to eq(other_project.namespace_id)
+ expect(unrelated_pending_build.namespace_traversal_ids).to eq(other_project.namespace.traversal_ids)
+ end
+ end
end
context 'when transferring a subgroup into root group' do
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index bc7c066fa04..e1bd3732820 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -287,7 +287,7 @@ RSpec.describe Groups::UpdateService do
let(:group) { create(:group) }
let(:project) { create(:project, shared_runners_enabled: true, group: group) }
- subject { described_class.new(group, user, shared_runners_setting: 'disabled_and_unoverridable').execute }
+ subject { described_class.new(group, user, shared_runners_setting: Namespace::SR_DISABLED_AND_UNOVERRIDABLE).execute }
before do
group.add_owner(user)
diff --git a/spec/services/groups/update_shared_runners_service_spec.rb b/spec/services/groups/update_shared_runners_service_spec.rb
index fe18277b5cd..53870e810b1 100644
--- a/spec/services/groups/update_shared_runners_service_spec.rb
+++ b/spec/services/groups/update_shared_runners_service_spec.rb
@@ -85,10 +85,10 @@ RSpec.describe Groups::UpdateSharedRunnersService do
context 'disable shared Runners' do
let_it_be(:group) { create(:group) }
- let(:params) { { shared_runners_setting: 'disabled_and_unoverridable' } }
+ let(:params) { { shared_runners_setting: Namespace::SR_DISABLED_AND_UNOVERRIDABLE } }
it 'receives correct method and succeeds' do
- expect(group).to receive(:update_shared_runners_setting!).with('disabled_and_unoverridable')
+ expect(group).to receive(:update_shared_runners_setting!).with(Namespace::SR_DISABLED_AND_UNOVERRIDABLE)
expect(subject[:status]).to eq(:success)
end
@@ -108,13 +108,13 @@ RSpec.describe Groups::UpdateSharedRunnersService do
end
context 'allow descendants to override' do
- let(:params) { { shared_runners_setting: 'disabled_with_override' } }
+ let(:params) { { shared_runners_setting: Namespace::SR_DISABLED_WITH_OVERRIDE } }
context 'top level group' do
let_it_be(:group) { create(:group, :shared_runners_disabled) }
it 'receives correct method and succeeds' do
- expect(group).to receive(:update_shared_runners_setting!).with('disabled_with_override')
+ expect(group).to receive(:update_shared_runners_setting!).with(Namespace::SR_DISABLED_WITH_OVERRIDE)
expect(subject[:status]).to eq(:success)
end
diff --git a/spec/services/import/validate_remote_git_endpoint_service_spec.rb b/spec/services/import/validate_remote_git_endpoint_service_spec.rb
new file mode 100644
index 00000000000..fbd8a3cb323
--- /dev/null
+++ b/spec/services/import/validate_remote_git_endpoint_service_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::ValidateRemoteGitEndpointService do
+ include StubRequests
+
+ let_it_be(:base_url) { 'http://demo.host/path' }
+ let_it_be(:endpoint_url) { "#{base_url}/info/refs?service=git-upload-pack" }
+ let_it_be(:error_message) { "#{base_url} is not a valid HTTP Git repository" }
+
+ describe '#execute' do
+ let(:valid_response) do
+ { status: 200,
+ body: '001e# service=git-upload-pack',
+ headers: { 'Content-Type': 'application/x-git-upload-pack-advertisement' } }
+ end
+
+ it 'correctly handles URLs with fragment' do
+ allow(Gitlab::HTTP).to receive(:get)
+
+ described_class.new(url: "#{base_url}#somehash").execute
+
+ expect(Gitlab::HTTP).to have_received(:get).with(endpoint_url, basic_auth: nil, stream_body: true, follow_redirects: false)
+ end
+
+ context 'when receiving HTTP response' do
+ subject { described_class.new(url: base_url) }
+
+ it 'returns success when HTTP response is valid and contains correct payload' do
+ stub_full_request(endpoint_url, method: :get).to_return(valid_response)
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.success?).to be(true)
+ end
+
+ it 'reports error when status code is not 200' do
+ stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ status: 301 }))
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq(error_message)
+ end
+
+ it 'reports error when invalid URL is provided' do
+ result = described_class.new(url: 1).execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq('1 is not a valid URL')
+ end
+
+ it 'reports error when required header is missing' do
+ stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ headers: nil }))
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq(error_message)
+ end
+
+ it 'reports error when body is in invalid format' do
+ stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ body: 'invalid content' }))
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq(error_message)
+ end
+
+ it 'reports error when exception is raised' do
+ stub_full_request(endpoint_url, method: :get).to_raise(SocketError.new('dummy message'))
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq(error_message)
+ end
+ end
+
+ it 'passes basic auth when credentials are provided' do
+ allow(Gitlab::HTTP).to receive(:get)
+
+ described_class.new(url: "#{base_url}#somehash", user: 'user', password: 'password').execute
+
+ expect(Gitlab::HTTP).to have_received(:get).with(endpoint_url, basic_auth: { username: 'user', password: 'password' }, stream_body: true, follow_redirects: false)
+ end
+ end
+end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 14e6b44f7b0..93ef046a632 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -22,6 +22,18 @@ RSpec.describe Issues::CloseService do
describe '#execute' do
let(:service) { described_class.new(project: project, current_user: user) }
+ context 'when skip_authorization is true' do
+ it 'does close the issue even if user is not authorized' do
+ non_authorized_user = create(:user)
+
+ service = described_class.new(project: project, current_user: non_authorized_user)
+
+ expect do
+ service.execute(issue, skip_authorization: true)
+ end.to change { issue.reload.state }.from('opened').to('closed')
+ end
+ end
+
it 'checks if the user is authorized to update the issue' do
expect(service).to receive(:can?).with(user, :update_issue, issue)
.and_call_original
@@ -156,7 +168,7 @@ RSpec.describe Issues::CloseService do
context 'updating `metrics.first_mentioned_in_commit_at`' do
context 'when `metrics.first_mentioned_in_commit_at` is not set' do
it 'uses the first commit authored timestamp' do
- expected = closing_merge_request.commits.first.authored_date
+ expected = closing_merge_request.commits.take(100).last.authored_date
close_issue
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 3988069d83a..1887be4896e 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -10,6 +10,25 @@ RSpec.describe Issues::CreateService do
let(:spam_params) { double }
+ describe '.rate_limiter_scoped_and_keyed' do
+ it 'is set via the rate_limit call' do
+ expect(described_class.rate_limiter_scoped_and_keyed).to be_a(RateLimitedService::RateLimiterScopedAndKeyed)
+
+ expect(described_class.rate_limiter_scoped_and_keyed.key).to eq(:issues_create)
+ expect(described_class.rate_limiter_scoped_and_keyed.opts[:scope]).to eq(%i[project current_user])
+ expect(described_class.rate_limiter_scoped_and_keyed.opts[:users_allowlist].call).to eq(%w[support-bot])
+ expect(described_class.rate_limiter_scoped_and_keyed.rate_limiter_klass).to eq(Gitlab::ApplicationRateLimiter)
+ end
+ end
+
+ describe '#rate_limiter_bypassed' do
+ let(:subject) { described_class.new(project: project, spam_params: {}) }
+
+ it 'is nil by default' do
+ expect(subject.rate_limiter_bypassed).to be_nil
+ end
+ end
+
describe '#execute' do
let_it_be(:assignee) { create(:user) }
let_it_be(:milestone) { create(:milestone, project: project) }
diff --git a/spec/services/issues/relative_position_rebalancing_service_spec.rb b/spec/services/issues/relative_position_rebalancing_service_spec.rb
index d5d81770817..20064bd7e4b 100644
--- a/spec/services/issues/relative_position_rebalancing_service_spec.rb
+++ b/spec/services/issues/relative_position_rebalancing_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Issues::RelativePositionRebalancingService, :clean_gitlab_redis_shared_state do
- let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:project, reload: true) { create(:project, :repository_disabled, skip_disk_validation: true) }
let_it_be(:user) { project.creator }
let_it_be(:start) { RelativePositioning::START_POSITION }
let_it_be(:max_pos) { RelativePositioning::MAX_POSITION }
@@ -28,12 +28,18 @@ RSpec.describe Issues::RelativePositionRebalancingService, :clean_gitlab_redis_s
end
end
+ let_it_be(:nil_clump, reload: true) do
+ (1..100).to_a.map do |i|
+ create(:issue, project: project, author: user, relative_position: nil)
+ end
+ end
+
before do
stub_feature_flags(issue_rebalancing_with_retry: false)
end
def issues_in_position_order
- project.reload.issues.reorder(relative_position: :asc).to_a
+ project.reload.issues.order_by_relative_position.to_a
end
subject(:service) { described_class.new(Project.id_in(project)) }
@@ -44,16 +50,19 @@ RSpec.describe Issues::RelativePositionRebalancingService, :clean_gitlab_redis_s
expect { service.execute }.not_to change { issues_in_position_order.map(&:id) }
+ caching = service.send(:caching)
all_issues.each(&:reset)
gaps = all_issues.take(all_issues.count - 1).zip(all_issues.drop(1)).map do |a, b|
b.relative_position - a.relative_position
end
+ expect(caching.issue_count).to eq(900)
expect(gaps).to all(be > RelativePositioning::MIN_GAP)
expect(all_issues.first.relative_position).to be > (RelativePositioning::MIN_POSITION * 0.9999)
expect(all_issues.last.relative_position).to be < (RelativePositioning::MAX_POSITION * 0.9999)
expect(project.root_namespace.issue_repositioning_disabled?).to be false
+ expect(project.issues.with_null_relative_position.count).to eq(100)
end
it 'is idempotent' do
@@ -111,7 +120,7 @@ RSpec.describe Issues::RelativePositionRebalancingService, :clean_gitlab_redis_s
allow(caching).to receive(:concurrent_running_rebalances_count).and_return(10)
allow(service).to receive(:caching).and_return(caching)
- expect { service.execute }.not_to raise_error(Issues::RelativePositionRebalancingService::TooManyConcurrentRebalances)
+ expect { service.execute }.not_to raise_error
end
context 're-balancing is retried on statement timeout exceptions' do
diff --git a/spec/services/issues/reopen_service_spec.rb b/spec/services/issues/reopen_service_spec.rb
index 86190c4e475..c9469b861ac 100644
--- a/spec/services/issues/reopen_service_spec.rb
+++ b/spec/services/issues/reopen_service_spec.rb
@@ -8,18 +8,26 @@ RSpec.describe Issues::ReopenService do
describe '#execute' do
context 'when user is not authorized to reopen issue' do
- before do
+ it 'does not reopen the issue' do
guest = create(:user)
project.add_guest(guest)
- perform_enqueued_jobs do
- described_class.new(project: project, current_user: guest).execute(issue)
- end
- end
+ described_class.new(project: project, current_user: guest).execute(issue)
- it 'does not reopen the issue' do
expect(issue).to be_closed
end
+
+ context 'when skip_authorization is true' do
+      it 'does reopen the issue even if user is not authorized' do
+ non_authorized_user = create(:user)
+
+ service = described_class.new(project: project, current_user: non_authorized_user)
+
+ expect do
+ service.execute(issue, skip_authorization: true)
+ end.to change { issue.reload.state }.from('closed').to('opened')
+ end
+ end
end
context 'when user is authorized to reopen issue' do
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index 15ed5c5a33f..2e6e6041fc3 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -80,7 +80,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
it 'does not add a member' do
expect(execute_service[:status]).to eq(:error)
- expect(execute_service[:message]).to eq('Invite email has already been taken')
+ expect(execute_service[:message]).to eq("The member's email address has already been taken")
expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(false)
end
end
diff --git a/spec/services/members/invite_service_spec.rb b/spec/services/members/invite_service_spec.rb
index dd82facaf14..478733e8aa0 100644
--- a/spec/services/members/invite_service_spec.rb
+++ b/spec/services/members/invite_service_spec.rb
@@ -150,7 +150,7 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
expect_to_create_members(count: 1)
expect(result[:status]).to eq(:error)
expect(result[:message][invited_member.invite_email])
- .to eq("Invite email has already been taken")
+ .to eq("The member's email address has already been taken")
expect(project.users).to include project_user
end
end
diff --git a/spec/services/merge_requests/assign_issues_service_spec.rb b/spec/services/merge_requests/assign_issues_service_spec.rb
index b857f26c052..cf405c0102e 100644
--- a/spec/services/merge_requests/assign_issues_service_spec.rb
+++ b/spec/services/merge_requests/assign_issues_service_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe MergeRequests::AssignIssuesService do
expect(service.assignable_issues.map(&:id)).to include(issue.id)
end
- it 'ignores issues the user cannot update assignee on' do
+ it 'ignores issues the user cannot update assignee on', :sidekiq_inline do
project.team.truncate
expect(service.assignable_issues).to be_empty
diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb
index 0f282384661..ab3d9880d29 100644
--- a/spec/services/merge_requests/build_service_spec.rb
+++ b/spec/services/merge_requests/build_service_spec.rb
@@ -440,7 +440,7 @@ RSpec.describe MergeRequests::BuildService do
expect(merge_request.title).to eq('Closes #1234 Second commit')
end
- it 'adds the remaining lines of the first multi-line commit message as the description' do
+ it 'adds the remaining lines of the first multi-line commit message as the description', :sidekiq_inline do
expect(merge_request.description).to eq('Create the app')
end
end
diff --git a/spec/services/merge_requests/mergeability/check_base_service_spec.rb b/spec/services/merge_requests/mergeability/check_base_service_spec.rb
new file mode 100644
index 00000000000..f07522b43cb
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/check_base_service_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::Mergeability::CheckBaseService do
+ subject(:check_base_service) { described_class.new(merge_request: merge_request, params: params) }
+
+ let(:merge_request) { double }
+ let(:params) { double }
+
+ describe '#merge_request' do
+ it 'returns the merge_request' do
+ expect(check_base_service.merge_request).to eq merge_request
+ end
+ end
+
+ describe '#params' do
+ it 'returns the params' do
+ expect(check_base_service.params).to eq params
+ end
+ end
+
+ describe '#skip?' do
+ it 'raises NotImplementedError' do
+ expect { check_base_service.skip? }.to raise_error(NotImplementedError)
+ end
+ end
+
+  describe '#cacheable?' do
+    it 'raises NotImplementedError' do
+      expect { check_base_service.cacheable? }.to raise_error(NotImplementedError)
+    end
+  end
+
+  describe '#cache_key?' do
+    it 'raises NotImplementedError' do
+      expect { check_base_service.cache_key }.to raise_error(NotImplementedError)
+    end
+  end
+end
diff --git a/spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb
new file mode 100644
index 00000000000..6fbbecd7c0e
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::Mergeability::CheckCiStatusService do
+ subject(:check_ci_status) { described_class.new(merge_request: merge_request, params: params) }
+
+ let(:merge_request) { build(:merge_request) }
+ let(:params) { { skip_ci_check: skip_check } }
+ let(:skip_check) { false }
+
+ describe '#execute' do
+ before do
+ expect(merge_request).to receive(:mergeable_ci_state?).and_return(mergeable)
+ end
+
+    context 'when the merge request is in a mergeable state' do
+ let(:mergeable) { true }
+
+ it 'returns a check result with status success' do
+ expect(check_ci_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
+ end
+ end
+
+ context 'when the merge request is not in a mergeable state' do
+ let(:mergeable) { false }
+
+ it 'returns a check result with status failed' do
+ expect(check_ci_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ end
+ end
+ end
+
+ describe '#skip?' do
+ context 'when skip check is true' do
+ let(:skip_check) { true }
+
+ it 'returns true' do
+ expect(check_ci_status.skip?).to eq true
+ end
+ end
+
+ context 'when skip check is false' do
+ let(:skip_check) { false }
+
+ it 'returns false' do
+ expect(check_ci_status.skip?).to eq false
+ end
+ end
+ end
+
+ describe '#cacheable?' do
+ it 'returns false' do
+ expect(check_ci_status.cacheable?).to eq false
+ end
+ end
+end
diff --git a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
new file mode 100644
index 00000000000..170d99f4642
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::Mergeability::RunChecksService do
+ subject(:run_checks) { described_class.new(merge_request: merge_request, params: {}) }
+
+ let_it_be(:merge_request) { create(:merge_request) }
+
+ describe '#CHECKS' do
+ it 'contains every subclass of the base checks service' do
+ expect(described_class::CHECKS).to contain_exactly(*MergeRequests::Mergeability::CheckBaseService.subclasses)
+ end
+ end
+
+ describe '#execute' do
+ subject(:execute) { run_checks.execute }
+
+ let(:params) { {} }
+ let(:success_result) { Gitlab::MergeRequests::Mergeability::CheckResult.success }
+
+ context 'when every check is skipped' do
+ before do
+ MergeRequests::Mergeability::CheckBaseService.subclasses.each do |subclass|
+ expect_next_instance_of(subclass) do |service|
+ expect(service).to receive(:skip?).and_return(true)
+ end
+ end
+ end
+
+ it 'is still a success' do
+ expect(execute.all?(&:success?)).to eq(true)
+ end
+ end
+
+ context 'when a check is skipped' do
+ it 'does not execute the check' do
+ expect_next_instance_of(MergeRequests::Mergeability::CheckCiStatusService) do |service|
+ expect(service).to receive(:skip?).and_return(true)
+ expect(service).not_to receive(:execute)
+ end
+
+ expect(execute).to match_array([])
+ end
+ end
+
+ context 'when a check is not skipped' do
+ let(:cacheable) { true }
+ let(:merge_check) { instance_double(MergeRequests::Mergeability::CheckCiStatusService) }
+
+ before do
+ expect(MergeRequests::Mergeability::CheckCiStatusService).to receive(:new).and_return(merge_check)
+ expect(merge_check).to receive(:skip?).and_return(false)
+ allow(merge_check).to receive(:cacheable?).and_return(cacheable)
+ allow(merge_check).to receive(:execute).and_return(success_result)
+ end
+
+ context 'when the check is cacheable' do
+ context 'when the check is cached' do
+ it 'returns the cached result' do
+ expect_next_instance_of(Gitlab::MergeRequests::Mergeability::ResultsStore) do |service|
+ expect(service).to receive(:read).with(merge_check: merge_check).and_return(success_result)
+ end
+
+ expect(execute).to match_array([success_result])
+ end
+ end
+
+ context 'when the check is not cached' do
+ it 'writes and returns the result' do
+ expect_next_instance_of(Gitlab::MergeRequests::Mergeability::ResultsStore) do |service|
+ expect(service).to receive(:read).with(merge_check: merge_check).and_return(nil)
+ expect(service).to receive(:write).with(merge_check: merge_check, result_hash: success_result.to_hash).and_return(true)
+ end
+
+ expect(execute).to match_array([success_result])
+ end
+ end
+ end
+
+ context 'when check is not cacheable' do
+ let(:cacheable) { false }
+
+ it 'does not call the results store' do
+ expect(Gitlab::MergeRequests::Mergeability::ResultsStore).not_to receive(:new)
+
+ expect(execute).to match_array([success_result])
+ end
+ end
+
+ context 'when mergeability_caching is turned off' do
+ before do
+ stub_feature_flags(mergeability_caching: false)
+ end
+
+ it 'does not call the results store' do
+ expect(Gitlab::MergeRequests::Mergeability::ResultsStore).not_to receive(:new)
+
+ expect(execute).to match_array([success_result])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index f00a8928109..348ea9ad7d4 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -701,7 +701,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
let(:push_options) { { create: true } }
let(:changes) { new_branch_changes }
- it 'records an error' do
+ it 'records an error', :sidekiq_inline do
Members::DestroyService.new(user1).execute(ProjectMember.find_by!(user_id: user1.id))
service.execute
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index 0a56f01ebba..bca954c3959 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Notes::QuickActionsService do
let(:note_text) { "/relate #{other_issue.to_reference}" }
let(:note) { create(:note_on_issue, noteable: issue, project: project, note: note_text) }
- context 'user cannot relate issues' do
+ context 'user cannot relate issues', :sidekiq_inline do
before do
project.team.find_member(maintainer.id).destroy!
project.update!(visibility: Gitlab::VisibilityLevel::PUBLIC)
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index a03f1f17b39..48718cbc24a 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -3155,7 +3155,7 @@ RSpec.describe NotificationService, :mailer do
notification.pipeline_finished(pipeline)
end
- it 'does not send emails' do
+ it 'does not send emails', :sidekiq_inline do
should_not_email_anyone
end
end
diff --git a/spec/services/packages/composer/create_package_service_spec.rb b/spec/services/packages/composer/create_package_service_spec.rb
index 2ffd0a269f2..593777faa55 100644
--- a/spec/services/packages/composer/create_package_service_spec.rb
+++ b/spec/services/packages/composer/create_package_service_spec.rb
@@ -24,25 +24,6 @@ RSpec.describe Packages::Composer::CreatePackageService do
let(:created_package) { Packages::Package.composer.last }
- shared_examples 'using the cache update worker' do
- context 'with remove_composer_v1_cache_code enabled' do
- it 'does not enqueue a cache update job' do
- expect(::Packages::Composer::CacheUpdateWorker).not_to receive(:perform_async)
-
- subject
- end
- end
-
- context 'with remove_composer_v1_cache_code disabled' do
- it 'enqueues a cache update job' do
- stub_feature_flags(remove_composer_v1_cache_code: true)
- expect(::Packages::Composer::CacheUpdateWorker).not_to receive(:perform_async)
-
- subject
- end
- end
- end
-
context 'without an existing package' do
context 'with a branch' do
let(:branch) { project.repository.find_branch('master') }
@@ -64,7 +45,6 @@ RSpec.describe Packages::Composer::CreatePackageService do
it_behaves_like 'assigns build to package'
it_behaves_like 'assigns status to package'
- it_behaves_like 'using the cache update worker'
end
context 'with a tag' do
@@ -89,7 +69,6 @@ RSpec.describe Packages::Composer::CreatePackageService do
it_behaves_like 'assigns build to package'
it_behaves_like 'assigns status to package'
- it_behaves_like 'using the cache update worker'
end
end
@@ -106,8 +85,6 @@ RSpec.describe Packages::Composer::CreatePackageService do
.to change { Packages::Package.composer.count }.by(0)
.and change { Packages::Composer::Metadatum.count }.by(0)
end
-
- it_behaves_like 'using the cache update worker'
end
context 'belonging to another project' do
@@ -129,8 +106,6 @@ RSpec.describe Packages::Composer::CreatePackageService do
.to change { Packages::Package.composer.count }.by(1)
.and change { Packages::Composer::Metadatum.count }.by(1)
end
-
- it_behaves_like 'using the cache update worker'
end
end
end
diff --git a/spec/services/packages/debian/process_changes_service_spec.rb b/spec/services/packages/debian/process_changes_service_spec.rb
index 3069a2806b2..8e5e36cdbcb 100644
--- a/spec/services/packages/debian/process_changes_service_spec.rb
+++ b/spec/services/packages/debian/process_changes_service_spec.rb
@@ -36,7 +36,6 @@ RSpec.describe Packages::Debian::ProcessChangesService do
.to not_change { Packages::Package.count }
.and not_change { Packages::PackageFile.count }
.and not_change { incoming.package_files.count }
- .and not_change { distribution.reload.needs_update? }
.and raise_error(Packages::Debian::ExtractChangesMetadataService::ExtractionError, 'is not a changes file')
end
end
@@ -54,7 +53,6 @@ RSpec.describe Packages::Debian::ProcessChangesService do
.to not_change { Packages::Package.count }
.and not_change { Packages::PackageFile.count }
.and not_change { incoming.package_files.count }
- .and not_change { distribution.reload.needs_update? }
.and raise_error(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
end
end
diff --git a/spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb b/spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb
new file mode 100644
index 00000000000..dfe2ff9e57c
--- /dev/null
+++ b/spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Projects::ContainerRepository::CacheTagsCreatedAtService, :clean_gitlab_redis_cache do
+ let_it_be(:dummy_tag_class) { Struct.new(:name, :created_at) }
+ let_it_be(:repository) { create(:container_repository) }
+
+ let(:tags) { create_tags(5) }
+ let(:service) { described_class.new(repository) }
+
+ shared_examples 'not interacting with redis' do
+ it 'does not interact with redis' do
+ expect(::Gitlab::Redis::Cache).not_to receive(:with)
+
+ subject
+ end
+ end
+
+ describe '#populate' do
+ subject { service.populate(tags) }
+
+ context 'with tags' do
+ it 'gets values from redis' do
+ expect(::Gitlab::Redis::Cache).to receive(:with).and_call_original
+
+ expect(subject).to eq(0)
+
+ tags.each { |t| expect(t.created_at).to eq(nil) }
+ end
+
+ context 'with cached values' do
+ let(:cached_tags) { tags.first(2) }
+
+ before do
+ ::Gitlab::Redis::Cache.with do |redis|
+ cached_tags.each do |tag|
+ redis.set(cache_key(tag), rfc3339(10.days.ago))
+ end
+ end
+ end
+
+ it 'gets values from redis' do
+ expect(::Gitlab::Redis::Cache).to receive(:with).and_call_original
+
+ expect(subject).to eq(2)
+
+ cached_tags.each { |t| expect(t.created_at).not_to eq(nil) }
+ (tags - cached_tags).each { |t| expect(t.created_at).to eq(nil) }
+ end
+ end
+ end
+
+ context 'with no tags' do
+ let(:tags) { [] }
+
+ it_behaves_like 'not interacting with redis'
+ end
+ end
+
+ describe '#insert' do
+ let(:max_ttl) { 90.days }
+
+ subject { service.insert(tags, max_ttl) }
+
+ context 'with tags' do
+ let(:tag) { tags.first }
+ let(:ttl) { 90.days - 3.days }
+
+ before do
+ travel_to(Time.zone.local(2021, 9, 2, 12, 0, 0))
+
+ tag.created_at = DateTime.rfc3339(3.days.ago.rfc3339)
+ end
+
+ after do
+ travel_back
+ end
+
+ it 'inserts values in redis' do
+ ::Gitlab::Redis::Cache.with do |redis|
+ expect(redis)
+ .to receive(:set)
+ .with(cache_key(tag), rfc3339(tag.created_at), ex: ttl.to_i)
+ .and_call_original
+ end
+
+ subject
+ end
+
+ context 'with some of them already cached' do
+ let(:tag) { tags.first }
+
+ before do
+ ::Gitlab::Redis::Cache.with do |redis|
+ redis.set(cache_key(tag), rfc3339(10.days.ago))
+ end
+ service.populate(tags)
+ end
+
+ it_behaves_like 'not interacting with redis'
+ end
+ end
+
+ context 'with no tags' do
+ let(:tags) { [] }
+
+ it_behaves_like 'not interacting with redis'
+ end
+
+ context 'with no expires_in' do
+ let(:max_ttl) { nil }
+
+ it_behaves_like 'not interacting with redis'
+ end
+ end
+
+ def create_tags(size)
+ Array.new(size) do |i|
+ dummy_tag_class.new("Tag #{i}", nil)
+ end
+ end
+
+ def cache_key(tag)
+ "container_repository:{#{repository.id}}:tag:#{tag.name}:created_at"
+ end
+
+ def rfc3339(date_time)
+    # DateTime rfc3339 is different from ActiveSupport::TimeWithZone rfc3339
+ # The caching will use DateTime rfc3339
+ DateTime.rfc3339(date_time.rfc3339).rfc3339
+ end
+end
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index eed22416868..289bbf4540e 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -2,14 +2,14 @@
require 'spec_helper'
-RSpec.describe Projects::ContainerRepository::CleanupTagsService do
+RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_redis_cache do
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :private) }
- let_it_be(:repository) { create(:container_repository, :root, project: project) }
- let(:service) { described_class.new(project, user, params) }
+ let(:repository) { create(:container_repository, :root, project: project) }
+ let(:service) { described_class.new(repository, user, params) }
let(:tags) { %w[latest A Ba Bb C D E] }
before do
@@ -39,291 +39,442 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
end
describe '#execute' do
- subject { service.execute(repository) }
+ subject { service.execute }
- context 'when no params are specified' do
- let(:params) { {} }
+ shared_examples 'reading and removing tags' do |caching_enabled: true|
+ context 'when no params are specified' do
+ let(:params) { {} }
- it 'does not remove anything' do
- expect_any_instance_of(Projects::ContainerRepository::DeleteTagsService)
- .not_to receive(:execute)
+ it 'does not remove anything' do
+ expect_any_instance_of(Projects::ContainerRepository::DeleteTagsService)
+ .not_to receive(:execute)
+ expect_no_caching
- is_expected.to eq(expected_service_response(before_truncate_size: 0, after_truncate_size: 0, before_delete_size: 0))
- end
- end
-
- context 'when regex matching everything is specified' do
- shared_examples 'removes all matches' do
- it 'does remove all tags except latest' do
- expect_delete(%w(A Ba Bb C D E))
-
- is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C D E)))
+ is_expected.to eq(expected_service_response(before_truncate_size: 0, after_truncate_size: 0, before_delete_size: 0))
end
end
- let(:params) do
- { 'name_regex_delete' => '.*' }
- end
+ context 'when regex matching everything is specified' do
+ shared_examples 'removes all matches' do
+ it 'does remove all tags except latest' do
+ expect_no_caching
- it_behaves_like 'removes all matches'
+ expect_delete(%w(A Ba Bb C D E))
+
+ is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C D E)))
+ end
+ end
- context 'with deprecated name_regex param' do
let(:params) do
- { 'name_regex' => '.*' }
+ { 'name_regex_delete' => '.*' }
end
it_behaves_like 'removes all matches'
+
+ context 'with deprecated name_regex param' do
+ let(:params) do
+ { 'name_regex' => '.*' }
+ end
+
+ it_behaves_like 'removes all matches'
+ end
end
- end
- context 'with invalid regular expressions' do
- RSpec.shared_examples 'handling an invalid regex' do
- it 'keeps all tags' do
- expect(Projects::ContainerRepository::DeleteTagsService)
- .not_to receive(:new)
- subject
+ context 'with invalid regular expressions' do
+ shared_examples 'handling an invalid regex' do
+ it 'keeps all tags' do
+ expect_no_caching
+
+ expect(Projects::ContainerRepository::DeleteTagsService)
+ .not_to receive(:new)
+
+ subject
+ end
+
+ it { is_expected.to eq(status: :error, message: 'invalid regex') }
+
+ it 'calls error tracking service' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
+
+ subject
+ end
end
- it { is_expected.to eq(status: :error, message: 'invalid regex') }
+ context 'when name_regex_delete is invalid' do
+ let(:params) { { 'name_regex_delete' => '*test*' } }
+
+ it_behaves_like 'handling an invalid regex'
+ end
- it 'calls error tracking service' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
+ context 'when name_regex is invalid' do
+ let(:params) { { 'name_regex' => '*test*' } }
- subject
+ it_behaves_like 'handling an invalid regex'
end
- end
- context 'when name_regex_delete is invalid' do
- let(:params) { { 'name_regex_delete' => '*test*' } }
+ context 'when name_regex_keep is invalid' do
+ let(:params) { { 'name_regex_keep' => '*test*' } }
- it_behaves_like 'handling an invalid regex'
+ it_behaves_like 'handling an invalid regex'
+ end
end
- context 'when name_regex is invalid' do
- let(:params) { { 'name_regex' => '*test*' } }
+ context 'when delete regex matching specific tags is used' do
+ let(:params) do
+ { 'name_regex_delete' => 'C|D' }
+ end
- it_behaves_like 'handling an invalid regex'
- end
+ it 'does remove C and D' do
+ expect_delete(%w(C D))
- context 'when name_regex_keep is invalid' do
- let(:params) { { 'name_regex_keep' => '*test*' } }
+ expect_no_caching
- it_behaves_like 'handling an invalid regex'
- end
- end
+ is_expected.to eq(expected_service_response(deleted: %w(C D), before_truncate_size: 2, after_truncate_size: 2, before_delete_size: 2))
+ end
- context 'when delete regex matching specific tags is used' do
- let(:params) do
- { 'name_regex_delete' => 'C|D' }
- end
+ context 'with overriding allow regex' do
+ let(:params) do
+ { 'name_regex_delete' => 'C|D',
+ 'name_regex_keep' => 'C' }
+ end
- it 'does remove C and D' do
- expect_delete(%w(C D))
+ it 'does not remove C' do
+ expect_delete(%w(D))
- is_expected.to eq(expected_service_response(deleted: %w(C D), before_truncate_size: 2, after_truncate_size: 2, before_delete_size: 2))
- end
+ expect_no_caching
- context 'with overriding allow regex' do
- let(:params) do
- { 'name_regex_delete' => 'C|D',
- 'name_regex_keep' => 'C' }
+ is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
+ end
end
- it 'does not remove C' do
- expect_delete(%w(D))
+ context 'with name_regex_delete overriding deprecated name_regex' do
+ let(:params) do
+ { 'name_regex' => 'C|D',
+ 'name_regex_delete' => 'D' }
+ end
+
+ it 'does not remove C' do
+ expect_delete(%w(D))
+
+ expect_no_caching
- is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
+ is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
+ end
end
end
- context 'with name_regex_delete overriding deprecated name_regex' do
+ context 'with allow regex value' do
let(:params) do
- { 'name_regex' => 'C|D',
- 'name_regex_delete' => 'D' }
+ { 'name_regex_delete' => '.*',
+ 'name_regex_keep' => 'B.*' }
end
- it 'does not remove C' do
- expect_delete(%w(D))
+ it 'does not remove B*' do
+ expect_delete(%w(A C D E))
+
+ expect_no_caching
- is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
+ is_expected.to eq(expected_service_response(deleted: %w(A C D E), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
end
end
- end
- context 'with allow regex value' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'name_regex_keep' => 'B.*' }
- end
+ context 'when keeping only N tags' do
+ let(:params) do
+ { 'name_regex' => 'A|B.*|C',
+ 'keep_n' => 1 }
+ end
- it 'does not remove B*' do
- expect_delete(%w(A C D E))
+ it 'sorts tags by date' do
+ expect_delete(%w(Bb Ba C))
- is_expected.to eq(expected_service_response(deleted: %w(A C D E), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
- end
- end
+ expect_no_caching
- context 'when keeping only N tags' do
- let(:params) do
- { 'name_regex' => 'A|B.*|C',
- 'keep_n' => 1 }
+ expect(service).to receive(:order_by_date).and_call_original
+
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 3))
+ end
end
- it 'sorts tags by date' do
- expect_delete(%w(Bb Ba C))
+ context 'when not keeping N tags' do
+ let(:params) do
+ { 'name_regex' => 'A|B.*|C' }
+ end
+
+ it 'does not sort tags by date' do
+ expect_delete(%w(A Ba Bb C))
- expect(service).to receive(:order_by_date).and_call_original
+ expect_no_caching
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 3))
- end
- end
+ expect(service).not_to receive(:order_by_date)
- context 'when not keeping N tags' do
- let(:params) do
- { 'name_regex' => 'A|B.*|C' }
+ is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
+ end
end
- it 'does not sort tags by date' do
- expect_delete(%w(A Ba Bb C))
+ context 'when removing keeping only 3' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'keep_n' => 3 }
+ end
- expect(service).not_to receive(:order_by_date)
+ it 'does remove B* and C as they are the oldest' do
+ expect_delete(%w(Bb Ba C))
- is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
- end
- end
+ expect_no_caching
- context 'when removing keeping only 3' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 3 }
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
+ end
end
- it 'does remove B* and C as they are the oldest' do
- expect_delete(%w(Bb Ba C))
+ context 'when removing older than 1 day' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'older_than' => '1 day' }
+ end
+
+ it 'does remove B* and C as they are older than 1 day' do
+ expect_delete(%w(Ba Bb C))
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
- end
- end
+ expect_no_caching
- context 'when removing older than 1 day' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'older_than' => '1 day' }
+ is_expected.to eq(expected_service_response(deleted: %w(Ba Bb C), before_delete_size: 3))
+ end
end
- it 'does remove B* and C as they are older than 1 day' do
- expect_delete(%w(Ba Bb C))
+ context 'when combining all parameters' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day' }
+ end
+
+ it 'does remove B* and C' do
+ expect_delete(%w(Bb Ba C))
- is_expected.to eq(expected_service_response(deleted: %w(Ba Bb C), before_delete_size: 3))
- end
- end
+ expect_no_caching
- context 'when combining all parameters' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day' }
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
+ end
end
- it 'does remove B* and C' do
- expect_delete(%w(Bb Ba C))
+ context 'when running a container_expiration_policy' do
+ let(:user) { nil }
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
- end
- end
+ context 'with valid container_expiration_policy param' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day',
+ 'container_expiration_policy' => true }
+ end
- context 'when running a container_expiration_policy' do
- let(:user) { nil }
+ it 'succeeds without a user' do
+ expect_delete(%w(Bb Ba C), container_expiration_policy: true)
- context 'with valid container_expiration_policy param' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day',
- 'container_expiration_policy' => true }
+ caching_enabled ? expect_caching : expect_no_caching
+
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
+ end
end
- it 'succeeds without a user' do
- expect_delete(%w(Bb Ba C), container_expiration_policy: true)
+ context 'without container_expiration_policy param' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day' }
+ end
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
+ it 'fails' do
+ is_expected.to eq(status: :error, message: 'access denied')
+ end
end
end
- context 'without container_expiration_policy param' do
+ context 'truncating the tags list' do
let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day' }
+ {
+ 'name_regex_delete' => '.*',
+ 'keep_n' => 1
+ }
+ end
+
+ shared_examples 'returning the response' do |status:, original_size:, before_truncate_size:, after_truncate_size:, before_delete_size:|
+ it 'returns the response' do
+ expect_no_caching
+
+ result = subject
+
+ service_response = expected_service_response(
+ status: status,
+ original_size: original_size,
+ before_truncate_size: before_truncate_size,
+ after_truncate_size: after_truncate_size,
+ before_delete_size: before_delete_size,
+ deleted: nil
+ )
+
+ expect(result).to eq(service_response)
+ end
+ end
+
+ where(:feature_flag_enabled, :max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
+ false | 10 | :success | :success | false
+ false | 10 | :error | :error | false
+ false | 3 | :success | :success | false
+ false | 3 | :error | :error | false
+ false | 0 | :success | :success | false
+ false | 0 | :error | :error | false
+ true | 10 | :success | :success | false
+ true | 10 | :error | :error | false
+ true | 3 | :success | :error | true
+ true | 3 | :error | :error | true
+ true | 0 | :success | :success | false
+ true | 0 | :error | :error | false
end
- it 'fails' do
- is_expected.to eq(status: :error, message: 'access denied')
+ with_them do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_throttling: feature_flag_enabled)
+ stub_application_setting(container_registry_cleanup_tags_service_max_list_size: max_list_size)
+ allow_next_instance_of(Projects::ContainerRepository::DeleteTagsService) do |service|
+ expect(service).to receive(:execute).and_return(status: delete_tags_service_status)
+ end
+ end
+
+ original_size = 7
+ keep_n = 1
+
+ it_behaves_like(
+ 'returning the response',
+ status: params[:expected_status],
+ original_size: original_size,
+ before_truncate_size: original_size - keep_n,
+ after_truncate_size: params[:expected_truncated] ? params[:max_list_size] + keep_n : original_size - keep_n,
+ before_delete_size: params[:expected_truncated] ? params[:max_list_size] : original_size - keep_n - 1 # one tag is filtered out with older_than filter
+ )
end
end
end
- context 'truncating the tags list' do
+ context 'caching' do
let(:params) do
{
'name_regex_delete' => '.*',
- 'keep_n' => 1
+ 'keep_n' => 1,
+ 'older_than' => '1 day',
+ 'container_expiration_policy' => true
+ }
+ end
+
+ let(:tags_and_created_ats) do
+ {
+ 'A' => 1.hour.ago,
+ 'Ba' => 5.days.ago,
+ 'Bb' => 5.days.ago,
+ 'C' => 1.month.ago,
+ 'D' => nil,
+ 'E' => nil
}
end
- shared_examples 'returning the response' do |status:, original_size:, before_truncate_size:, after_truncate_size:, before_delete_size:|
- it 'returns the response' do
- result = subject
+ let(:cacheable_tags) { tags_and_created_ats.reject { |_, value| value.nil? } }
- service_response = expected_service_response(
- status: status,
- original_size: original_size,
- before_truncate_size: before_truncate_size,
- after_truncate_size: after_truncate_size,
- before_delete_size: before_delete_size,
- deleted: nil
- )
+ before do
+ expect_delete(%w(Bb Ba C), container_expiration_policy: true)
+ travel_to(Time.zone.local(2021, 9, 2, 12, 0, 0))
+ # We froze time so we need to set the created_at stubs again
+ stub_digest_config('sha256:configA', 1.hour.ago)
+ stub_digest_config('sha256:configB', 5.days.ago)
+ stub_digest_config('sha256:configC', 1.month.ago)
+ end
- expect(result).to eq(service_response)
- end
+ after do
+ travel_back
end
- where(:feature_flag_enabled, :max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
- false | 10 | :success | :success | false
- false | 10 | :error | :error | false
- false | 3 | :success | :success | false
- false | 3 | :error | :error | false
- false | 0 | :success | :success | false
- false | 0 | :error | :error | false
- true | 10 | :success | :success | false
- true | 10 | :error | :error | false
- true | 3 | :success | :error | true
- true | 3 | :error | :error | true
- true | 0 | :success | :success | false
- true | 0 | :error | :error | false
+ it 'caches the created_at values' do
+ ::Gitlab::Redis::Cache.with do |redis|
+ expect_mget(redis, tags_and_created_ats.keys)
+
+ expect_set(redis, cacheable_tags)
+ end
+
+ expect(subject).to include(cached_tags_count: 0)
end
- with_them do
+ context 'with cached values' do
before do
- stub_feature_flags(container_registry_expiration_policies_throttling: feature_flag_enabled)
- stub_application_setting(container_registry_cleanup_tags_service_max_list_size: max_list_size)
- allow_next_instance_of(Projects::ContainerRepository::DeleteTagsService) do |service|
- expect(service).to receive(:execute).and_return(status: delete_tags_service_status)
+ ::Gitlab::Redis::Cache.with do |redis|
+ redis.set(cache_key('C'), rfc3339(1.month.ago))
+ end
+ end
+
+ it 'uses them' do
+ ::Gitlab::Redis::Cache.with do |redis|
+ expect_mget(redis, tags_and_created_ats.keys)
+
+ # because C is already in cache, it should not be cached again
+ expect_set(redis, cacheable_tags.except('C'))
+ end
+
+ # We will ping the container registry for all tags *except* for C because it's cached
+ expect(ContainerRegistry::Blob).to receive(:new).with(repository, "digest" => "sha256:configA").and_call_original
+ expect(ContainerRegistry::Blob).to receive(:new).with(repository, "digest" => "sha256:configB").twice.and_call_original
+ expect(ContainerRegistry::Blob).not_to receive(:new).with(repository, "digest" => "sha256:configC")
+ expect(ContainerRegistry::Blob).to receive(:new).with(repository, "digest" => "sha256:configD").and_call_original
+
+ expect(subject).to include(cached_tags_count: 1)
+ end
+ end
+
+ def expect_mget(redis, keys)
+ expect(redis).to receive(:mget).with(keys.map(&method(:cache_key))).and_call_original
+ end
+
+ def expect_set(redis, tags)
+ tags.each do |tag_name, created_at|
+ ex = 1.day.seconds - (Time.zone.now - created_at).seconds
+ if ex > 0
+ expect(redis).to receive(:set).with(cache_key(tag_name), rfc3339(created_at), ex: ex.to_i)
end
end
+ end
+
+ def cache_key(tag_name)
+ "container_repository:{#{repository.id}}:tag:#{tag_name}:created_at"
+ end
+
+ def rfc3339(date_time)
+    # DateTime rfc3339 is different from ActiveSupport::TimeWithZone rfc3339
+ # The caching will use DateTime rfc3339
+ DateTime.rfc3339(date_time.rfc3339).rfc3339
+ end
+ end
+
+ context 'with container_registry_expiration_policies_caching enabled for the project' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_caching: project)
+ end
+
+ it_behaves_like 'reading and removing tags', caching_enabled: true
+ end
- original_size = 7
- keep_n = 1
+ context 'with container_registry_expiration_policies_caching disabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_caching: false)
+ end
+
+ it_behaves_like 'reading and removing tags', caching_enabled: false
+ end
- it_behaves_like(
- 'returning the response',
- status: params[:expected_status],
- original_size: original_size,
- before_truncate_size: original_size - keep_n,
- after_truncate_size: params[:expected_truncated] ? params[:max_list_size] + keep_n : original_size - keep_n,
- before_delete_size: params[:expected_truncated] ? params[:max_list_size] : original_size - keep_n - 1 # one tag is filtered out with older_than filter
- )
+ context 'with container_registry_expiration_policies_caching not enabled for the project' do
+ let_it_be(:another_project) { create(:project) }
+
+ before do
+ stub_feature_flags(container_registry_expiration_policies_caching: another_project)
end
+
+ it_behaves_like 'reading and removing tags', caching_enabled: false
end
end
@@ -368,7 +519,19 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
original_size: original_size,
before_truncate_size: before_truncate_size,
after_truncate_size: after_truncate_size,
- before_delete_size: before_delete_size
+ before_delete_size: before_delete_size,
+ cached_tags_count: 0
}.compact.merge(deleted_size: deleted&.size)
end
+
+ def expect_no_caching
+ expect(::Gitlab::Redis::Cache).not_to receive(:with)
+ end
+
+ def expect_caching
+ ::Gitlab::Redis::Cache.with do |redis|
+ expect(redis).to receive(:mget).and_call_original
+ expect(redis).to receive(:set).and_call_original
+ end
+ end
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index e15d9341fd1..d7c43ac676e 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -622,6 +622,22 @@ RSpec.describe Projects::CreateService, '#execute' do
end
end
+ context 'when SAST initialization is requested' do
+ let(:project) { create_project(user, opts) }
+
+ before do
+ opts[:initialize_with_sast] = '1'
+ allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return('main')
+ end
+
+ it 'creates a commit for SAST', :aggregate_failures do
+ expect(project.repository.commit_count).to be(1)
+ expect(project.repository.commit.message).to eq(
+ 'Configure SAST in `.gitlab-ci.yml`, creating this file if it does not already exist'
+ )
+ end
+ end
+
describe 'create integration for the project' do
subject(:project) { create_project(user, opts) }
@@ -823,25 +839,23 @@ RSpec.describe Projects::CreateService, '#execute' do
let_it_be(:user) { create :user }
context 'when parent group is present' do
- let_it_be(:group) do
+ let_it_be(:group, reload: true) do
create(:group) do |group|
group.add_owner(user)
end
end
before do
- allow_next_found_instance_of(Group) do |group|
- allow(group).to receive(:shared_runners_setting).and_return(shared_runners_setting)
- end
+ group.update_shared_runners_setting!(shared_runners_setting)
user.refresh_authorized_projects # Ensure cache is warm
end
context 'default value based on parent group setting' do
where(:shared_runners_setting, :desired_config_for_new_project, :expected_result_for_project) do
- 'enabled' | nil | true
- 'disabled_with_override' | nil | false
- 'disabled_and_unoverridable' | nil | false
+ Namespace::SR_ENABLED | nil | true
+ Namespace::SR_DISABLED_WITH_OVERRIDE | nil | false
+ Namespace::SR_DISABLED_AND_UNOVERRIDABLE | nil | false
end
with_them do
@@ -858,11 +872,11 @@ RSpec.describe Projects::CreateService, '#execute' do
context 'parent group is present and allows desired config' do
where(:shared_runners_setting, :desired_config_for_new_project, :expected_result_for_project) do
- 'enabled' | true | true
- 'enabled' | false | false
- 'disabled_with_override' | false | false
- 'disabled_with_override' | true | true
- 'disabled_and_unoverridable' | false | false
+ Namespace::SR_ENABLED | true | true
+ Namespace::SR_ENABLED | false | false
+ Namespace::SR_DISABLED_WITH_OVERRIDE | false | false
+ Namespace::SR_DISABLED_WITH_OVERRIDE | true | true
+ Namespace::SR_DISABLED_AND_UNOVERRIDABLE | false | false
end
with_them do
@@ -878,7 +892,7 @@ RSpec.describe Projects::CreateService, '#execute' do
context 'parent group is present and disallows desired config' do
where(:shared_runners_setting, :desired_config_for_new_project) do
- 'disabled_and_unoverridable' | true
+ Namespace::SR_DISABLED_AND_UNOVERRIDABLE | true
end
with_them do
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index 27688d8c966..9bdd9800fcc 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -39,26 +39,64 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
let!(:job_variables) { create(:ci_job_variable, job: build) }
let!(:report_result) { create(:ci_build_report_result, build: build) }
let!(:pending_state) { create(:ci_build_pending_state, build: build) }
+ let!(:pipeline_artifact) { create(:ci_pipeline_artifact, pipeline: pipeline) }
- it 'deletes build related records' do
- expect { destroy_project(project, user, {}) }.to change { Ci::Build.count }.by(-1)
+ it 'deletes build and pipeline related records' do
+ expect { destroy_project(project, user, {}) }
+ .to change { Ci::Build.count }.by(-1)
.and change { Ci::BuildTraceChunk.count }.by(-1)
.and change { Ci::JobArtifact.count }.by(-2)
+ .and change { Ci::DeletedObject.count }.by(2)
+ .and change { Ci::PipelineArtifact.count }.by(-1)
.and change { Ci::JobVariable.count }.by(-1)
.and change { Ci::BuildPendingState.count }.by(-1)
.and change { Ci::BuildReportResult.count }.by(-1)
.and change { Ci::BuildRunnerSession.count }.by(-1)
+ .and change { Ci::Pipeline.count }.by(-1)
end
- it 'avoids N+1 queries', skip: 'skipped until fixed in https://gitlab.com/gitlab-org/gitlab/-/issues/24644' do
- recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
+ context 'with abort_deleted_project_pipelines disabled' do
+ stub_feature_flags(abort_deleted_project_pipelines: false)
- project = create(:project, :repository, namespace: user.namespace)
- pipeline = create(:ci_pipeline, project: project)
- builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
- create_list(:ci_build_trace_chunk, 3, build: builds[0])
+ it 'avoids N+1 queries' do
+ recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
- expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+ project = create(:project, :repository, namespace: user.namespace)
+ pipeline = create(:ci_pipeline, project: project)
+ builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
+ create(:ci_pipeline_artifact, pipeline: pipeline)
+ create_list(:ci_build_trace_chunk, 3, build: builds[0])
+
+ expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+ end
+ end
+
+ context 'with ci_optimize_project_records_destruction disabled' do
+ stub_feature_flags(ci_optimize_project_records_destruction: false)
+
+ it 'avoids N+1 queries' do
+ recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
+
+ project = create(:project, :repository, namespace: user.namespace)
+ pipeline = create(:ci_pipeline, project: project)
+ builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
+ create_list(:ci_build_trace_chunk, 3, build: builds[0])
+
+ expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+ end
+ end
+
+ context 'with ci_optimize_project_records_destruction and abort_deleted_project_pipelines enabled' do
+ it 'avoids N+1 queries' do
+ recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
+
+ project = create(:project, :repository, namespace: user.namespace)
+ pipeline = create(:ci_pipeline, project: project)
+ builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
+ create_list(:ci_build_trace_chunk, 3, build: builds[0])
+
+ expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+ end
end
it_behaves_like 'deleting the project'
@@ -95,24 +133,63 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
end
context 'with abort_deleted_project_pipelines feature disabled' do
- it 'does not cancel project ci pipelines' do
+ before do
stub_feature_flags(abort_deleted_project_pipelines: false)
+ end
+ it 'does not bulk-fail project ci pipelines' do
expect(::Ci::AbortPipelinesService).not_to receive(:new)
destroy_project(project, user, {})
end
+
+ it 'does not destroy CI records via DestroyPipelineService' do
+ expect(::Ci::DestroyPipelineService).not_to receive(:new)
+
+ destroy_project(project, user, {})
+ end
end
context 'with abort_deleted_project_pipelines feature enabled' do
- it 'performs cancel for project ci pipelines' do
- stub_feature_flags(abort_deleted_project_pipelines: true)
- pipelines = build_list(:ci_pipeline, 3, :running)
- allow(project).to receive(:all_pipelines).and_return(pipelines)
+ let!(:pipelines) { create_list(:ci_pipeline, 3, :running, project: project) }
+ let(:destroy_pipeline_service) { double('DestroyPipelineService', execute: nil) }
- expect(::Ci::AbortPipelinesService).to receive_message_chain(:new, :execute).with(pipelines, :project_deleted)
+ context 'with ci_optimize_project_records_destruction disabled' do
+ before do
+ stub_feature_flags(ci_optimize_project_records_destruction: false)
+ end
- destroy_project(project, user, {})
+ it 'bulk-fails project ci pipelines' do
+ expect(::Ci::AbortPipelinesService)
+ .to receive_message_chain(:new, :execute)
+ .with(project.all_pipelines, :project_deleted)
+
+ destroy_project(project, user, {})
+ end
+
+ it 'does not destroy CI records via DestroyPipelineService' do
+ expect(::Ci::DestroyPipelineService).not_to receive(:new)
+
+ destroy_project(project, user, {})
+ end
+ end
+
+ context 'with ci_optimize_project_records_destruction enabled' do
+ it 'executes DestroyPipelineService for project ci pipelines' do
+ allow(::Ci::DestroyPipelineService).to receive(:new).and_return(destroy_pipeline_service)
+
+ expect(::Ci::AbortPipelinesService)
+ .to receive_message_chain(:new, :execute)
+ .with(project.all_pipelines, :project_deleted)
+
+ pipelines.each do |pipeline|
+ expect(destroy_pipeline_service)
+ .to receive(:execute)
+ .with(pipeline)
+ end
+
+ destroy_project(project, user, {})
+ end
end
end
diff --git a/spec/services/projects/group_links/update_service_spec.rb b/spec/services/projects/group_links/update_service_spec.rb
index 4a38fb0c7d9..ff1618c3bbe 100644
--- a/spec/services/projects/group_links/update_service_spec.rb
+++ b/spec/services/projects/group_links/update_service_spec.rb
@@ -34,86 +34,40 @@ RSpec.describe Projects::GroupLinks::UpdateService, '#execute' do
end
context 'project authorizations update' do
- context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is enabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: true)
- end
-
- it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
- expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
- .to receive(:perform_async).with(link.project.id)
-
- subject
- end
-
- it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
- expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
- receive(:bulk_perform_in)
- .with(1.hour,
- [[user.id]],
- batch_delay: 30.seconds, batch_size: 100)
- )
-
- subject
- end
-
- it 'updates project authorizations of users who had access to the project via the group share', :sidekiq_inline do
- group.add_maintainer(user)
-
- expect { subject }.to(
- change { Ability.allowed?(user, :create_release, project) }
- .from(true).to(false))
- end
- end
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
+ .to receive(:perform_async).with(link.project.id)
- context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is disabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: false)
- end
+ subject
+ end
- it 'calls UserProjectAccessChangedService to update project authorizations' do
- expect_next_instance_of(UserProjectAccessChangedService, [user.id]) do |service|
- expect(service).to receive(:execute)
- end
+ it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour,
+ [[user.id]],
+ batch_delay: 30.seconds, batch_size: 100)
+ )
- subject
- end
+ subject
+ end
- it 'updates project authorizations of users who had access to the project via the group share' do
- group.add_maintainer(user)
+ it 'updates project authorizations of users who had access to the project via the group share', :sidekiq_inline do
+ group.add_maintainer(user)
- expect { subject }.to(
- change { Ability.allowed?(user, :create_release, project) }
- .from(true).to(false))
- end
+ expect { subject }.to(
+ change { Ability.allowed?(user, :create_release, project) }
+ .from(true).to(false))
end
end
context 'with only param not requiring authorization refresh' do
let(:group_link_params) { { expires_at: Date.tomorrow } }
- context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is enabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: true)
- end
-
- it 'does not perform any project authorizations update using `AuthorizedProjectUpdate::ProjectRecalculateWorker`' do
- expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).not_to receive(:perform_async)
-
- subject
- end
- end
-
- context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is disabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: false)
- end
-
- it 'does not perform any project authorizations update using `UserProjectAccessChangedService`' do
- expect(UserProjectAccessChangedService).not_to receive(:new)
+ it 'does not perform any project authorizations update using `AuthorizedProjectUpdate::ProjectRecalculateWorker`' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).not_to receive(:perform_async)
- subject
- end
+ subject
end
end
end
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index 92e18b6cb46..1d63f72ec38 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -86,6 +86,12 @@ RSpec.describe Projects::ImportService do
end
context 'with a Github repository' do
+ it 'tracks the start of import' do
+ expect(Gitlab::GithubImport::ParallelImporter).to receive(:track_start_import)
+
+ subject.execute
+ end
+
it 'succeeds if repository import was scheduled' do
expect_any_instance_of(Gitlab::GithubImport::ParallelImporter)
.to receive(:execute)
diff --git a/spec/services/projects/move_access_service_spec.rb b/spec/services/projects/move_access_service_spec.rb
index 90167ffebed..45e10c3ca84 100644
--- a/spec/services/projects/move_access_service_spec.rb
+++ b/spec/services/projects/move_access_service_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Projects::MoveAccessService do
describe '#execute' do
shared_examples 'move the accesses' do
- it do
+ it 'moves the accesses', :sidekiq_inline do
expect(project_with_access.project_members.count).to eq 4
expect(project_with_access.project_group_links.count).to eq 3
expect(project_with_access.authorized_users.count).to eq 4
diff --git a/spec/services/projects/operations/update_service_spec.rb b/spec/services/projects/operations/update_service_spec.rb
index a71fafb2121..b64f2d1e7d6 100644
--- a/spec/services/projects/operations/update_service_spec.rb
+++ b/spec/services/projects/operations/update_service_spec.rb
@@ -294,10 +294,10 @@ RSpec.describe Projects::Operations::UpdateService do
end
context 'without setting' do
- it 'does not create a setting' do
- expect(result[:status]).to eq(:error)
-
- expect(project.reload.error_tracking_setting).to be_nil
+ it 'creates setting with default values' do
+ expect(result[:status]).to eq(:success)
+ expect(project.error_tracking_setting.enabled).to be_truthy
+ expect(project.error_tracking_setting.integrated).to be_truthy
end
end
end
diff --git a/spec/services/projects/participants_service_spec.rb b/spec/services/projects/participants_service_spec.rb
index b84e28314f2..eab7228307a 100644
--- a/spec/services/projects/participants_service_spec.rb
+++ b/spec/services/projects/participants_service_spec.rb
@@ -104,104 +104,116 @@ RSpec.describe Projects::ParticipantsService do
describe '#project_members' do
subject(:usernames) { service.project_members.map { |member| member[:username] } }
- context 'when there is a project in group namespace' do
- let_it_be(:public_group) { create(:group, :public) }
- let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
+ shared_examples 'return project members' do
+ context 'when there is a project in group namespace' do
+ let_it_be(:public_group) { create(:group, :public) }
+ let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
- let_it_be(:public_group_owner) { create(:user) }
+ let_it_be(:public_group_owner) { create(:user) }
- let(:service) { described_class.new(public_project, create(:user)) }
+ let(:service) { described_class.new(public_project, create(:user)) }
- before do
- public_group.add_owner(public_group_owner)
- end
+ before do
+ public_group.add_owner(public_group_owner)
+ end
- it 'returns members of a group' do
- expect(usernames).to include(public_group_owner.username)
+ it 'returns members of a group' do
+ expect(usernames).to include(public_group_owner.username)
+ end
end
- end
-
- context 'when there is a private group and a public project' do
- let_it_be(:public_group) { create(:group, :public) }
- let_it_be(:private_group) { create(:group, :private, :nested) }
- let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
- let_it_be(:project_issue) { create(:issue, project: public_project)}
+ context 'when there is a private group and a public project' do
+ let_it_be(:public_group) { create(:group, :public) }
+ let_it_be(:private_group) { create(:group, :private, :nested) }
+ let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
- let_it_be(:public_group_owner) { create(:user) }
- let_it_be(:private_group_member) { create(:user) }
- let_it_be(:public_project_maintainer) { create(:user) }
- let_it_be(:private_group_owner) { create(:user) }
+ let_it_be(:project_issue) { create(:issue, project: public_project)}
- let_it_be(:group_ancestor_owner) { create(:user) }
+ let_it_be(:public_group_owner) { create(:user) }
+ let_it_be(:private_group_member) { create(:user) }
+ let_it_be(:public_project_maintainer) { create(:user) }
+ let_it_be(:private_group_owner) { create(:user) }
- before_all do
- public_group.add_owner public_group_owner
- private_group.add_developer private_group_member
- public_project.add_maintainer public_project_maintainer
+ let_it_be(:group_ancestor_owner) { create(:user) }
- private_group.add_owner private_group_owner
- private_group.parent.add_owner group_ancestor_owner
- end
-
- context 'when the private group is invited to the public project' do
before_all do
- create(:project_group_link, group: private_group, project: public_project)
- end
+ public_group.add_owner public_group_owner
+ private_group.add_developer private_group_member
+ public_project.add_maintainer public_project_maintainer
- context 'when a user who is outside the public project and the private group is signed in' do
- let(:service) { described_class.new(public_project, create(:user)) }
+ private_group.add_owner private_group_owner
+ private_group.parent.add_owner group_ancestor_owner
+ end
- it 'does not return the private group' do
- expect(usernames).not_to include(private_group.name)
+ context 'when the private group is invited to the public project' do
+ before_all do
+ create(:project_group_link, group: private_group, project: public_project)
end
- it 'does not return private group members' do
- expect(usernames).not_to include(private_group_member.username)
- end
+ context 'when a user who is outside the public project and the private group is signed in' do
+ let(:service) { described_class.new(public_project, create(:user)) }
- it 'returns the project maintainer' do
- expect(usernames).to include(public_project_maintainer.username)
- end
+ it 'does not return the private group' do
+ expect(usernames).not_to include(private_group.name)
+ end
- it 'returns project members from an invited public group' do
- invited_public_group = create(:group, :public)
- invited_public_group.add_owner create(:user)
+ it 'does not return private group members' do
+ expect(usernames).not_to include(private_group_member.username)
+ end
- create(:project_group_link, group: invited_public_group, project: public_project)
+ it 'returns the project maintainer' do
+ expect(usernames).to include(public_project_maintainer.username)
+ end
- expect(usernames).to include(invited_public_group.users.first.username)
- end
+ it 'returns project members from an invited public group' do
+ invited_public_group = create(:group, :public)
+ invited_public_group.add_owner create(:user)
- it 'does not return ancestors of the private group' do
- expect(usernames).not_to include(group_ancestor_owner.username)
- end
- end
+ create(:project_group_link, group: invited_public_group, project: public_project)
- context 'when private group owner is signed in' do
- let(:service) { described_class.new(public_project, private_group_owner) }
+ expect(usernames).to include(invited_public_group.users.first.username)
+ end
- it 'returns private group members' do
- expect(usernames).to include(private_group_member.username)
+ it 'does not return ancestors of the private group' do
+ expect(usernames).not_to include(group_ancestor_owner.username)
+ end
end
- it 'returns ancestors of the the private group' do
- expect(usernames).to include(group_ancestor_owner.username)
- end
- end
+ context 'when private group owner is signed in' do
+ let(:service) { described_class.new(public_project, private_group_owner) }
- context 'when the namespace owner of the public project is signed in' do
- let(:service) { described_class.new(public_project, public_group_owner) }
+ it 'returns private group members' do
+ expect(usernames).to include(private_group_member.username)
+ end
- it 'returns private group members' do
- expect(usernames).to include(private_group_member.username)
+ it 'returns ancestors of the private group' do
+ expect(usernames).to include(group_ancestor_owner.username)
+ end
end
- it 'does not return members of the ancestral groups of the private group' do
- expect(usernames).to include(group_ancestor_owner.username)
+ context 'when the namespace owner of the public project is signed in' do
+ let(:service) { described_class.new(public_project, public_group_owner) }
+
+ it 'returns private group members' do
+ expect(usernames).to include(private_group_member.username)
+ end
+
+ it 'returns members of the ancestral groups of the private group' do
+ expect(usernames).to include(group_ancestor_owner.username)
+ end
end
end
end
end
+
+ it_behaves_like 'return project members'
+
+ context 'when feature flag :linear_participants_service_ancestor_scopes is disabled' do
+ before do
+ stub_feature_flags(linear_participants_service_ancestor_scopes: false)
+ end
+
+ it_behaves_like 'return project members'
+ end
end
end
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index d96573e26af..b539b01066e 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -64,6 +64,33 @@ RSpec.describe Projects::TransferService do
expect(transfer_result).to be_truthy
expect(project.namespace).to eq(group)
end
+
+ context 'when project has an associated project namespace' do
+ let!(:project_namespace) { create(:project_namespace, project: project) }
+
+ it 'keeps project namespace in sync with project' do
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to be_truthy
+
+ project_namespace_in_sync(group)
+ end
+
+ context 'when project is transferred to a deeper nested group' do
+ let(:parent_group) { create(:group) }
+ let(:sub_group) { create(:group, parent: parent_group) }
+ let(:sub_sub_group) { create(:group, parent: sub_group) }
+ let(:group) { sub_sub_group }
+
+ it 'keeps project namespace in sync with project' do
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to be_truthy
+
+ project_namespace_in_sync(sub_sub_group)
+ end
+ end
+ end
end
context 'when transfer succeeds' do
@@ -143,6 +170,28 @@ RSpec.describe Projects::TransferService do
end
end
end
+
+ context 'when project has pending builds' do
+ let!(:other_project) { create(:project) }
+ let!(:pending_build) { create(:ci_pending_build, project: project.reload) }
+ let!(:unrelated_pending_build) { create(:ci_pending_build, project: other_project) }
+
+ before do
+ group.reload
+ end
+
+ it 'updates pending builds for the project', :aggregate_failures do
+ execute_transfer
+
+ pending_build.reload
+ unrelated_pending_build.reload
+
+ expect(pending_build.namespace_id).to eq(group.id)
+ expect(pending_build.namespace_traversal_ids).to eq(group.traversal_ids)
+ expect(unrelated_pending_build.namespace_id).to eq(other_project.namespace_id)
+ expect(unrelated_pending_build.namespace_traversal_ids).to eq(other_project.namespace.traversal_ids)
+ end
+ end
end
context 'when transfer fails' do
@@ -203,6 +252,34 @@ RSpec.describe Projects::TransferService do
shard_name: project.repository_storage
)
end
+
+ context 'when project has pending builds' do
+ let!(:other_project) { create(:project) }
+ let!(:pending_build) { create(:ci_pending_build, project: project.reload) }
+ let!(:unrelated_pending_build) { create(:ci_pending_build, project: other_project) }
+
+ it 'does not update pending builds for the project', :aggregate_failures do
+ attempt_project_transfer
+
+ pending_build.reload
+ unrelated_pending_build.reload
+
+ expect(pending_build.namespace_id).to eq(project.namespace_id)
+ expect(pending_build.namespace_traversal_ids).to eq(project.namespace.traversal_ids)
+ expect(unrelated_pending_build.namespace_id).to eq(other_project.namespace_id)
+ expect(unrelated_pending_build.namespace_traversal_ids).to eq(other_project.namespace.traversal_ids)
+ end
+ end
+
+ context 'when project has an associated project namespace' do
+ let!(:project_namespace) { create(:project_namespace, project: project) }
+
+ it 'keeps project namespace in sync with project' do
+ attempt_project_transfer
+
+ project_namespace_in_sync(user.namespace)
+ end
+ end
end
context 'namespace -> no namespace' do
@@ -215,6 +292,18 @@ RSpec.describe Projects::TransferService do
expect(project.namespace).to eq(user.namespace)
expect(project.errors.messages[:new_namespace].first).to eq 'Please select a new namespace for your project.'
end
+
+ context 'when project has an associated project namespace' do
+ let!(:project_namespace) { create(:project_namespace, project: project) }
+
+ it 'keeps project namespace in sync with project' do
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to be false
+
+ project_namespace_in_sync(user.namespace)
+ end
+ end
end
context 'disallow transferring of project with tags' do
@@ -369,28 +458,23 @@ RSpec.describe Projects::TransferService do
using RSpec::Parameterized::TableSyntax
where(:project_shared_runners_enabled, :shared_runners_setting, :expected_shared_runners_enabled) do
- true | 'disabled_and_unoverridable' | false
- false | 'disabled_and_unoverridable' | false
- true | 'disabled_with_override' | true
- false | 'disabled_with_override' | false
- true | 'enabled' | true
- false | 'enabled' | false
+ true | :disabled_and_unoverridable | false
+ false | :disabled_and_unoverridable | false
+ true | :disabled_with_override | true
+ false | :disabled_with_override | false
+ true | :shared_runners_enabled | true
+ false | :shared_runners_enabled | false
end
with_them do
let(:project) { create(:project, :public, :repository, namespace: user.namespace, shared_runners_enabled: project_shared_runners_enabled) }
- let(:group) { create(:group) }
+ let(:group) { create(:group, shared_runners_setting) }
- before do
+ it 'updates shared runners based on the parent group' do
group.add_owner(user)
- expect_next_found_instance_of(Group) do |group|
- expect(group).to receive(:shared_runners_setting).and_return(shared_runners_setting)
- end
- execute_transfer
- end
+ expect(execute_transfer).to eq(true)
- it 'updates shared runners based on the parent group' do
expect(project.shared_runners_enabled).to eq(expected_shared_runners_enabled)
end
end
@@ -478,58 +562,30 @@ RSpec.describe Projects::TransferService do
group.add_owner(user)
end
- context 'when the feature flag `specialized_worker_for_project_transfer_auth_recalculation` is enabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_transfer_auth_recalculation: true)
- end
-
- it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
- expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
- .to receive(:perform_async).with(project.id)
-
- execute_transfer
- end
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
+ .to receive(:perform_async).with(project.id)
- it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
- user_ids = [user.id, member_of_old_group.id, member_of_new_group.id].map { |id| [id] }
-
- expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
- receive(:bulk_perform_in)
- .with(1.hour,
- user_ids,
- batch_delay: 30.seconds, batch_size: 100)
- )
-
- subject
- end
-
- it 'refreshes the permissions of the members of the old and new namespace', :sidekiq_inline do
- expect { execute_transfer }
- .to change { member_of_old_group.authorized_projects.include?(project) }.from(true).to(false)
- .and change { member_of_new_group.authorized_projects.include?(project) }.from(false).to(true)
- end
+ execute_transfer
end
- context 'when the feature flag `specialized_worker_for_project_transfer_auth_recalculation` is disabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_transfer_auth_recalculation: false)
- end
-
- it 'calls UserProjectAccessChangedService to update project authorizations' do
- user_ids = [user.id, member_of_old_group.id, member_of_new_group.id]
+ it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
+ user_ids = [user.id, member_of_old_group.id, member_of_new_group.id].map { |id| [id] }
- expect_next_instance_of(UserProjectAccessChangedService, user_ids) do |service|
- expect(service).to receive(:execute)
- end
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour,
+ user_ids,
+ batch_delay: 30.seconds, batch_size: 100)
+ )
- execute_transfer
- end
+ subject
+ end
- it 'refreshes the permissions of the members of the old and new namespace' do
- expect { execute_transfer }
- .to change { member_of_old_group.authorized_projects.include?(project) }.from(true).to(false)
- .and change { member_of_new_group.authorized_projects.include?(project) }.from(false).to(true)
- end
+ it 'refreshes the permissions of the members of the old and new namespace', :sidekiq_inline do
+ expect { execute_transfer }
+ .to change { member_of_old_group.authorized_projects.include?(project) }.from(true).to(false)
+ .and change { member_of_new_group.authorized_projects.include?(project) }.from(false).to(true)
end
end
@@ -643,4 +699,13 @@ RSpec.describe Projects::TransferService do
def rugged_config
rugged_repo(project.repository).config
end
+
+ def project_namespace_in_sync(group)
+ project.reload
+ expect(project.namespace).to eq(group)
+ expect(project.project_namespace.visibility_level).to eq(project.visibility_level)
+ expect(project.project_namespace.path).to eq(project.path)
+ expect(project.project_namespace.parent).to eq(project.namespace)
+ expect(project.project_namespace.traversal_ids).to eq([*project.namespace.traversal_ids, project.project_namespace.id])
+ end
end
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index 6d0b75e0c95..5810024a1ef 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -173,14 +173,6 @@ RSpec.describe Projects::UpdatePagesService do
include_examples 'successfully deploys'
- context 'when pages_smart_check_outdated_sha feature flag is disabled' do
- before do
- stub_feature_flags(pages_smart_check_outdated_sha: false)
- end
-
- include_examples 'fails with outdated reference message'
- end
-
context 'when old deployment present' do
before do
old_build = create(:ci_build, pipeline: old_pipeline, ref: 'HEAD')
@@ -189,14 +181,6 @@ RSpec.describe Projects::UpdatePagesService do
end
include_examples 'successfully deploys'
-
- context 'when pages_smart_check_outdated_sha feature flag is disabled' do
- before do
- stub_feature_flags(pages_smart_check_outdated_sha: false)
- end
-
- include_examples 'fails with outdated reference message'
- end
end
context 'when newer deployment present' do
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 115f3098185..4923ef169e8 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -374,7 +374,7 @@ RSpec.describe Projects::UpdateService do
expect(result).to eq({
status: :error,
- message: "Name can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'."
+ message: "Name can contain only letters, digits, emojis, '_', '.', '+', dashes, or spaces. It must start with a letter, digit, emoji, or '_'."
})
end
end
@@ -441,26 +441,62 @@ RSpec.describe Projects::UpdateService do
end
end
- context 'when updating #shared_runners', :https_pages_enabled do
- let!(:pending_build) { create(:ci_pending_build, project: project, instance_runners_enabled: true) }
+ context 'when updating runners settings' do
+ let(:settings) do
+ { instance_runners_enabled: true, namespace_traversal_ids: [123] }
+ end
- subject(:call_service) do
- update_project(project, admin, shared_runners_enabled: shared_runners_enabled)
+ let!(:pending_build) do
+ create(:ci_pending_build, project: project, **settings)
+ end
+
+ context 'when project has shared runners enabled' do
+ let(:project) { create(:project, shared_runners_enabled: true) }
+
+ it 'updates builds queue when shared runners get disabled' do
+ expect { update_project(project, admin, shared_runners_enabled: false) }
+ .to change { pending_build.reload.instance_runners_enabled }.to(false)
+
+ expect(pending_build.reload.instance_runners_enabled).to be false
+ end
+ end
+
+ context 'when project has shared runners disabled' do
+ let(:project) { create(:project, shared_runners_enabled: false) }
+
+ it 'updates builds queue when shared runners get enabled' do
+ expect { update_project(project, admin, shared_runners_enabled: true) }
+ .to not_change { pending_build.reload.instance_runners_enabled }
+
+ expect(pending_build.reload.instance_runners_enabled).to be true
+ end
end
- context 'when shared runners is toggled' do
- let(:shared_runners_enabled) { false }
+ context 'when project has group runners enabled' do
+ let(:project) { create(:project, group_runners_enabled: true) }
+
+ before do
+ project.ci_cd_settings.update!(group_runners_enabled: true)
+ end
+
+ it 'updates builds queue when group runners get disabled' do
+ update_project(project, admin, group_runners_enabled: false)
- it 'updates ci pending builds' do
- expect { call_service }.to change { pending_build.reload.instance_runners_enabled }.to(false)
+ expect(pending_build.reload.namespace_traversal_ids).to be_empty
end
end
- context 'when shared runners is not toggled' do
- let(:shared_runners_enabled) { true }
+ context 'when project has group runners disabled' do
+ let(:project) { create(:project, :in_subgroup, group_runners_enabled: false) }
+
+ before do
+ project.reload.ci_cd_settings.update!(group_runners_enabled: false)
+ end
+
+ it 'updates builds queue when group runners get enabled' do
+ update_project(project, admin, group_runners_enabled: true)
- it 'updates ci pending builds' do
- expect { call_service }.to not_change { pending_build.reload.instance_runners_enabled }
+ expect(pending_build.reload.namespace_traversal_ids).to include(project.namespace.id)
end
end
end
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 02997096021..d67b189f90e 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -1935,6 +1935,21 @@ RSpec.describe QuickActions::InterpretService do
it_behaves_like 'relate command'
end
+ context 'when quick action target is unpersisted' do
+ let(:issue) { build(:issue, project: project) }
+ let(:other_issue) { create(:issue, project: project) }
+ let(:issues_related) { [other_issue] }
+ let(:content) { "/relate #{other_issue.to_reference}" }
+
+ it 'relates the issues after the issue is persisted' do
+ service.execute(content, issue)
+
+ issue.save!
+
+ expect(IssueLink.where(source: issue).map(&:target)).to match_array(issues_related)
+ end
+ end
+
context 'empty relate command' do
let(:issues_related) { [] }
let(:content) { '/relate' }
diff --git a/spec/services/security/ci_configuration/sast_create_service_spec.rb b/spec/services/security/ci_configuration/sast_create_service_spec.rb
index 44f8f07a5be..c7e732dc79a 100644
--- a/spec/services/security/ci_configuration/sast_create_service_spec.rb
+++ b/spec/services/security/ci_configuration/sast_create_service_spec.rb
@@ -23,4 +23,27 @@ RSpec.describe Security::CiConfiguration::SastCreateService, :snowplow do
end
include_examples 'services security ci configuration create service'
+
+ context "when committing to the default branch", :aggregate_failures do
+ subject(:result) { described_class.new(project, user, params, commit_on_default: true).execute }
+
+ let(:params) { {} }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it "doesn't try to remove that branch on raised exceptions" do
+ expect(Files::MultiService).to receive(:new).and_raise(StandardError, '_exception_')
+ expect(project.repository).not_to receive(:rm_branch)
+
+ expect { result }.to raise_error(StandardError, '_exception_')
+ end
+
+ it "commits directly to the default branch" do
+ expect(result.status).to eq(:success)
+ expect(result.payload[:success_path]).to match(/#{Gitlab::Routing.url_helpers.project_new_merge_request_url(project, {})}(.*)description(.*)source_branch/)
+ expect(result.payload[:branch]).to eq('master')
+ end
+ end
end
diff --git a/spec/services/service_ping/submit_service_ping_service_spec.rb b/spec/services/service_ping/submit_service_ping_service_spec.rb
index c2fe565938a..d8672eec682 100644
--- a/spec/services/service_ping/submit_service_ping_service_spec.rb
+++ b/spec/services/service_ping/submit_service_ping_service_spec.rb
@@ -50,6 +50,7 @@ RSpec.describe ServicePing::SubmitService do
let(:with_dev_ops_score_params) { { dev_ops_score: score_params[:score] } }
let(:with_conv_index_params) { { conv_index: score_params[:score] } }
+ let(:with_usage_data_id_params) { { conv_index: { usage_data_id: usage_data_id } } }
shared_examples 'does not run' do
it do
@@ -173,6 +174,29 @@ RSpec.describe ServicePing::SubmitService do
end
end
+ context 'when only usage_data_id is passed in response' do
+ before do
+ stub_response(body: with_usage_data_id_params)
+ end
+
+ it 'does not save DevOps report data' do
+ expect { subject.execute }.not_to change { DevOpsReport::Metric.count }
+ end
+
+ it 'saves usage_data_id to version_usage_data_id_value' do
+ recorded_at = Time.current
+ usage_data = { uuid: 'uuid', recorded_at: recorded_at }
+
+ expect(Gitlab::UsageData).to receive(:data).with(force_refresh: true).and_return(usage_data)
+
+ subject.execute
+
+ raw_usage_data = RawUsageData.find_by(recorded_at: recorded_at)
+
+ expect(raw_usage_data.version_usage_data_id_value).to eq(31643)
+ end
+ end
+
context 'when version app usage_data_id is invalid' do
let(:usage_data_id) { -1000 }
diff --git a/spec/services/user_project_access_changed_service_spec.rb b/spec/services/user_project_access_changed_service_spec.rb
index f8835fefc84..438db6b987b 100644
--- a/spec/services/user_project_access_changed_service_spec.rb
+++ b/spec/services/user_project_access_changed_service_spec.rb
@@ -47,15 +47,13 @@ RSpec.describe UserProjectAccessChangedService do
let(:service) { UserProjectAccessChangedService.new([1, 2]) }
before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
-
expect(AuthorizedProjectsWorker).to receive(:bulk_perform_and_wait)
.with([[1], [2]])
.and_return(10)
end
it 'sticks all the updated users and returns the original result', :aggregate_failures do
- expect(Gitlab::Database::LoadBalancing::Sticking).to receive(:bulk_stick).with(:user, [1, 2])
+ expect(ApplicationRecord.sticking).to receive(:bulk_stick).with(:user, [1, 2])
expect(service.execute).to eq(10)
end
diff --git a/spec/services/users/activity_service_spec.rb b/spec/services/users/activity_service_spec.rb
index 6c1df5c745f..092c5cd3e5e 100644
--- a/spec/services/users/activity_service_spec.rb
+++ b/spec/services/users/activity_service_spec.rb
@@ -91,9 +91,9 @@ RSpec.describe Users::ActivityService do
context 'when last activity is in the past' do
let(:user) { create(:user, last_activity_on: Date.today - 1.week) }
- context 'database load balancing is configured', :db_load_balancing do
+ context 'database load balancing is configured' do
before do
- allow(ActiveRecord::Base).to receive(:connection).and_return(::Gitlab::Database::LoadBalancing.proxy)
+ ::Gitlab::Database::LoadBalancing::Session.clear_session
end
let(:service) do
diff --git a/spec/services/users/update_service_spec.rb b/spec/services/users/update_service_spec.rb
index b30b7e6eb56..3244db4c1fb 100644
--- a/spec/services/users/update_service_spec.rb
+++ b/spec/services/users/update_service_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Users::UpdateService do
- let(:user) { create(:user) }
+ let(:password) { 'longsecret987!' }
+ let(:user) { create(:user, password: password, password_confirmation: password) }
describe '#execute' do
it 'updates time preferences' do
@@ -18,7 +19,7 @@ RSpec.describe Users::UpdateService do
it 'returns an error result when record cannot be updated' do
result = {}
expect do
- result = update_user(user, { email: 'invalid' })
+ result = update_user(user, { email: 'invalid', validation_password: password })
end.not_to change { user.reload.email }
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Email is invalid')
@@ -65,7 +66,7 @@ RSpec.describe Users::UpdateService do
context 'updating canonical email' do
context 'if email was changed' do
subject do
- update_user(user, email: 'user+extrastuff@example.com')
+ update_user(user, email: 'user+extrastuff@example.com', validation_password: password)
end
it 'calls canonicalize_email' do
@@ -75,15 +76,68 @@ RSpec.describe Users::UpdateService do
subject
end
+
+ context 'when check_password is true' do
+ def update_user(user, opts)
+ described_class.new(user, opts.merge(user: user)).execute(check_password: true)
+ end
+
+ it 'returns error if no password confirmation was passed', :aggregate_failures do
+ result = {}
+
+ expect do
+ result = update_user(user, { email: 'example@example.com' })
+ end.not_to change { user.reload.unconfirmed_email }
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Invalid password')
+ end
+
+ it 'returns error if wrong password confirmation was passed', :aggregate_failures do
+ result = {}
+
+ expect do
+ result = update_user(user, { email: 'example@example.com', validation_password: 'wrongpassword' })
+ end.not_to change { user.reload.unconfirmed_email }
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Invalid password')
+ end
+
+ it 'does not require password if it was automatically set', :aggregate_failures do
+ user.update!(password_automatically_set: true)
+ result = {}
+
+ expect do
+ result = update_user(user, { email: 'example@example.com' })
+ end.to change { user.reload.unconfirmed_email }
+ expect(result[:status]).to eq(:success)
+ end
+
+ it 'does not require a password if the attribute changed does not require it' do
+ result = {}
+
+ expect do
+ result = update_user(user, { job_title: 'supreme leader of the universe' })
+ end.to change { user.reload.job_title }
+ expect(result[:status]).to eq(:success)
+ end
+ end
end
- context 'if email was NOT changed' do
- subject do
- update_user(user, job_title: 'supreme leader of the universe')
+ context 'when check_password is left to false' do
+ it 'does not require a password check', :aggregate_failures do
+ result = {}
+ expect do
+ result = update_user(user, { email: 'example@example.com' })
+ end.to change { user.reload.unconfirmed_email }
+ expect(result[:status]).to eq(:success)
end
+ end
+ context 'if email was NOT changed' do
it 'skips update canonicalize email service call' do
- expect { subject }.not_to change { user.user_canonical_email }
+ expect do
+ update_user(user, job_title: 'supreme leader of the universe')
+ end.not_to change { user.user_canonical_email }
end
end
end
@@ -106,7 +160,7 @@ RSpec.describe Users::UpdateService do
it 'raises an error when record cannot be updated' do
expect do
- update_user(user, email: 'invalid')
+ update_user(user, email: 'invalid', validation_password: password)
end.to raise_error(ActiveRecord::RecordInvalid)
end
diff --git a/spec/services/users/upsert_credit_card_validation_service_spec.rb b/spec/services/users/upsert_credit_card_validation_service_spec.rb
index 148638fe5e7..bede30e1898 100644
--- a/spec/services/users/upsert_credit_card_validation_service_spec.rb
+++ b/spec/services/users/upsert_credit_card_validation_service_spec.rb
@@ -7,7 +7,17 @@ RSpec.describe Users::UpsertCreditCardValidationService do
let(:user_id) { user.id }
let(:credit_card_validated_time) { Time.utc(2020, 1, 1) }
- let(:params) { { user_id: user_id, credit_card_validated_at: credit_card_validated_time } }
+ let(:expiration_year) { Date.today.year + 10 }
+ let(:params) do
+ {
+ user_id: user_id,
+ credit_card_validated_at: credit_card_validated_time,
+ credit_card_expiration_year: expiration_year,
+ credit_card_expiration_month: 1,
+ credit_card_holder_name: 'John Smith',
+ credit_card_mask_number: '1111'
+ }
+ end
describe '#execute' do
subject(:service) { described_class.new(params) }
@@ -52,6 +62,16 @@ RSpec.describe Users::UpsertCreditCardValidationService do
end
end
+ shared_examples 'returns an error, tracking the exception' do
+ it do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+
+ result = service.execute
+
+ expect(result.status).to eq(:error)
+ end
+ end
+
context 'when user id does not exist' do
let(:user_id) { non_existing_record_id }
@@ -61,19 +81,27 @@ RSpec.describe Users::UpsertCreditCardValidationService do
context 'when missing credit_card_validated_at' do
let(:params) { { user_id: user_id } }
- it_behaves_like 'returns an error without tracking the exception'
+ it_behaves_like 'returns an error, tracking the exception'
end
context 'when missing user id' do
let(:params) { { credit_card_validated_at: credit_card_validated_time } }
- it_behaves_like 'returns an error without tracking the exception'
+ it_behaves_like 'returns an error, tracking the exception'
end
context 'when unexpected exception happen' do
it 'tracks the exception and returns an error' do
+ logged_params = {
+ credit_card_validated_at: credit_card_validated_time,
+ expiration_date: Date.new(expiration_year, 1, 31),
+ holder_name: "John Smith",
+ last_digits: 1111,
+ user_id: user_id
+ }
+
expect(::Users::CreditCardValidation).to receive(:upsert).and_raise(e = StandardError.new('My exception!'))
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(e, class: described_class.to_s, params: params)
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(e, class: described_class.to_s, params: logged_params)
result = service.execute
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index f9fa46a4fc8..2aebd2adab9 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -392,7 +392,7 @@ RSpec.describe WebHookService do
end
end
- context 'when the hook is throttled (via Redis)', :clean_gitlab_redis_cache do
+ context 'when the hook is throttled (via Redis)', :clean_gitlab_redis_rate_limiting do
before do
# Set a high interval to avoid intermittent failures in CI
allow(Gitlab::ApplicationRateLimiter).to receive(:rate_limits).and_return(
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index aa791d1d2e7..c8664598691 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -315,6 +315,10 @@ RSpec.configure do |config|
# For more information check https://gitlab.com/gitlab-org/gitlab/-/issues/339348
stub_feature_flags(new_header_search: false)
+ # Disable the override flag in order to enable the feature by default.
+ # See https://docs.gitlab.com/ee/development/feature_flags/#selectively-disable-by-actor
+ stub_feature_flags(surface_environment_creation_failure_override: false)
+
allow(Gitlab::GitalyClient).to receive(:can_use_disk?).and_return(enable_rugged)
else
unstub_all_feature_flags
diff --git a/spec/support/before_all_adapter.rb b/spec/support/before_all_adapter.rb
index f48e0f46e80..890bdd6a2c4 100644
--- a/spec/support/before_all_adapter.rb
+++ b/spec/support/before_all_adapter.rb
@@ -1,25 +1,25 @@
# frozen_string_literal: true
class BeforeAllAdapter # rubocop:disable Gitlab/NamespacedClass
- def self.all_connection_pools
- ::ActiveRecord::Base.connection_handler.all_connection_pools
+ def self.all_connection_classes
+ @all_connection_classes ||= [ActiveRecord::Base] + ActiveRecord::Base.descendants.select(&:connection_class?) # rubocop: disable Database/MultipleDatabases
end
def self.begin_transaction
- self.all_connection_pools.each do |connection_pool|
- connection_pool.connection.begin_transaction(joinable: false)
+ self.all_connection_classes.each do |connection_class|
+ connection_class.connection.begin_transaction(joinable: false)
end
end
def self.rollback_transaction
- self.all_connection_pools.each do |connection_pool|
- if connection_pool.connection.open_transactions.zero?
+ self.all_connection_classes.each do |connection_class|
+ if connection_class.connection.open_transactions.zero?
warn "!!! before_all transaction has been already rollbacked and " \
"could work incorrectly"
next
end
- connection_pool.connection.rollback_transaction
+ connection_class.connection.rollback_transaction
end
end
end
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index 6f96d552da6..ac35662ec93 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -22,7 +22,8 @@ JS_CONSOLE_FILTER = Regexp.union([
'"[WDS] Live Reloading enabled."',
'Download the Vue Devtools extension',
'Download the Apollo DevTools',
- "Unrecognized feature: 'interest-cohort'"
+ "Unrecognized feature: 'interest-cohort'",
+ 'Does this page need fixes or improvements?'
])
CAPYBARA_WINDOW_SIZE = [1366, 768].freeze
diff --git a/spec/support/database/cross-database-modification-allowlist.yml b/spec/support/database/cross-database-modification-allowlist.yml
new file mode 100644
index 00000000000..627967f65f3
--- /dev/null
+++ b/spec/support/database/cross-database-modification-allowlist.yml
@@ -0,0 +1,1343 @@
+- "./ee/spec/controllers/admin/geo/nodes_controller_spec.rb"
+- "./ee/spec/controllers/admin/geo/projects_controller_spec.rb"
+- "./ee/spec/controllers/admin/projects_controller_spec.rb"
+- "./ee/spec/controllers/concerns/internal_redirect_spec.rb"
+- "./ee/spec/controllers/ee/projects/jobs_controller_spec.rb"
+- "./ee/spec/controllers/oauth/geo_auth_controller_spec.rb"
+- "./ee/spec/controllers/projects/approver_groups_controller_spec.rb"
+- "./ee/spec/controllers/projects/approvers_controller_spec.rb"
+- "./ee/spec/controllers/projects/merge_requests_controller_spec.rb"
+- "./ee/spec/controllers/projects/merge_requests/creations_controller_spec.rb"
+- "./ee/spec/controllers/projects/settings/access_tokens_controller_spec.rb"
+- "./ee/spec/controllers/projects/subscriptions_controller_spec.rb"
+- "./ee/spec/features/account_recovery_regular_check_spec.rb"
+- "./ee/spec/features/admin/admin_audit_logs_spec.rb"
+- "./ee/spec/features/admin/admin_credentials_inventory_spec.rb"
+- "./ee/spec/features/admin/admin_dashboard_spec.rb"
+- "./ee/spec/features/admin/admin_dev_ops_report_spec.rb"
+- "./ee/spec/features/admin/admin_merge_requests_approvals_spec.rb"
+- "./ee/spec/features/admin/admin_reset_pipeline_minutes_spec.rb"
+- "./ee/spec/features/admin/admin_sends_notification_spec.rb"
+- "./ee/spec/features/admin/admin_settings_spec.rb"
+- "./ee/spec/features/admin/admin_show_new_user_signups_cap_alert_spec.rb"
+- "./ee/spec/features/admin/admin_users_spec.rb"
+- "./ee/spec/features/admin/geo/admin_geo_nodes_spec.rb"
+- "./ee/spec/features/admin/geo/admin_geo_projects_spec.rb"
+- "./ee/spec/features/admin/geo/admin_geo_replication_nav_spec.rb"
+- "./ee/spec/features/admin/geo/admin_geo_sidebar_spec.rb"
+- "./ee/spec/features/admin/geo/admin_geo_uploads_spec.rb"
+- "./ee/spec/features/admin/groups/admin_changes_plan_spec.rb"
+- "./ee/spec/features/admin/licenses/admin_uploads_license_spec.rb"
+- "./ee/spec/features/admin/licenses/show_user_count_threshold_spec.rb"
+- "./ee/spec/features/admin/subscriptions/admin_views_subscription_spec.rb"
+- "./ee/spec/features/analytics/code_analytics_spec.rb"
+- "./ee/spec/features/billings/billing_plans_spec.rb"
+- "./ee/spec/features/billings/extend_reactivate_trial_spec.rb"
+- "./ee/spec/features/billings/qrtly_reconciliation_alert_spec.rb"
+- "./ee/spec/features/boards/boards_licensed_features_spec.rb"
+- "./ee/spec/features/boards/boards_spec.rb"
+- "./ee/spec/features/boards/group_boards/board_deletion_spec.rb"
+- "./ee/spec/features/boards/group_boards/multiple_boards_spec.rb"
+- "./ee/spec/features/boards/new_issue_spec.rb"
+- "./ee/spec/features/boards/scoped_issue_board_spec.rb"
+- "./ee/spec/features/boards/sidebar_spec.rb"
+- "./ee/spec/features/boards/swimlanes/epics_swimlanes_drag_drop_spec.rb"
+- "./ee/spec/features/boards/swimlanes/epics_swimlanes_filtering_spec.rb"
+- "./ee/spec/features/boards/swimlanes/epics_swimlanes_sidebar_labels_spec.rb"
+- "./ee/spec/features/boards/swimlanes/epics_swimlanes_sidebar_spec.rb"
+- "./ee/spec/features/boards/swimlanes/epics_swimlanes_spec.rb"
+- "./ee/spec/features/boards/user_adds_lists_to_board_spec.rb"
+- "./ee/spec/features/boards/user_visits_board_spec.rb"
+- "./ee/spec/features/burndown_charts_spec.rb"
+- "./ee/spec/features/burnup_charts_spec.rb"
+- "./ee/spec/features/ci/ci_minutes_spec.rb"
+- "./ee/spec/features/ci_shared_runner_warnings_spec.rb"
+- "./ee/spec/features/clusters/create_agent_spec.rb"
+- "./ee/spec/features/dashboards/activity_spec.rb"
+- "./ee/spec/features/dashboards/groups_spec.rb"
+- "./ee/spec/features/dashboards/issues_spec.rb"
+- "./ee/spec/features/dashboards/merge_requests_spec.rb"
+- "./ee/spec/features/dashboards/operations_spec.rb"
+- "./ee/spec/features/dashboards/projects_spec.rb"
+- "./ee/spec/features/dashboards/todos_spec.rb"
+- "./ee/spec/features/discussion_comments/epic_quick_actions_spec.rb"
+- "./ee/spec/features/discussion_comments/epic_spec.rb"
+- "./ee/spec/features/epic_boards/epic_boards_sidebar_spec.rb"
+- "./ee/spec/features/epic_boards/epic_boards_spec.rb"
+- "./ee/spec/features/epic_boards/multiple_epic_boards_spec.rb"
+- "./ee/spec/features/epic_boards/new_epic_spec.rb"
+- "./ee/spec/features/epics/delete_epic_spec.rb"
+- "./ee/spec/features/epics/epic_issues_spec.rb"
+- "./ee/spec/features/epics/epic_labels_spec.rb"
+- "./ee/spec/features/epics/epic_show_spec.rb"
+- "./ee/spec/features/epics/epics_list_spec.rb"
+- "./ee/spec/features/epics/filtered_search/visual_tokens_spec.rb"
+- "./ee/spec/features/epics/gfm_autocomplete_spec.rb"
+- "./ee/spec/features/epics/issue_promotion_spec.rb"
+- "./ee/spec/features/epics/referencing_epics_spec.rb"
+- "./ee/spec/features/epics/shortcuts_epic_spec.rb"
+- "./ee/spec/features/epics/todo_spec.rb"
+- "./ee/spec/features/epics/update_epic_spec.rb"
+- "./ee/spec/features/epics/user_uses_quick_actions_spec.rb"
+- "./ee/spec/features/geo_node_spec.rb"
+- "./ee/spec/features/groups/analytics/ci_cd_analytics_spec.rb"
+- "./ee/spec/features/groups/analytics/cycle_analytics/charts_spec.rb"
+- "./ee/spec/features/groups/analytics/cycle_analytics/filters_and_data_spec.rb"
+- "./ee/spec/features/groups/analytics/cycle_analytics/multiple_value_streams_spec.rb"
+- "./ee/spec/features/groups/audit_events_spec.rb"
+- "./ee/spec/features/groups/billing_spec.rb"
+- "./ee/spec/features/groups/contribution_analytics_spec.rb"
+- "./ee/spec/features/groups/group_overview_spec.rb"
+- "./ee/spec/features/groups/group_roadmap_spec.rb"
+- "./ee/spec/features/groups/group_settings_spec.rb"
+- "./ee/spec/features/groups/groups_security_credentials_spec.rb"
+- "./ee/spec/features/groups/hooks/user_tests_hooks_spec.rb"
+- "./ee/spec/features/groups/insights_spec.rb"
+- "./ee/spec/features/groups/issues_spec.rb"
+- "./ee/spec/features/groups/iterations/iterations_list_spec.rb"
+- "./ee/spec/features/groups/iteration_spec.rb"
+- "./ee/spec/features/groups/iterations/user_creates_iteration_in_cadence_spec.rb"
+- "./ee/spec/features/groups/iterations/user_edits_iteration_cadence_spec.rb"
+- "./ee/spec/features/groups/iterations/user_edits_iteration_spec.rb"
+- "./ee/spec/features/groups/iterations/user_views_iteration_cadence_spec.rb"
+- "./ee/spec/features/groups/iterations/user_views_iteration_spec.rb"
+- "./ee/spec/features/groups/ldap_group_links_spec.rb"
+- "./ee/spec/features/groups/ldap_settings_spec.rb"
+- "./ee/spec/features/groups/members/leave_group_spec.rb"
+- "./ee/spec/features/groups/members/list_members_spec.rb"
+- "./ee/spec/features/groups/members/override_ldap_memberships_spec.rb"
+- "./ee/spec/features/groups/new_spec.rb"
+- "./ee/spec/features/groups/push_rules_spec.rb"
+- "./ee/spec/features/groups/saml_providers_spec.rb"
+- "./ee/spec/features/groups/scim_token_spec.rb"
+- "./ee/spec/features/groups/seat_usage/seat_usage_spec.rb"
+- "./ee/spec/features/groups/security/compliance_dashboards_spec.rb"
+- "./ee/spec/features/groups/settings/user_configures_insights_spec.rb"
+- "./ee/spec/features/groups/settings/user_searches_in_settings_spec.rb"
+- "./ee/spec/features/groups/sso_spec.rb"
+- "./ee/spec/features/groups/wikis_spec.rb"
+- "./ee/spec/features/groups/wiki/user_views_wiki_empty_spec.rb"
+- "./ee/spec/features/ide/user_commits_changes_spec.rb"
+- "./ee/spec/features/ide/user_opens_ide_spec.rb"
+- "./ee/spec/features/integrations/jira/jira_issues_list_spec.rb"
+- "./ee/spec/features/issues/blocking_issues_spec.rb"
+- "./ee/spec/features/issues/epic_in_issue_sidebar_spec.rb"
+- "./ee/spec/features/issues/filtered_search/filter_issues_by_iteration_spec.rb"
+- "./ee/spec/features/issues/filtered_search/filter_issues_epic_spec.rb"
+- "./ee/spec/features/issues/filtered_search/filter_issues_weight_spec.rb"
+- "./ee/spec/features/issues/form_spec.rb"
+- "./ee/spec/features/issues/gfm_autocomplete_ee_spec.rb"
+- "./ee/spec/features/issues/issue_actions_spec.rb"
+- "./ee/spec/features/issues/issue_sidebar_spec.rb"
+- "./ee/spec/features/issues/move_issue_resource_weight_events_spec.rb"
+- "./ee/spec/features/issues/related_issues_spec.rb"
+- "./ee/spec/features/issues/resource_weight_events_spec.rb"
+- "./ee/spec/features/issues/user_bulk_edits_issues_spec.rb"
+- "./ee/spec/features/issues/user_edits_issue_spec.rb"
+- "./ee/spec/features/issues/user_uses_quick_actions_spec.rb"
+- "./ee/spec/features/issues/user_views_issues_spec.rb"
+- "./ee/spec/features/labels_hierarchy_spec.rb"
+- "./ee/spec/features/markdown/metrics_spec.rb"
+- "./ee/spec/features/merge_requests/user_filters_by_approvers_spec.rb"
+- "./ee/spec/features/merge_requests/user_resets_approvers_spec.rb"
+- "./ee/spec/features/merge_requests/user_views_all_merge_requests_spec.rb"
+- "./ee/spec/features/merge_request/user_approves_with_password_spec.rb"
+- "./ee/spec/features/merge_request/user_creates_merge_request_spec.rb"
+- "./ee/spec/features/merge_request/user_creates_merge_request_with_blocking_mrs_spec.rb"
+- "./ee/spec/features/merge_request/user_creates_multiple_assignees_mr_spec.rb"
+- "./ee/spec/features/merge_request/user_creates_multiple_reviewers_mr_spec.rb"
+- "./ee/spec/features/merge_request/user_edits_approval_rules_mr_spec.rb"
+- "./ee/spec/features/merge_request/user_edits_merge_request_blocking_mrs_spec.rb"
+- "./ee/spec/features/merge_request/user_edits_multiple_assignees_mr_spec.rb"
+- "./ee/spec/features/merge_request/user_edits_multiple_reviewers_mr_spec.rb"
+- "./ee/spec/features/merge_request/user_merges_immediately_spec.rb"
+- "./ee/spec/features/merge_request/user_merges_with_push_rules_spec.rb"
+- "./ee/spec/features/merge_request/user_sees_approval_widget_spec.rb"
+- "./ee/spec/features/merge_request/user_sees_closing_issues_message_spec.rb"
+- "./ee/spec/features/merge_request/user_sees_merge_widget_spec.rb"
+- "./ee/spec/features/merge_request/user_sees_status_checks_widget_spec.rb"
+- "./ee/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb"
+- "./ee/spec/features/merge_request/user_sets_approval_rules_spec.rb"
+- "./ee/spec/features/merge_request/user_sets_approvers_spec.rb"
+- "./ee/spec/features/merge_request/user_uses_slash_commands_spec.rb"
+- "./ee/spec/features/merge_request/user_views_blocked_merge_request_spec.rb"
+- "./ee/spec/features/merge_trains/user_adds_merge_request_to_merge_train_spec.rb"
+- "./ee/spec/features/merge_trains/user_adds_to_merge_train_when_pipeline_succeeds_spec.rb"
+- "./ee/spec/features/oncall_schedules/user_creates_schedule_spec.rb"
+- "./ee/spec/features/operations_nav_link_spec.rb"
+- "./ee/spec/features/profiles/account_spec.rb"
+- "./ee/spec/features/profiles/billing_spec.rb"
+- "./ee/spec/features/projects/audit_events_spec.rb"
+- "./ee/spec/features/projects/cluster_agents_spec.rb"
+- "./ee/spec/features/projects/custom_projects_template_spec.rb"
+- "./ee/spec/features/projects/environments/environments_spec.rb"
+- "./ee/spec/features/projects/feature_flags/feature_flag_issues_spec.rb"
+- "./ee/spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb"
+- "./ee/spec/features/projects/feature_flags/user_deletes_feature_flag_spec.rb"
+- "./ee/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb"
+- "./ee/spec/features/projects/insights_spec.rb"
+- "./ee/spec/features/projects/integrations/user_activates_jira_spec.rb"
+- "./ee/spec/features/projects/issues/user_creates_issue_spec.rb"
+- "./ee/spec/features/projects/iterations/iteration_cadences_list_spec.rb"
+- "./ee/spec/features/projects/iterations/iterations_list_spec.rb"
+- "./ee/spec/features/projects/iterations/user_views_iteration_spec.rb"
+- "./ee/spec/features/projects/jobs_spec.rb"
+- "./ee/spec/features/projects/kerberos_clone_instructions_spec.rb"
+- "./ee/spec/features/projects/licenses/maintainer_views_policies_spec.rb"
+- "./ee/spec/features/projects/members/member_is_removed_from_project_spec.rb"
+- "./ee/spec/features/projects/merge_requests/user_approves_merge_request_spec.rb"
+- "./ee/spec/features/projects/merge_requests/user_edits_merge_request_spec.rb"
+- "./ee/spec/features/projects/mirror_spec.rb"
+- "./ee/spec/features/projects/new_project_from_template_spec.rb"
+- "./ee/spec/features/projects/new_project_spec.rb"
+- "./ee/spec/features/projects/path_locks_spec.rb"
+- "./ee/spec/features/projects/pipelines/pipeline_spec.rb"
+- "./ee/spec/features/projects/push_rules_spec.rb"
+- "./ee/spec/features/projects/quality/test_case_create_spec.rb"
+- "./ee/spec/features/projects/quality/test_case_list_spec.rb"
+- "./ee/spec/features/projects/quality/test_case_show_spec.rb"
+- "./ee/spec/features/projects/releases/user_views_release_spec.rb"
+- "./ee/spec/features/projects/requirements_management/requirements_list_spec.rb"
+- "./ee/spec/features/projects/security/dast_scanner_profiles_spec.rb"
+- "./ee/spec/features/projects/security/dast_site_profiles_spec.rb"
+- "./ee/spec/features/projects/security/user_creates_on_demand_scan_spec.rb"
+- "./ee/spec/features/projects/security/user_views_security_configuration_spec.rb"
+- "./ee/spec/features/projects/services/prometheus_custom_metrics_spec.rb"
+- "./ee/spec/features/projects/services/user_activates_github_spec.rb"
+- "./ee/spec/features/projects/settings/disable_merge_trains_setting_spec.rb"
+- "./ee/spec/features/projects/settings/ee/repository_mirrors_settings_spec.rb"
+- "./ee/spec/features/projects/settings/ee/service_desk_setting_spec.rb"
+- "./ee/spec/features/projects/settings/issues_settings_spec.rb"
+- "./ee/spec/features/projects/settings/merge_request_approvals_settings_spec.rb"
+- "./ee/spec/features/projects/settings/merge_requests_settings_spec.rb"
+- "./ee/spec/features/projects/settings/pipeline_subscriptions_spec.rb"
+- "./ee/spec/features/projects/settings/protected_environments_spec.rb"
+- "./ee/spec/features/projects/settings/user_manages_merge_pipelines_spec.rb"
+- "./ee/spec/features/projects/settings/user_manages_merge_trains_spec.rb"
+- "./ee/spec/features/projects_spec.rb"
+- "./ee/spec/features/projects/user_applies_custom_file_template_spec.rb"
+- "./ee/spec/features/projects/view_blob_with_code_owners_spec.rb"
+- "./ee/spec/features/projects/wiki/user_views_wiki_empty_spec.rb"
+- "./ee/spec/features/promotion_spec.rb"
+- "./ee/spec/features/protected_branches_spec.rb"
+- "./ee/spec/features/protected_tags_spec.rb"
+- "./ee/spec/features/registrations/combined_registration_spec.rb"
+- "./ee/spec/features/registrations/trial_during_signup_flow_spec.rb"
+- "./ee/spec/features/registrations/user_sees_new_onboarding_flow_spec.rb"
+- "./ee/spec/features/registrations/welcome_spec.rb"
+- "./ee/spec/features/search/elastic/global_search_spec.rb"
+- "./ee/spec/features/search/elastic/group_search_spec.rb"
+- "./ee/spec/features/search/elastic/project_search_spec.rb"
+- "./ee/spec/features/search/elastic/snippet_search_spec.rb"
+- "./ee/spec/features/search/user_searches_for_epics_spec.rb"
+- "./ee/spec/features/subscriptions/groups/edit_spec.rb"
+- "./ee/spec/features/trial_registrations/signup_spec.rb"
+- "./ee/spec/features/trials/capture_lead_spec.rb"
+- "./ee/spec/features/trials/select_namespace_spec.rb"
+- "./ee/spec/features/trials/show_trial_banner_spec.rb"
+- "./ee/spec/features/users/login_spec.rb"
+- "./ee/spec/finders/geo/attachment_legacy_registry_finder_spec.rb"
+- "./ee/spec/finders/geo/container_repository_registry_finder_spec.rb"
+- "./ee/spec/finders/geo/lfs_object_registry_finder_spec.rb"
+- "./ee/spec/finders/geo/merge_request_diff_registry_finder_spec.rb"
+- "./ee/spec/finders/geo/package_file_registry_finder_spec.rb"
+- "./ee/spec/finders/geo/pages_deployment_registry_finder_spec.rb"
+- "./ee/spec/finders/geo/pipeline_artifact_registry_finder_spec.rb"
+- "./ee/spec/finders/geo/project_registry_finder_spec.rb"
+- "./ee/spec/finders/merge_requests/by_approvers_finder_spec.rb"
+- "./ee/spec/frontend/fixtures/analytics/value_streams.rb"
+- "./ee/spec/graphql/mutations/dast_on_demand_scans/create_spec.rb"
+- "./ee/spec/graphql/mutations/dast/profiles/create_spec.rb"
+- "./ee/spec/graphql/mutations/dast/profiles/run_spec.rb"
+- "./ee/spec/graphql/mutations/dast/profiles/update_spec.rb"
+- "./ee/spec/graphql/mutations/merge_requests/accept_spec.rb"
+- "./ee/spec/graphql/resolvers/geo/group_wiki_repository_registries_resolver_spec.rb"
+- "./ee/spec/graphql/resolvers/geo/lfs_object_registries_resolver_spec.rb"
+- "./ee/spec/graphql/resolvers/geo/merge_request_diff_registries_resolver_spec.rb"
+- "./ee/spec/graphql/resolvers/geo/package_file_registries_resolver_spec.rb"
+- "./ee/spec/graphql/resolvers/geo/pages_deployment_registries_resolver_spec.rb"
+- "./ee/spec/graphql/resolvers/geo/pipeline_artifact_registries_resolver_spec.rb"
+- "./ee/spec/graphql/resolvers/geo/snippet_repository_registries_resolver_spec.rb"
+- "./ee/spec/graphql/resolvers/geo/terraform_state_version_registries_resolver_spec.rb"
+- "./ee/spec/graphql/resolvers/geo/upload_registries_resolver_spec.rb"
+- "./ee/spec/helpers/application_helper_spec.rb"
+- "./ee/spec/helpers/ee/geo_helper_spec.rb"
+- "./ee/spec/lib/analytics/devops_adoption/snapshot_calculator_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/backfill_iteration_cadence_id_for_boards_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/backfill_version_data_from_gitaly_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/create_security_setting_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/fix_ruby_object_in_audit_events_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/migrate_approver_to_approval_rules_check_progress_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/migrate_approver_to_approval_rules_in_batch_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/migrate_approver_to_approval_rules_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/migrate_devops_segments_to_groups_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/migrate_security_scans_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/move_epic_issues_after_epics_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/populate_any_approval_rule_for_merge_requests_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/populate_any_approval_rule_for_projects_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/populate_latest_pipeline_ids_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/populate_namespace_statistics_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/populate_resolved_on_default_branch_column_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/populate_uuids_for_security_findings_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/populate_vulnerability_feedback_pipeline_id_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/populate_vulnerability_historical_statistics_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/prune_orphaned_geo_events_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/remove_duplicate_cs_findings_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/remove_duplicated_cs_findings_without_vulnerability_id_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/remove_inaccessible_epic_todos_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/remove_undefined_occurrence_confidence_level_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/remove_undefined_occurrence_severity_level_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/remove_undefined_vulnerability_confidence_level_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/remove_undefined_vulnerability_severity_level_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/update_location_fingerprint_for_container_scanning_findings_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/update_vulnerabilities_from_dismissal_feedback_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/update_vulnerabilities_to_dismissed_spec.rb"
+- "./ee/spec/lib/ee/gitlab/background_migration/update_vulnerability_confidence_spec.rb"
+- "./ee/spec/lib/ee/gitlab/database/connection_spec.rb"
+- "./ee/spec/lib/ee/gitlab/database_spec.rb"
+- "./ee/spec/lib/ee/gitlab/middleware/read_only_spec.rb"
+- "./ee/spec/lib/ee/gitlab/usage_data_spec.rb"
+- "./ee/spec/lib/gitlab/background_migration/fix_orphan_promoted_issues_spec.rb"
+- "./ee/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb"
+- "./ee/spec/lib/gitlab/ci/templates/Jobs/dast_default_branch_gitlab_ci_yaml_spec.rb"
+- "./ee/spec/lib/gitlab/geo/base_request_spec.rb"
+- "./ee/spec/lib/gitlab/geo/database_tasks_spec.rb"
+- "./ee/spec/lib/gitlab/geo/event_gap_tracking_spec.rb"
+- "./ee/spec/lib/gitlab/geo/geo_tasks_spec.rb"
+- "./ee/spec/lib/gitlab/geo/jwt_request_decoder_spec.rb"
+- "./ee/spec/lib/gitlab/geo/log_cursor/events/design_repository_updated_event_spec.rb"
+- "./ee/spec/lib/gitlab/geo/log_cursor/events/job_artifact_deleted_event_spec.rb"
+- "./ee/spec/lib/gitlab/geo/log_cursor/events/repository_created_event_spec.rb"
+- "./ee/spec/lib/gitlab/geo/log_cursor/events/repository_updated_event_spec.rb"
+- "./ee/spec/lib/gitlab/geo/oauth/login_state_spec.rb"
+- "./ee/spec/lib/gitlab/geo/oauth/logout_token_spec.rb"
+- "./ee/spec/lib/gitlab/geo/oauth/session_spec.rb"
+- "./ee/spec/lib/gitlab/geo/registry_batcher_spec.rb"
+- "./ee/spec/lib/gitlab/geo/replicable_model_spec.rb"
+- "./ee/spec/lib/gitlab/geo/replication/blob_downloader_spec.rb"
+- "./ee/spec/lib/gitlab/geo/replication/file_transfer_spec.rb"
+- "./ee/spec/lib/gitlab/geo/replicator_spec.rb"
+- "./ee/spec/lib/gitlab/git_access_spec.rb"
+- "./ee/spec/lib/pseudonymizer/dumper_spec.rb"
+- "./ee/spec/lib/system_check/geo/geo_database_configured_check_spec.rb"
+- "./ee/spec/lib/system_check/geo/http_connection_check_spec.rb"
+- "./ee/spec/lib/system_check/rake_task/geo_task_spec.rb"
+- "./ee/spec/mailers/notify_spec.rb"
+- "./ee/spec/migrations/20190926180443_schedule_epic_issues_after_epics_move_spec.rb"
+- "./ee/spec/migrations/add_non_null_constraint_for_escalation_rule_on_pending_alert_escalations_spec.rb"
+- "./ee/spec/migrations/add_unique_constraint_to_software_licenses_spec.rb"
+- "./ee/spec/migrations/backfill_namespace_statistics_with_wiki_size_spec.rb"
+- "./ee/spec/migrations/backfill_operations_feature_flags_iid_spec.rb"
+- "./ee/spec/migrations/backfill_software_licenses_spdx_identifiers_spec.rb"
+- "./ee/spec/migrations/backfill_version_author_and_created_at_spec.rb"
+- "./ee/spec/migrations/cleanup_deploy_access_levels_for_removed_groups_spec.rb"
+- "./ee/spec/migrations/create_elastic_reindexing_subtasks_spec.rb"
+- "./ee/spec/migrations/fix_any_approver_rule_for_projects_spec.rb"
+- "./ee/spec/migrations/migrate_design_notes_mentions_to_db_spec.rb"
+- "./ee/spec/migrations/migrate_epic_mentions_to_db_spec.rb"
+- "./ee/spec/migrations/migrate_epic_notes_mentions_to_db_spec.rb"
+- "./ee/spec/migrations/migrate_license_management_artifacts_to_license_scanning_spec.rb"
+- "./ee/spec/migrations/migrate_saml_identities_to_scim_identities_spec.rb"
+- "./ee/spec/migrations/migrate_scim_identities_to_saml_for_new_users_spec.rb"
+- "./ee/spec/migrations/migrate_vulnerability_dismissal_feedback_spec.rb"
+- "./ee/spec/migrations/migrate_vulnerability_dismissals_spec.rb"
+- "./ee/spec/migrations/nullify_feature_flag_plaintext_tokens_spec.rb"
+- "./ee/spec/migrations/populate_vulnerability_historical_statistics_for_year_spec.rb"
+- "./ee/spec/migrations/remove_creations_in_gitlab_subscription_histories_spec.rb"
+- "./ee/spec/migrations/remove_cycle_analytics_total_stage_data_spec.rb"
+- "./ee/spec/migrations/remove_duplicated_cs_findings_spec.rb"
+- "./ee/spec/migrations/remove_duplicated_cs_findings_without_vulnerability_id_spec.rb"
+- "./ee/spec/migrations/remove_schedule_and_status_null_constraints_from_pending_escalations_alert_spec.rb"
+- "./ee/spec/migrations/schedule_fix_orphan_promoted_issues_spec.rb"
+- "./ee/spec/migrations/schedule_fix_ruby_object_in_audit_events_spec.rb"
+- "./ee/spec/migrations/schedule_merge_request_any_approval_rule_migration_spec.rb"
+- "./ee/spec/migrations/schedule_populate_dismissed_state_for_vulnerabilities_spec.rb"
+- "./ee/spec/migrations/schedule_populate_resolved_on_default_branch_column_spec.rb"
+- "./ee/spec/migrations/schedule_populate_vulnerability_historical_statistics_spec.rb"
+- "./ee/spec/migrations/schedule_project_any_approval_rule_migration_spec.rb"
+- "./ee/spec/migrations/schedule_remove_inaccessible_epic_todos_spec.rb"
+- "./ee/spec/migrations/schedule_sync_blocking_issues_count_spec.rb"
+- "./ee/spec/migrations/schedule_uuid_population_for_security_findings2_spec.rb"
+- "./ee/spec/migrations/set_report_type_for_vulnerabilities_spec.rb"
+- "./ee/spec/migrations/set_resolved_state_on_vulnerabilities_spec.rb"
+- "./ee/spec/migrations/update_cs_vulnerability_confidence_column_spec.rb"
+- "./ee/spec/migrations/update_gitlab_subscriptions_start_at_post_eoa_spec.rb"
+- "./ee/spec/migrations/update_location_fingerprint_column_for_cs_spec.rb"
+- "./ee/spec/migrations/update_occurrence_severity_column_spec.rb"
+- "./ee/spec/migrations/update_undefined_confidence_from_occurrences_spec.rb"
+- "./ee/spec/migrations/update_undefined_confidence_from_vulnerabilities_spec.rb"
+- "./ee/spec/migrations/update_vulnerability_severity_column_spec.rb"
+- "./ee/spec/models/analytics/cycle_analytics/group_level_spec.rb"
+- "./ee/spec/models/approval_merge_request_rule_spec.rb"
+- "./ee/spec/models/approval_project_rule_spec.rb"
+- "./ee/spec/models/approval_state_spec.rb"
+- "./ee/spec/models/approval_wrapped_code_owner_rule_spec.rb"
+- "./ee/spec/models/approval_wrapped_rule_spec.rb"
+- "./ee/spec/models/approver_group_spec.rb"
+- "./ee/spec/models/ci/bridge_spec.rb"
+- "./ee/spec/models/ci/build_spec.rb"
+- "./ee/spec/models/ci/minutes/additional_pack_spec.rb"
+- "./ee/spec/models/ci/pipeline_spec.rb"
+- "./ee/spec/models/ci/subscriptions/project_spec.rb"
+- "./ee/spec/models/concerns/approval_rule_like_spec.rb"
+- "./ee/spec/models/concerns/approver_migrate_hook_spec.rb"
+- "./ee/spec/models/dora/daily_metrics_spec.rb"
+- "./ee/spec/models/ee/ci/job_artifact_spec.rb"
+- "./ee/spec/models/ee/ci/pipeline_artifact_spec.rb"
+- "./ee/spec/models/ee/ci/runner_spec.rb"
+- "./ee/spec/models/ee/merge_request_diff_spec.rb"
+- "./ee/spec/models/ee/pages_deployment_spec.rb"
+- "./ee/spec/models/ee/terraform/state_version_spec.rb"
+- "./ee/spec/models/geo/container_repository_registry_spec.rb"
+- "./ee/spec/models/geo/deleted_project_spec.rb"
+- "./ee/spec/models/geo/design_registry_spec.rb"
+- "./ee/spec/models/geo/job_artifact_registry_spec.rb"
+- "./ee/spec/models/geo_node_namespace_link_spec.rb"
+- "./ee/spec/models/geo_node_spec.rb"
+- "./ee/spec/models/geo_node_status_spec.rb"
+- "./ee/spec/models/geo/package_file_registry_spec.rb"
+- "./ee/spec/models/geo/project_registry_spec.rb"
+- "./ee/spec/models/group_member_spec.rb"
+- "./ee/spec/models/group_wiki_repository_spec.rb"
+- "./ee/spec/models/merge_request_spec.rb"
+- "./ee/spec/models/packages/package_file_spec.rb"
+- "./ee/spec/models/project_spec.rb"
+- "./ee/spec/models/requirements_management/requirement_spec.rb"
+- "./ee/spec/models/snippet_repository_spec.rb"
+- "./ee/spec/models/upload_spec.rb"
+- "./ee/spec/models/visible_approvable_spec.rb"
+- "./ee/spec/policies/ci/build_policy_spec.rb"
+- "./ee/spec/presenters/approval_rule_presenter_spec.rb"
+- "./ee/spec/presenters/merge_request_presenter_spec.rb"
+- "./ee/spec/replicators/geo/pipeline_artifact_replicator_spec.rb"
+- "./ee/spec/replicators/geo/terraform_state_version_replicator_spec.rb"
+- "./ee/spec/requests/api/ci/pipelines_spec.rb"
+- "./ee/spec/requests/api/geo_nodes_spec.rb"
+- "./ee/spec/requests/api/geo_replication_spec.rb"
+- "./ee/spec/requests/api/graphql/mutations/dast_on_demand_scans/create_spec.rb"
+- "./ee/spec/requests/api/graphql/mutations/dast/profiles/create_spec.rb"
+- "./ee/spec/requests/api/graphql/mutations/dast/profiles/run_spec.rb"
+- "./ee/spec/requests/api/graphql/mutations/dast/profiles/update_spec.rb"
+- "./ee/spec/requests/api/graphql/project/pipeline/dast_profile_spec.rb"
+- "./ee/spec/requests/api/merge_request_approval_rules_spec.rb"
+- "./ee/spec/requests/api/merge_requests_spec.rb"
+- "./ee/spec/requests/api/project_approval_rules_spec.rb"
+- "./ee/spec/requests/api/project_approval_settings_spec.rb"
+- "./ee/spec/requests/api/project_approvals_spec.rb"
+- "./ee/spec/requests/api/project_snapshots_spec.rb"
+- "./ee/spec/requests/api/status_checks_spec.rb"
+- "./ee/spec/requests/api/vulnerability_findings_spec.rb"
+- "./ee/spec/requests/projects/merge_requests_controller_spec.rb"
+- "./ee/spec/routing/admin_routing_spec.rb"
+- "./ee/spec/serializers/dashboard_operations_project_entity_spec.rb"
+- "./ee/spec/serializers/ee/evidences/release_entity_spec.rb"
+- "./ee/spec/serializers/ee/user_serializer_spec.rb"
+- "./ee/spec/serializers/evidences/evidence_entity_spec.rb"
+- "./ee/spec/serializers/merge_request_widget_entity_spec.rb"
+- "./ee/spec/serializers/pipeline_serializer_spec.rb"
+- "./ee/spec/services/approval_rules/create_service_spec.rb"
+- "./ee/spec/services/approval_rules/finalize_service_spec.rb"
+- "./ee/spec/services/approval_rules/merge_request_rule_destroy_service_spec.rb"
+- "./ee/spec/services/approval_rules/params_filtering_service_spec.rb"
+- "./ee/spec/services/approval_rules/project_rule_destroy_service_spec.rb"
+- "./ee/spec/services/approval_rules/update_service_spec.rb"
+- "./ee/spec/services/app_sec/dast/profiles/create_service_spec.rb"
+- "./ee/spec/services/app_sec/dast/profiles/update_service_spec.rb"
+- "./ee/spec/services/app_sec/dast/scans/create_service_spec.rb"
+- "./ee/spec/services/app_sec/dast/scans/run_service_spec.rb"
+- "./ee/spec/services/ci/compare_license_scanning_reports_service_spec.rb"
+- "./ee/spec/services/ci/compare_metrics_reports_service_spec.rb"
+- "./ee/spec/services/ci/create_pipeline_service/dast_configuration_spec.rb"
+- "./ee/spec/services/ci/destroy_pipeline_service_spec.rb"
+- "./ee/spec/services/ci/minutes/track_live_consumption_service_spec.rb"
+- "./ee/spec/services/ci/minutes/update_build_minutes_service_spec.rb"
+- "./ee/spec/services/ci/register_job_service_spec.rb"
+- "./ee/spec/services/ci/retry_build_service_spec.rb"
+- "./ee/spec/services/ci/run_dast_scan_service_spec.rb"
+- "./ee/spec/services/ci/subscribe_bridge_service_spec.rb"
+- "./ee/spec/services/ci/sync_reports_to_approval_rules_service_spec.rb"
+- "./ee/spec/services/ci/trigger_downstream_subscription_service_spec.rb"
+- "./ee/spec/services/dast_on_demand_scans/create_service_spec.rb"
+- "./ee/spec/services/deployments/auto_rollback_service_spec.rb"
+- "./ee/spec/services/ee/ci/job_artifacts/destroy_all_expired_service_spec.rb"
+- "./ee/spec/services/ee/ci/job_artifacts/destroy_batch_service_spec.rb"
+- "./ee/spec/services/ee/integrations/test/project_service_spec.rb"
+- "./ee/spec/services/ee/issuable/destroy_service_spec.rb"
+- "./ee/spec/services/ee/merge_requests/refresh_service_spec.rb"
+- "./ee/spec/services/ee/merge_requests/update_service_spec.rb"
+- "./ee/spec/services/ee/notification_service_spec.rb"
+- "./ee/spec/services/ee/post_receive_service_spec.rb"
+- "./ee/spec/services/ee/releases/create_evidence_service_spec.rb"
+- "./ee/spec/services/ee/users/destroy_service_spec.rb"
+- "./ee/spec/services/external_status_checks/create_service_spec.rb"
+- "./ee/spec/services/external_status_checks/destroy_service_spec.rb"
+- "./ee/spec/services/external_status_checks/update_service_spec.rb"
+- "./ee/spec/services/geo/container_repository_sync_service_spec.rb"
+- "./ee/spec/services/geo/hashed_storage_migrated_event_store_spec.rb"
+- "./ee/spec/services/geo/hashed_storage_migration_service_spec.rb"
+- "./ee/spec/services/geo/node_create_service_spec.rb"
+- "./ee/spec/services/geo/node_status_request_service_spec.rb"
+- "./ee/spec/services/geo/node_update_service_spec.rb"
+- "./ee/spec/services/geo/project_housekeeping_service_spec.rb"
+- "./ee/spec/services/geo/registry_consistency_service_spec.rb"
+- "./ee/spec/services/geo/repositories_changed_event_store_spec.rb"
+- "./ee/spec/services/geo/repository_updated_event_store_spec.rb"
+- "./ee/spec/services/geo/repository_verification_reset_spec.rb"
+- "./ee/spec/services/geo/repository_verification_secondary_service_spec.rb"
+- "./ee/spec/services/merge_requests/merge_service_spec.rb"
+- "./ee/spec/services/merge_requests/reset_approvals_service_spec.rb"
+- "./ee/spec/services/merge_requests/sync_report_approver_approval_rules_spec.rb"
+- "./ee/spec/services/projects/transfer_service_spec.rb"
+- "./ee/spec/services/security/security_orchestration_policies/rule_schedule_service_spec.rb"
+- "./ee/spec/services/todo_service_spec.rb"
+- "./ee/spec/services/vulnerability_feedback/create_service_spec.rb"
+- "./ee/spec/services/wiki_pages/create_service_spec.rb"
+- "./ee/spec/services/wiki_pages/destroy_service_spec.rb"
+- "./ee/spec/services/wiki_pages/update_service_spec.rb"
+- "./ee/spec/support/shared_examples/fixtures/analytics_value_streams_shared_examples.rb"
+- "./ee/spec/support/shared_examples/graphql/geo/geo_registries_resolver_shared_examples.rb"
+- "./ee/spec/support/shared_examples/graphql/mutations/dast_on_demand_scans_shared_examples.rb"
+- "./ee/spec/support/shared_examples/graphql/mutations/dast_on_demand_scan_with_user_abilities_shared_examples.rb"
+- "./ee/spec/support/shared_examples/lib/gitlab/geo/geo_log_cursor_event_shared_examples.rb"
+- "./ee/spec/support/shared_examples/lib/gitlab/geo/geo_logs_event_source_info_shared_examples.rb"
+- "./ee/spec/support/shared_examples/models/concerns/blob_replicator_strategy_shared_examples.rb"
+- "./ee/spec/support/shared_examples/models/concerns/replicable_model_shared_examples.rb"
+- "./ee/spec/support/shared_examples/models/concerns/verifiable_replicator_shared_examples.rb"
+- "./ee/spec/support/shared_examples/policies/protected_environments_shared_examples.rb"
+- "./ee/spec/support/shared_examples/requests/api/project_approval_rules_api_shared_examples.rb"
+- "./ee/spec/support/shared_examples/services/audit_event_logging_shared_examples.rb"
+- "./ee/spec/support/shared_examples/services/build_execute_shared_examples.rb"
+- "./ee/spec/support/shared_examples/services/dast_on_demand_scans_shared_examples.rb"
+- "./ee/spec/support/shared_examples/services/geo_event_store_shared_examples.rb"
+- "./ee/spec/tasks/geo_rake_spec.rb"
+- "./ee/spec/tasks/gitlab/geo_rake_spec.rb"
+- "./ee/spec/workers/geo/file_download_dispatch_worker_spec.rb"
+- "./ee/spec/workers/geo/metrics_update_worker_spec.rb"
+- "./ee/spec/workers/geo/prune_event_log_worker_spec.rb"
+- "./ee/spec/workers/geo/registry_sync_worker_spec.rb"
+- "./ee/spec/workers/geo/repository_cleanup_worker_spec.rb"
+- "./ee/spec/workers/geo/repository_sync_worker_spec.rb"
+- "./ee/spec/workers/geo/repository_verification/secondary/scheduler_worker_spec.rb"
+- "./ee/spec/workers/geo/repository_verification/secondary/single_worker_spec.rb"
+- "./ee/spec/workers/geo/verification_worker_spec.rb"
+- "./ee/spec/workers/refresh_license_compliance_checks_worker_spec.rb"
+- "./spec/controllers/abuse_reports_controller_spec.rb"
+- "./spec/controllers/admin/spam_logs_controller_spec.rb"
+- "./spec/controllers/admin/users_controller_spec.rb"
+- "./spec/controllers/omniauth_callbacks_controller_spec.rb"
+- "./spec/controllers/projects/issues_controller_spec.rb"
+- "./spec/controllers/projects/jobs_controller_spec.rb"
+- "./spec/controllers/projects/merge_requests/content_controller_spec.rb"
+- "./spec/controllers/projects/merge_requests_controller_spec.rb"
+- "./spec/controllers/projects/pipelines_controller_spec.rb"
+- "./spec/controllers/projects/pipelines/tests_controller_spec.rb"
+- "./spec/controllers/projects/settings/access_tokens_controller_spec.rb"
+- "./spec/controllers/projects/tags_controller_spec.rb"
+- "./spec/controllers/sent_notifications_controller_spec.rb"
+- "./spec/factories_spec.rb"
+- "./spec/features/action_cable_logging_spec.rb"
+- "./spec/features/admin/admin_abuse_reports_spec.rb"
+- "./spec/features/admin/admin_appearance_spec.rb"
+- "./spec/features/admin/admin_broadcast_messages_spec.rb"
+- "./spec/features/admin/admin_builds_spec.rb"
+- "./spec/features/admin/admin_dev_ops_report_spec.rb"
+- "./spec/features/admin/admin_disables_git_access_protocol_spec.rb"
+- "./spec/features/admin/admin_disables_two_factor_spec.rb"
+- "./spec/features/admin/admin_groups_spec.rb"
+- "./spec/features/admin/admin_hooks_spec.rb"
+- "./spec/features/admin/admin_labels_spec.rb"
+- "./spec/features/admin/admin_mode/login_spec.rb"
+- "./spec/features/admin/admin_mode/logout_spec.rb"
+- "./spec/features/admin/admin_mode_spec.rb"
+- "./spec/features/admin/admin_mode/workers_spec.rb"
+- "./spec/features/admin/admin_projects_spec.rb"
+- "./spec/features/admin/admin_runners_spec.rb"
+- "./spec/features/admin/admin_search_settings_spec.rb"
+- "./spec/features/admin/admin_serverless_domains_spec.rb"
+- "./spec/features/admin/admin_settings_spec.rb"
+- "./spec/features/admin/admin_users_impersonation_tokens_spec.rb"
+- "./spec/features/admin/admin_uses_repository_checks_spec.rb"
+- "./spec/features/admin/clusters/eks_spec.rb"
+- "./spec/features/admin/dashboard_spec.rb"
+- "./spec/features/admin/integrations/user_activates_mattermost_slash_command_spec.rb"
+- "./spec/features/admin/users/user_spec.rb"
+- "./spec/features/admin/users/users_spec.rb"
+- "./spec/features/alert_management/alert_details_spec.rb"
+- "./spec/features/alert_management/alert_management_list_spec.rb"
+- "./spec/features/alert_management_spec.rb"
+- "./spec/features/alert_management/user_filters_alerts_by_status_spec.rb"
+- "./spec/features/alert_management/user_searches_alerts_spec.rb"
+- "./spec/features/alert_management/user_updates_alert_status_spec.rb"
+- "./spec/features/alerts_settings/user_views_alerts_settings_spec.rb"
+- "./spec/features/atom/dashboard_spec.rb"
+- "./spec/features/boards/boards_spec.rb"
+- "./spec/features/boards/focus_mode_spec.rb"
+- "./spec/features/boards/issue_ordering_spec.rb"
+- "./spec/features/boards/keyboard_shortcut_spec.rb"
+- "./spec/features/boards/multiple_boards_spec.rb"
+- "./spec/features/boards/new_issue_spec.rb"
+- "./spec/features/boards/reload_boards_on_browser_back_spec.rb"
+- "./spec/features/boards/sidebar_due_date_spec.rb"
+- "./spec/features/boards/sidebar_labels_in_namespaces_spec.rb"
+- "./spec/features/boards/sidebar_labels_spec.rb"
+- "./spec/features/boards/sidebar_milestones_spec.rb"
+- "./spec/features/boards/sidebar_spec.rb"
+- "./spec/features/boards/user_adds_lists_to_board_spec.rb"
+- "./spec/features/boards/user_visits_board_spec.rb"
+- "./spec/features/broadcast_messages_spec.rb"
+- "./spec/features/calendar_spec.rb"
+- "./spec/features/callouts/registration_enabled_spec.rb"
+- "./spec/features/clusters/cluster_detail_page_spec.rb"
+- "./spec/features/clusters/cluster_health_dashboard_spec.rb"
+- "./spec/features/commit_spec.rb"
+- "./spec/features/commits_spec.rb"
+- "./spec/features/commits/user_uses_quick_actions_spec.rb"
+- "./spec/features/contextual_sidebar_spec.rb"
+- "./spec/features/cycle_analytics_spec.rb"
+- "./spec/features/dashboard/activity_spec.rb"
+- "./spec/features/dashboard/archived_projects_spec.rb"
+- "./spec/features/dashboard/datetime_on_tooltips_spec.rb"
+- "./spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb"
+- "./spec/features/dashboard/groups_list_spec.rb"
+- "./spec/features/dashboard/group_spec.rb"
+- "./spec/features/dashboard/issues_filter_spec.rb"
+- "./spec/features/dashboard/issues_spec.rb"
+- "./spec/features/dashboard/label_filter_spec.rb"
+- "./spec/features/dashboard/merge_requests_spec.rb"
+- "./spec/features/dashboard/milestones_spec.rb"
+- "./spec/features/dashboard/project_member_activity_index_spec.rb"
+- "./spec/features/dashboard/projects_spec.rb"
+- "./spec/features/dashboard/root_spec.rb"
+- "./spec/features/dashboard/shortcuts_spec.rb"
+- "./spec/features/dashboard/snippets_spec.rb"
+- "./spec/features/dashboard/todos/todos_filtering_spec.rb"
+- "./spec/features/dashboard/todos/todos_spec.rb"
+- "./spec/features/dashboard/user_filters_projects_spec.rb"
+- "./spec/features/discussion_comments/commit_spec.rb"
+- "./spec/features/discussion_comments/issue_spec.rb"
+- "./spec/features/discussion_comments/merge_request_spec.rb"
+- "./spec/features/discussion_comments/snippets_spec.rb"
+- "./spec/features/error_pages_spec.rb"
+- "./spec/features/error_tracking/user_filters_errors_by_status_spec.rb"
+- "./spec/features/error_tracking/user_searches_sentry_errors_spec.rb"
+- "./spec/features/error_tracking/user_sees_error_details_spec.rb"
+- "./spec/features/error_tracking/user_sees_error_index_spec.rb"
+- "./spec/features/expand_collapse_diffs_spec.rb"
+- "./spec/features/explore/groups_list_spec.rb"
+- "./spec/features/explore/groups_spec.rb"
+- "./spec/features/explore/user_explores_projects_spec.rb"
+- "./spec/features/file_uploads/attachment_spec.rb"
+- "./spec/features/file_uploads/ci_artifact_spec.rb"
+- "./spec/features/file_uploads/git_lfs_spec.rb"
+- "./spec/features/file_uploads/graphql_add_design_spec.rb"
+- "./spec/features/file_uploads/group_import_spec.rb"
+- "./spec/features/file_uploads/maven_package_spec.rb"
+- "./spec/features/file_uploads/multipart_invalid_uploads_spec.rb"
+- "./spec/features/file_uploads/nuget_package_spec.rb"
+- "./spec/features/file_uploads/project_import_spec.rb"
+- "./spec/features/file_uploads/rubygem_package_spec.rb"
+- "./spec/features/file_uploads/user_avatar_spec.rb"
+- "./spec/features/frequently_visited_projects_and_groups_spec.rb"
+- "./spec/features/gitlab_experiments_spec.rb"
+- "./spec/features/global_search_spec.rb"
+- "./spec/features/groups/activity_spec.rb"
+- "./spec/features/groups/board_sidebar_spec.rb"
+- "./spec/features/groups/board_spec.rb"
+- "./spec/features/groups/clusters/eks_spec.rb"
+- "./spec/features/groups/clusters/user_spec.rb"
+- "./spec/features/groups/container_registry_spec.rb"
+- "./spec/features/groups/dependency_proxy_spec.rb"
+- "./spec/features/groups/empty_states_spec.rb"
+- "./spec/features/groups/import_export/connect_instance_spec.rb"
+- "./spec/features/groups/import_export/export_file_spec.rb"
+- "./spec/features/groups/import_export/import_file_spec.rb"
+- "./spec/features/groups/integrations/user_activates_mattermost_slash_command_spec.rb"
+- "./spec/features/groups/issues_spec.rb"
+- "./spec/features/groups/labels/index_spec.rb"
+- "./spec/features/groups/labels/search_labels_spec.rb"
+- "./spec/features/groups/labels/sort_labels_spec.rb"
+- "./spec/features/groups/labels/subscription_spec.rb"
+- "./spec/features/groups/members/filter_members_spec.rb"
+- "./spec/features/groups/members/leave_group_spec.rb"
+- "./spec/features/groups/members/list_members_spec.rb"
+- "./spec/features/groups/members/manage_groups_spec.rb"
+- "./spec/features/groups/members/manage_members_spec.rb"
+- "./spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb"
+- "./spec/features/groups/members/master_manages_access_requests_spec.rb"
+- "./spec/features/groups/members/search_members_spec.rb"
+- "./spec/features/groups/members/sort_members_spec.rb"
+- "./spec/features/groups/members/tabs_spec.rb"
+- "./spec/features/groups/merge_requests_spec.rb"
+- "./spec/features/groups/milestones/gfm_autocomplete_spec.rb"
+- "./spec/features/groups/milestone_spec.rb"
+- "./spec/features/groups/milestones_sorting_spec.rb"
+- "./spec/features/groups/packages_spec.rb"
+- "./spec/features/groups/settings/group_badges_spec.rb"
+- "./spec/features/groups/settings/packages_and_registries_spec.rb"
+- "./spec/features/groups/settings/repository_spec.rb"
+- "./spec/features/groups/settings/user_searches_in_settings_spec.rb"
+- "./spec/features/groups/show_spec.rb"
+- "./spec/features/groups_spec.rb"
+- "./spec/features/groups/user_browse_projects_group_page_spec.rb"
+- "./spec/features/groups/user_sees_users_dropdowns_in_issuables_list_spec.rb"
+- "./spec/features/help_pages_spec.rb"
+- "./spec/features/ide_spec.rb"
+- "./spec/features/ide/user_commits_changes_spec.rb"
+- "./spec/features/ide/user_opens_merge_request_spec.rb"
+- "./spec/features/import/manifest_import_spec.rb"
+- "./spec/features/incidents/incident_details_spec.rb"
+- "./spec/features/incidents/incidents_list_spec.rb"
+- "./spec/features/incidents/user_creates_new_incident_spec.rb"
+- "./spec/features/incidents/user_filters_incidents_by_status_spec.rb"
+- "./spec/features/incidents/user_searches_incidents_spec.rb"
+- "./spec/features/incidents/user_views_incident_spec.rb"
+- "./spec/features/issuables/issuable_list_spec.rb"
+- "./spec/features/issuables/markdown_references/internal_references_spec.rb"
+- "./spec/features/issuables/markdown_references/jira_spec.rb"
+- "./spec/features/issuables/sorting_list_spec.rb"
+- "./spec/features/issuables/user_sees_sidebar_spec.rb"
+- "./spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb"
+- "./spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb"
+- "./spec/features/issues/csv_spec.rb"
+- "./spec/features/issues/discussion_lock_spec.rb"
+- "./spec/features/issues/filtered_search/dropdown_assignee_spec.rb"
+- "./spec/features/issues/filtered_search/dropdown_author_spec.rb"
+- "./spec/features/issues/filtered_search/dropdown_base_spec.rb"
+- "./spec/features/issues/filtered_search/dropdown_emoji_spec.rb"
+- "./spec/features/issues/filtered_search/dropdown_hint_spec.rb"
+- "./spec/features/issues/filtered_search/dropdown_label_spec.rb"
+- "./spec/features/issues/filtered_search/dropdown_milestone_spec.rb"
+- "./spec/features/issues/filtered_search/dropdown_release_spec.rb"
+- "./spec/features/issues/filtered_search/filter_issues_spec.rb"
+- "./spec/features/issues/filtered_search/recent_searches_spec.rb"
+- "./spec/features/issues/filtered_search/search_bar_spec.rb"
+- "./spec/features/issues/filtered_search/visual_tokens_spec.rb"
+- "./spec/features/issues/form_spec.rb"
+- "./spec/features/issues/gfm_autocomplete_spec.rb"
+- "./spec/features/issues/group_label_sidebar_spec.rb"
+- "./spec/features/issues/incident_issue_spec.rb"
+- "./spec/features/issues/issue_detail_spec.rb"
+- "./spec/features/issues/issue_header_spec.rb"
+- "./spec/features/issues/issue_sidebar_spec.rb"
+- "./spec/features/issues/keyboard_shortcut_spec.rb"
+- "./spec/features/issues/markdown_toolbar_spec.rb"
+- "./spec/features/issues/move_spec.rb"
+- "./spec/features/issues/note_polling_spec.rb"
+- "./spec/features/issues/notes_on_issues_spec.rb"
+- "./spec/features/issues/related_issues_spec.rb"
+- "./spec/features/issues/resource_label_events_spec.rb"
+- "./spec/features/issues/service_desk_spec.rb"
+- "./spec/features/issues/spam_issues_spec.rb"
+- "./spec/features/issues/todo_spec.rb"
+- "./spec/features/issues/user_bulk_edits_issues_labels_spec.rb"
+- "./spec/features/issues/user_bulk_edits_issues_spec.rb"
+- "./spec/features/issues/user_comments_on_issue_spec.rb"
+- "./spec/features/issues/user_creates_branch_and_merge_request_spec.rb"
+- "./spec/features/issues/user_creates_confidential_merge_request_spec.rb"
+- "./spec/features/issues/user_creates_issue_by_email_spec.rb"
+- "./spec/features/issues/user_creates_issue_spec.rb"
+- "./spec/features/issues/user_edits_issue_spec.rb"
+- "./spec/features/issues/user_filters_issues_spec.rb"
+- "./spec/features/issues/user_interacts_with_awards_spec.rb"
+- "./spec/features/issues/user_invites_from_a_comment_spec.rb"
+- "./spec/features/issues/user_resets_their_incoming_email_token_spec.rb"
+- "./spec/features/issues/user_sees_empty_state_spec.rb"
+- "./spec/features/issues/user_sees_live_update_spec.rb"
+- "./spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb"
+- "./spec/features/issues/user_sorts_issue_comments_spec.rb"
+- "./spec/features/issues/user_sorts_issues_spec.rb"
+- "./spec/features/issues/user_toggles_subscription_spec.rb"
+- "./spec/features/issues/user_uses_quick_actions_spec.rb"
+- "./spec/features/issues/user_views_issue_spec.rb"
+- "./spec/features/issues/user_views_issues_spec.rb"
+- "./spec/features/jira_connect/branches_spec.rb"
+- "./spec/features/labels_hierarchy_spec.rb"
+- "./spec/features/markdown/copy_as_gfm_spec.rb"
+- "./spec/features/markdown/gitlab_flavored_markdown_spec.rb"
+- "./spec/features/markdown/keyboard_shortcuts_spec.rb"
+- "./spec/features/markdown/math_spec.rb"
+- "./spec/features/markdown/mermaid_spec.rb"
+- "./spec/features/markdown/metrics_spec.rb"
+- "./spec/features/merge_request/batch_comments_spec.rb"
+- "./spec/features/merge_request/close_reopen_report_toggle_spec.rb"
+- "./spec/features/merge_request/maintainer_edits_fork_spec.rb"
+- "./spec/features/merge_request/merge_request_discussion_lock_spec.rb"
+- "./spec/features/merge_requests/filters_generic_behavior_spec.rb"
+- "./spec/features/merge_requests/user_exports_as_csv_spec.rb"
+- "./spec/features/merge_requests/user_filters_by_approvals_spec.rb"
+- "./spec/features/merge_requests/user_filters_by_assignees_spec.rb"
+- "./spec/features/merge_requests/user_filters_by_deployments_spec.rb"
+- "./spec/features/merge_requests/user_filters_by_draft_spec.rb"
+- "./spec/features/merge_requests/user_filters_by_labels_spec.rb"
+- "./spec/features/merge_requests/user_filters_by_milestones_spec.rb"
+- "./spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb"
+- "./spec/features/merge_requests/user_filters_by_target_branch_spec.rb"
+- "./spec/features/merge_requests/user_mass_updates_spec.rb"
+- "./spec/features/merge_request/user_accepts_merge_request_spec.rb"
+- "./spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb"
+- "./spec/features/merge_request/user_approves_spec.rb"
+- "./spec/features/merge_request/user_assigns_themselves_spec.rb"
+- "./spec/features/merge_request/user_awards_emoji_spec.rb"
+- "./spec/features/merge_request/user_clicks_merge_request_tabs_spec.rb"
+- "./spec/features/merge_request/user_comments_on_commit_spec.rb"
+- "./spec/features/merge_request/user_comments_on_diff_spec.rb"
+- "./spec/features/merge_request/user_comments_on_merge_request_spec.rb"
+- "./spec/features/merge_request/user_creates_image_diff_notes_spec.rb"
+- "./spec/features/merge_request/user_creates_merge_request_spec.rb"
+- "./spec/features/merge_request/user_creates_mr_spec.rb"
+- "./spec/features/merge_request/user_customizes_merge_commit_message_spec.rb"
+- "./spec/features/merge_request/user_edits_assignees_sidebar_spec.rb"
+- "./spec/features/merge_request/user_edits_merge_request_spec.rb"
+- "./spec/features/merge_request/user_edits_mr_spec.rb"
+- "./spec/features/merge_request/user_edits_reviewers_sidebar_spec.rb"
+- "./spec/features/merge_request/user_expands_diff_spec.rb"
+- "./spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb"
+- "./spec/features/merge_request/user_invites_from_a_comment_spec.rb"
+- "./spec/features/merge_request/user_jumps_to_discussion_spec.rb"
+- "./spec/features/merge_request/user_locks_discussion_spec.rb"
+- "./spec/features/merge_request/user_manages_subscription_spec.rb"
+- "./spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb"
+- "./spec/features/merge_request/user_merges_immediately_spec.rb"
+- "./spec/features/merge_request/user_merges_merge_request_spec.rb"
+- "./spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb"
+- "./spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb"
+- "./spec/features/merge_request/user_posts_diff_notes_spec.rb"
+- "./spec/features/merge_request/user_posts_notes_spec.rb"
+- "./spec/features/merge_request/user_rebases_merge_request_spec.rb"
+- "./spec/features/merge_request/user_resolves_conflicts_spec.rb"
+- "./spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb"
+- "./spec/features/merge_request/user_resolves_outdated_diff_discussions_spec.rb"
+- "./spec/features/merge_request/user_resolves_wip_mr_spec.rb"
+- "./spec/features/merge_request/user_reverts_merge_request_spec.rb"
+- "./spec/features/merge_request/user_reviews_image_spec.rb"
+- "./spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb"
+- "./spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb"
+- "./spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb"
+- "./spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb"
+- "./spec/features/merge_request/user_sees_closing_issues_message_spec.rb"
+- "./spec/features/merge_request/user_sees_deleted_target_branch_spec.rb"
+- "./spec/features/merge_request/user_sees_deployment_widget_spec.rb"
+- "./spec/features/merge_request/user_sees_diff_spec.rb"
+- "./spec/features/merge_request/user_sees_discussions_spec.rb"
+- "./spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb"
+- "./spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb"
+- "./spec/features/merge_request/user_sees_merge_widget_spec.rb"
+- "./spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb"
+- "./spec/features/merge_request/user_sees_mr_from_deleted_forked_project_spec.rb"
+- "./spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb"
+- "./spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb"
+- "./spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb"
+- "./spec/features/merge_request/user_sees_pipelines_spec.rb"
+- "./spec/features/merge_request/user_sees_suggest_pipeline_spec.rb"
+- "./spec/features/merge_request/user_sees_system_notes_spec.rb"
+- "./spec/features/merge_request/user_sees_versions_spec.rb"
+- "./spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb"
+- "./spec/features/merge_request/user_squashes_merge_request_spec.rb"
+- "./spec/features/merge_request/user_suggests_changes_on_diff_spec.rb"
+- "./spec/features/merge_request/user_toggles_whitespace_changes_spec.rb"
+- "./spec/features/merge_request/user_uses_quick_actions_spec.rb"
+- "./spec/features/merge_request/user_views_auto_expanding_diff_spec.rb"
+- "./spec/features/merge_request/user_views_diffs_commit_spec.rb"
+- "./spec/features/merge_request/user_views_diffs_file_by_file_spec.rb"
+- "./spec/features/merge_request/user_views_diffs_spec.rb"
+- "./spec/features/merge_request/user_views_open_merge_request_spec.rb"
+- "./spec/features/merge_request/user_views_user_status_on_merge_request_spec.rb"
+- "./spec/features/milestone_spec.rb"
+- "./spec/features/milestones/user_creates_milestone_spec.rb"
+- "./spec/features/milestones/user_deletes_milestone_spec.rb"
+- "./spec/features/milestones/user_edits_milestone_spec.rb"
+- "./spec/features/milestones/user_views_milestone_spec.rb"
+- "./spec/features/milestones/user_views_milestones_spec.rb"
+- "./spec/features/nav/top_nav_responsive_spec.rb"
+- "./spec/features/oauth_login_spec.rb"
+- "./spec/features/participants_autocomplete_spec.rb"
+- "./spec/features/populate_new_pipeline_vars_with_params_spec.rb"
+- "./spec/features/profiles/account_spec.rb"
+- "./spec/features/profiles/active_sessions_spec.rb"
+- "./spec/features/profiles/keys_spec.rb"
+- "./spec/features/profiles/oauth_applications_spec.rb"
+- "./spec/features/profile_spec.rb"
+- "./spec/features/profiles/personal_access_tokens_spec.rb"
+- "./spec/features/profiles/user_changes_notified_of_own_activity_spec.rb"
+- "./spec/features/profiles/user_edit_preferences_spec.rb"
+- "./spec/features/profiles/user_edit_profile_spec.rb"
+- "./spec/features/profiles/user_search_settings_spec.rb"
+- "./spec/features/profiles/user_visits_notifications_tab_spec.rb"
+- "./spec/features/profiles/user_visits_profile_preferences_page_spec.rb"
+- "./spec/features/profiles/user_visits_profile_spec.rb"
+- "./spec/features/project_group_variables_spec.rb"
+- "./spec/features/projects/activity/user_sees_activity_spec.rb"
+- "./spec/features/projects/activity/user_sees_design_activity_spec.rb"
+- "./spec/features/projects/activity/user_sees_design_comment_spec.rb"
+- "./spec/features/projects/activity/user_sees_private_activity_spec.rb"
+- "./spec/features/projects/artifacts/file_spec.rb"
+- "./spec/features/projects/artifacts/raw_spec.rb"
+- "./spec/features/projects/artifacts/user_browses_artifacts_spec.rb"
+- "./spec/features/projects/badges/list_spec.rb"
+- "./spec/features/projects/badges/pipeline_badge_spec.rb"
+- "./spec/features/projects/blobs/balsamiq_spec.rb"
+- "./spec/features/projects/blobs/blob_line_permalink_updater_spec.rb"
+- "./spec/features/projects/blobs/blob_show_spec.rb"
+- "./spec/features/projects/blobs/edit_spec.rb"
+- "./spec/features/projects/blobs/shortcuts_blob_spec.rb"
+- "./spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb"
+- "./spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb"
+- "./spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb"
+- "./spec/features/projects/branches/new_branch_ref_dropdown_spec.rb"
+- "./spec/features/projects/branches_spec.rb"
+- "./spec/features/projects/branches/user_creates_branch_spec.rb"
+- "./spec/features/projects/branches/user_deletes_branch_spec.rb"
+- "./spec/features/projects/branches/user_views_branches_spec.rb"
+- "./spec/features/projects/ci/editor_spec.rb"
+- "./spec/features/projects/clusters/eks_spec.rb"
+- "./spec/features/projects/clusters/gcp_spec.rb"
+- "./spec/features/projects/clusters_spec.rb"
+- "./spec/features/projects/clusters/user_spec.rb"
+- "./spec/features/projects/commit/builds_spec.rb"
+- "./spec/features/projects/commit/cherry_pick_spec.rb"
+- "./spec/features/projects/commit/comments/user_adds_comment_spec.rb"
+- "./spec/features/projects/commit/comments/user_deletes_comments_spec.rb"
+- "./spec/features/projects/commit/comments/user_edits_comments_spec.rb"
+- "./spec/features/projects/commit/diff_notes_spec.rb"
+- "./spec/features/projects/commit/mini_pipeline_graph_spec.rb"
+- "./spec/features/projects/commits/user_browses_commits_spec.rb"
+- "./spec/features/projects/commit/user_comments_on_commit_spec.rb"
+- "./spec/features/projects/commit/user_reverts_commit_spec.rb"
+- "./spec/features/projects/commit/user_views_user_status_on_commit_spec.rb"
+- "./spec/features/projects/compare_spec.rb"
+- "./spec/features/projects/container_registry_spec.rb"
+- "./spec/features/projects/deploy_keys_spec.rb"
+- "./spec/features/projects/diffs/diff_show_spec.rb"
+- "./spec/features/projects/environments/environment_metrics_spec.rb"
+- "./spec/features/projects/environments/environment_spec.rb"
+- "./spec/features/projects/environments/environments_spec.rb"
+- "./spec/features/projects/environments_pod_logs_spec.rb"
+- "./spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb"
+- "./spec/features/projects/feature_flags/user_deletes_feature_flag_spec.rb"
+- "./spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb"
+- "./spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb"
+- "./spec/features/projects/feature_flag_user_lists/user_deletes_feature_flag_user_list_spec.rb"
+- "./spec/features/projects/feature_flag_user_lists/user_edits_feature_flag_user_list_spec.rb"
+- "./spec/features/projects/feature_flag_user_lists/user_sees_feature_flag_user_list_details_spec.rb"
+- "./spec/features/projects/features_visibility_spec.rb"
+- "./spec/features/projects/files/dockerfile_dropdown_spec.rb"
+- "./spec/features/projects/files/edit_file_soft_wrap_spec.rb"
+- "./spec/features/projects/files/files_sort_submodules_with_folders_spec.rb"
+- "./spec/features/projects/files/find_file_keyboard_spec.rb"
+- "./spec/features/projects/files/gitignore_dropdown_spec.rb"
+- "./spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb"
+- "./spec/features/projects/files/project_owner_creates_license_file_spec.rb"
+- "./spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb"
+- "./spec/features/projects/files/template_selector_menu_spec.rb"
+- "./spec/features/projects/files/template_type_dropdown_spec.rb"
+- "./spec/features/projects/files/undo_template_spec.rb"
+- "./spec/features/projects/files/user_browses_a_tree_with_a_folder_containing_only_a_folder_spec.rb"
+- "./spec/features/projects/files/user_browses_files_spec.rb"
+- "./spec/features/projects/files/user_browses_lfs_files_spec.rb"
+- "./spec/features/projects/files/user_creates_directory_spec.rb"
+- "./spec/features/projects/files/user_creates_files_spec.rb"
+- "./spec/features/projects/files/user_deletes_files_spec.rb"
+- "./spec/features/projects/files/user_edits_files_spec.rb"
+- "./spec/features/projects/files/user_find_file_spec.rb"
+- "./spec/features/projects/files/user_reads_pipeline_status_spec.rb"
+- "./spec/features/projects/files/user_replaces_files_spec.rb"
+- "./spec/features/projects/files/user_uploads_files_spec.rb"
+- "./spec/features/projects/fork_spec.rb"
+- "./spec/features/projects/gfm_autocomplete_load_spec.rb"
+- "./spec/features/projects/graph_spec.rb"
+- "./spec/features/projects/import_export/export_file_spec.rb"
+- "./spec/features/projects/import_export/import_file_spec.rb"
+- "./spec/features/projects/infrastructure_registry_spec.rb"
+- "./spec/features/projects/integrations/user_activates_asana_spec.rb"
+- "./spec/features/projects/integrations/user_activates_assembla_spec.rb"
+- "./spec/features/projects/integrations/user_activates_atlassian_bamboo_ci_spec.rb"
+- "./spec/features/projects/integrations/user_activates_flowdock_spec.rb"
+- "./spec/features/projects/integrations/user_activates_jira_spec.rb"
+- "./spec/features/projects/integrations/user_activates_pivotaltracker_spec.rb"
+- "./spec/features/projects/integrations/user_uses_inherited_settings_spec.rb"
+- "./spec/features/projects/issuable_templates_spec.rb"
+- "./spec/features/projects/issues/design_management/user_paginates_designs_spec.rb"
+- "./spec/features/projects/issues/design_management/user_permissions_upload_spec.rb"
+- "./spec/features/projects/issues/design_management/user_uploads_designs_spec.rb"
+- "./spec/features/projects/issues/design_management/user_views_design_spec.rb"
+- "./spec/features/projects/issues/design_management/user_views_designs_spec.rb"
+- "./spec/features/projects/issues/design_management/user_views_designs_with_svg_xss_spec.rb"
+- "./spec/features/projects/issues/email_participants_spec.rb"
+- "./spec/features/projects/jobs/permissions_spec.rb"
+- "./spec/features/projects/jobs_spec.rb"
+- "./spec/features/projects/jobs/user_browses_job_spec.rb"
+- "./spec/features/projects/jobs/user_browses_jobs_spec.rb"
+- "./spec/features/projects/labels/issues_sorted_by_priority_spec.rb"
+- "./spec/features/projects/labels/search_labels_spec.rb"
+- "./spec/features/projects/labels/sort_labels_spec.rb"
+- "./spec/features/projects/labels/subscription_spec.rb"
+- "./spec/features/projects/labels/update_prioritization_spec.rb"
+- "./spec/features/projects/labels/user_removes_labels_spec.rb"
+- "./spec/features/projects/members/anonymous_user_sees_members_spec.rb"
+- "./spec/features/projects/members/group_member_cannot_leave_group_project_spec.rb"
+- "./spec/features/projects/members/group_members_spec.rb"
+- "./spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb"
+- "./spec/features/projects/members/groups_with_access_list_spec.rb"
+- "./spec/features/projects/members/invite_group_spec.rb"
+- "./spec/features/projects/members/list_spec.rb"
+- "./spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb"
+- "./spec/features/projects/members/master_manages_access_requests_spec.rb"
+- "./spec/features/projects/members/sorting_spec.rb"
+- "./spec/features/projects/members/tabs_spec.rb"
+- "./spec/features/projects/members/user_requests_access_spec.rb"
+- "./spec/features/projects/merge_request_button_spec.rb"
+- "./spec/features/projects/milestones/gfm_autocomplete_spec.rb"
+- "./spec/features/projects/milestones/milestones_sorting_spec.rb"
+- "./spec/features/projects/milestones/new_spec.rb"
+- "./spec/features/projects/milestones/user_interacts_with_labels_spec.rb"
+- "./spec/features/projects/network_graph_spec.rb"
+- "./spec/features/projects/new_project_from_template_spec.rb"
+- "./spec/features/projects/new_project_spec.rb"
+- "./spec/features/projects/packages_spec.rb"
+- "./spec/features/projects/pages/user_adds_domain_spec.rb"
+- "./spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb"
+- "./spec/features/projects/pages/user_edits_settings_spec.rb"
+- "./spec/features/projects/pipeline_schedules_spec.rb"
+- "./spec/features/projects/pipelines/pipeline_spec.rb"
+- "./spec/features/projects/pipelines/pipelines_spec.rb"
+- "./spec/features/projects/product_analytics/graphs_spec.rb"
+- "./spec/features/projects/releases/user_creates_release_spec.rb"
+- "./spec/features/projects/releases/user_views_edit_release_spec.rb"
+- "./spec/features/projects/releases/user_views_release_spec.rb"
+- "./spec/features/projects/releases/user_views_releases_spec.rb"
+- "./spec/features/projects/remote_mirror_spec.rb"
+- "./spec/features/projects/serverless/functions_spec.rb"
+- "./spec/features/projects/services/disable_triggers_spec.rb"
+- "./spec/features/projects/services/prometheus_external_alerts_spec.rb"
+- "./spec/features/projects/services/user_activates_emails_on_push_spec.rb"
+- "./spec/features/projects/services/user_activates_irker_spec.rb"
+- "./spec/features/projects/services/user_activates_issue_tracker_spec.rb"
+- "./spec/features/projects/services/user_activates_jetbrains_teamcity_ci_spec.rb"
+- "./spec/features/projects/services/user_activates_mattermost_slash_command_spec.rb"
+- "./spec/features/projects/services/user_activates_packagist_spec.rb"
+- "./spec/features/projects/services/user_activates_prometheus_spec.rb"
+- "./spec/features/projects/services/user_activates_pushover_spec.rb"
+- "./spec/features/projects/services/user_activates_slack_notifications_spec.rb"
+- "./spec/features/projects/services/user_activates_slack_slash_command_spec.rb"
+- "./spec/features/projects/services/user_views_services_spec.rb"
+- "./spec/features/projects/settings/access_tokens_spec.rb"
+- "./spec/features/projects/settings/lfs_settings_spec.rb"
+- "./spec/features/projects/settings/monitor_settings_spec.rb"
+- "./spec/features/projects/settings/packages_settings_spec.rb"
+- "./spec/features/projects/settings/project_badges_spec.rb"
+- "./spec/features/projects/settings/project_settings_spec.rb"
+- "./spec/features/projects/settings/registry_settings_spec.rb"
+- "./spec/features/projects/settings/repository_settings_spec.rb"
+- "./spec/features/projects/settings/service_desk_setting_spec.rb"
+- "./spec/features/projects/settings/user_changes_default_branch_spec.rb"
+- "./spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb"
+- "./spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb"
+- "./spec/features/projects/settings/user_manages_project_members_spec.rb"
+- "./spec/features/projects/settings/user_searches_in_settings_spec.rb"
+- "./spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb"
+- "./spec/features/projects/settings/user_tags_project_spec.rb"
+- "./spec/features/projects/settings/user_transfers_a_project_spec.rb"
+- "./spec/features/projects/settings/visibility_settings_spec.rb"
+- "./spec/features/projects/settings/webhooks_settings_spec.rb"
+- "./spec/features/projects/show/schema_markup_spec.rb"
+- "./spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb"
+- "./spec/features/projects/show/user_interacts_with_stars_spec.rb"
+- "./spec/features/projects/show/user_manages_notifications_spec.rb"
+- "./spec/features/projects/show/user_sees_collaboration_links_spec.rb"
+- "./spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb"
+- "./spec/features/projects/show/user_sees_readme_spec.rb"
+- "./spec/features/projects/show/user_uploads_files_spec.rb"
+- "./spec/features/projects/snippets/create_snippet_spec.rb"
+- "./spec/features/projects/snippets/show_spec.rb"
+- "./spec/features/projects/snippets/user_comments_on_snippet_spec.rb"
+- "./spec/features/projects/snippets/user_deletes_snippet_spec.rb"
+- "./spec/features/projects/snippets/user_updates_snippet_spec.rb"
+- "./spec/features/projects_spec.rb"
+- "./spec/features/projects/sub_group_issuables_spec.rb"
+- "./spec/features/projects/tags/user_edits_tags_spec.rb"
+- "./spec/features/projects/terraform_spec.rb"
+- "./spec/features/projects/tree/create_directory_spec.rb"
+- "./spec/features/projects/tree/create_file_spec.rb"
+- "./spec/features/projects/tree/tree_show_spec.rb"
+- "./spec/features/projects/tree/upload_file_spec.rb"
+- "./spec/features/projects/user_changes_project_visibility_spec.rb"
+- "./spec/features/projects/user_creates_project_spec.rb"
+- "./spec/features/projects/user_sees_sidebar_spec.rb"
+- "./spec/features/projects/user_sees_user_popover_spec.rb"
+- "./spec/features/projects/user_uses_shortcuts_spec.rb"
+- "./spec/features/projects/user_views_empty_project_spec.rb"
+- "./spec/features/projects/view_on_env_spec.rb"
+- "./spec/features/projects/wikis_spec.rb"
+- "./spec/features/projects/wiki/user_views_wiki_empty_spec.rb"
+- "./spec/features/project_variables_spec.rb"
+- "./spec/features/promotion_spec.rb"
+- "./spec/features/protected_branches_spec.rb"
+- "./spec/features/protected_tags_spec.rb"
+- "./spec/features/reportable_note/commit_spec.rb"
+- "./spec/features/reportable_note/issue_spec.rb"
+- "./spec/features/reportable_note/merge_request_spec.rb"
+- "./spec/features/reportable_note/snippets_spec.rb"
+- "./spec/features/runners_spec.rb"
+- "./spec/features/search/user_searches_for_code_spec.rb"
+- "./spec/features/search/user_searches_for_commits_spec.rb"
+- "./spec/features/search/user_searches_for_issues_spec.rb"
+- "./spec/features/search/user_searches_for_merge_requests_spec.rb"
+- "./spec/features/search/user_searches_for_milestones_spec.rb"
+- "./spec/features/search/user_searches_for_projects_spec.rb"
+- "./spec/features/search/user_searches_for_users_spec.rb"
+- "./spec/features/search/user_searches_for_wiki_pages_spec.rb"
+- "./spec/features/search/user_uses_header_search_field_spec.rb"
+- "./spec/features/search/user_uses_search_filters_spec.rb"
+- "./spec/features/signed_commits_spec.rb"
+- "./spec/features/snippets/embedded_snippet_spec.rb"
+- "./spec/features/snippets/internal_snippet_spec.rb"
+- "./spec/features/snippets/notes_on_personal_snippets_spec.rb"
+- "./spec/features/snippets/private_snippets_spec.rb"
+- "./spec/features/snippets/public_snippets_spec.rb"
+- "./spec/features/snippets/show_spec.rb"
+- "./spec/features/snippets/user_creates_snippet_spec.rb"
+- "./spec/features/snippets/user_deletes_snippet_spec.rb"
+- "./spec/features/snippets/user_edits_snippet_spec.rb"
+- "./spec/features/tags/developer_creates_tag_spec.rb"
+- "./spec/features/tags/developer_deletes_tag_spec.rb"
+- "./spec/features/tags/developer_updates_tag_spec.rb"
+- "./spec/features/task_lists_spec.rb"
+- "./spec/features/triggers_spec.rb"
+- "./spec/features/u2f_spec.rb"
+- "./spec/features/uploads/user_uploads_avatar_to_profile_spec.rb"
+- "./spec/features/uploads/user_uploads_file_to_note_spec.rb"
+- "./spec/features/user_can_display_performance_bar_spec.rb"
+- "./spec/features/user_opens_link_to_comment_spec.rb"
+- "./spec/features/user_sees_revert_modal_spec.rb"
+- "./spec/features/users/login_spec.rb"
+- "./spec/features/users/logout_spec.rb"
+- "./spec/features/users/overview_spec.rb"
+- "./spec/features/users/signup_spec.rb"
+- "./spec/features/users/snippets_spec.rb"
+- "./spec/features/users/terms_spec.rb"
+- "./spec/features/users/user_browses_projects_on_user_page_spec.rb"
+- "./spec/features/webauthn_spec.rb"
+- "./spec/features/whats_new_spec.rb"
+- "./spec/finders/ci/pipeline_schedules_finder_spec.rb"
+- "./spec/finders/ci/pipelines_finder_spec.rb"
+- "./spec/finders/ci/pipelines_for_merge_request_finder_spec.rb"
+- "./spec/finders/projects_finder_spec.rb"
+- "./spec/finders/releases/evidence_pipeline_finder_spec.rb"
+- "./spec/frontend/fixtures/analytics.rb"
+- "./spec/frontend/fixtures/jobs.rb"
+- "./spec/frontend/fixtures/pipeline_schedules.rb"
+- "./spec/frontend/fixtures/pipelines.rb"
+- "./spec/graphql/mutations/design_management/upload_spec.rb"
+- "./spec/graphql/mutations/merge_requests/accept_spec.rb"
+- "./spec/graphql/resolvers/ci/test_report_summary_resolver_spec.rb"
+- "./spec/helpers/issuables_helper_spec.rb"
+- "./spec/initializers/active_record_locking_spec.rb"
+- "./spec/initializers/database_config_spec.rb"
+- "./spec/lib/gitlab/auth_spec.rb"
+- "./spec/lib/gitlab/ci/badge/pipeline/status_spec.rb"
+- "./spec/lib/gitlab/ci/build/policy/changes_spec.rb"
+- "./spec/lib/gitlab/ci/charts_spec.rb"
+- "./spec/lib/gitlab/ci/config_spec.rb"
+- "./spec/lib/gitlab/ci/pipeline/chain/create_spec.rb"
+- "./spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb"
+- "./spec/lib/gitlab/ci/pipeline/seed/build_spec.rb"
+- "./spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb"
+- "./spec/lib/gitlab/ci/status/stage/common_spec.rb"
+- "./spec/lib/gitlab/ci/status/stage/factory_spec.rb"
+- "./spec/lib/gitlab/ci/status/stage/play_manual_spec.rb"
+- "./spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb"
+- "./spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb"
+- "./spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb"
+- "./spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb"
+- "./spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb"
+- "./spec/lib/gitlab/database/bulk_update_spec.rb"
+- "./spec/lib/gitlab/database/connection_spec.rb"
+- "./spec/lib/gitlab/database/load_balancing/host_spec.rb"
+- "./spec/lib/gitlab/database/load_balancing_spec.rb"
+- "./spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb"
+- "./spec/lib/gitlab/database/postgresql_adapter/type_map_cache_spec.rb"
+- "./spec/lib/gitlab/database/schema_migrations/context_spec.rb"
+- "./spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb"
+- "./spec/lib/gitlab/database/with_lock_retries_spec.rb"
+- "./spec/lib/gitlab/data_builder/pipeline_spec.rb"
+- "./spec/lib/gitlab/email/handler/create_issue_handler_spec.rb"
+- "./spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb"
+- "./spec/lib/gitlab/email/handler/create_note_handler_spec.rb"
+- "./spec/lib/gitlab/email/handler/create_note_on_issuable_handler_spec.rb"
+- "./spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb"
+- "./spec/lib/gitlab/usage_data_spec.rb"
+- "./spec/lib/peek/views/active_record_spec.rb"
+- "./spec/mailers/emails/pipelines_spec.rb"
+- "./spec/migrations/20210205174154_remove_bad_dependency_proxy_manifests_spec.rb"
+- "./spec/migrations/20210722150102_operations_feature_flags_correct_flexible_rollout_values_spec.rb"
+- "./spec/migrations/backfill_escalation_policies_for_oncall_schedules_spec.rb"
+- "./spec/migrations/insert_ci_daily_pipeline_schedule_triggers_plan_limits_spec.rb"
+- "./spec/migrations/remove_duplicate_dast_site_tokens_with_same_token_spec.rb"
+- "./spec/models/ci/bridge_spec.rb"
+- "./spec/models/ci/build_need_spec.rb"
+- "./spec/models/ci/build_spec.rb"
+- "./spec/models/ci/build_trace_chunk_spec.rb"
+- "./spec/models/ci/commit_with_pipeline_spec.rb"
+- "./spec/models/ci/group_spec.rb"
+- "./spec/models/ci/group_variable_spec.rb"
+- "./spec/models/ci/instance_variable_spec.rb"
+- "./spec/models/ci/job_artifact_spec.rb"
+- "./spec/models/ci/job_variable_spec.rb"
+- "./spec/models/ci/legacy_stage_spec.rb"
+- "./spec/models/ci/pipeline_schedule_spec.rb"
+- "./spec/models/ci/pipeline_spec.rb"
+- "./spec/models/ci/runner_namespace_spec.rb"
+- "./spec/models/ci/runner_project_spec.rb"
+- "./spec/models/ci/runner_spec.rb"
+- "./spec/models/ci/running_build_spec.rb"
+- "./spec/models/ci/stage_spec.rb"
+- "./spec/models/ci/variable_spec.rb"
+- "./spec/models/clusters/applications/jupyter_spec.rb"
+- "./spec/models/clusters/applications/runner_spec.rb"
+- "./spec/models/commit_collection_spec.rb"
+- "./spec/models/commit_status_spec.rb"
+- "./spec/models/concerns/batch_destroy_dependent_associations_spec.rb"
+- "./spec/models/concerns/bulk_insertable_associations_spec.rb"
+- "./spec/models/concerns/cron_schedulable_spec.rb"
+- "./spec/models/concerns/has_environment_scope_spec.rb"
+- "./spec/models/concerns/schedulable_spec.rb"
+- "./spec/models/concerns/token_authenticatable_spec.rb"
+- "./spec/models/design_management/version_spec.rb"
+- "./spec/models/environment_status_spec.rb"
+- "./spec/models/hooks/system_hook_spec.rb"
+- "./spec/models/issue_spec.rb"
+- "./spec/models/members/project_member_spec.rb"
+- "./spec/models/merge_request_spec.rb"
+- "./spec/models/plan_spec.rb"
+- "./spec/models/project_feature_usage_spec.rb"
+- "./spec/models/project_spec.rb"
+- "./spec/models/spam_log_spec.rb"
+- "./spec/models/user_spec.rb"
+- "./spec/models/user_status_spec.rb"
+- "./spec/policies/ci/build_policy_spec.rb"
+- "./spec/policies/ci/pipeline_policy_spec.rb"
+- "./spec/presenters/ci/stage_presenter_spec.rb"
+- "./spec/requests/api/admin/ci/variables_spec.rb"
+- "./spec/requests/api/admin/plan_limits_spec.rb"
+- "./spec/requests/api/ci/jobs_spec.rb"
+- "./spec/requests/api/ci/pipeline_schedules_spec.rb"
+- "./spec/requests/api/ci/pipelines_spec.rb"
+- "./spec/requests/api/ci/runner/runners_post_spec.rb"
+- "./spec/requests/api/ci/runners_spec.rb"
+- "./spec/requests/api/commits_spec.rb"
+- "./spec/requests/api/commit_statuses_spec.rb"
+- "./spec/requests/api/graphql/ci/runner_spec.rb"
+- "./spec/requests/api/graphql/mutations/ci/pipeline_destroy_spec.rb"
+- "./spec/requests/api/graphql/project/issues_spec.rb"
+- "./spec/requests/api/graphql/project/merge_request_spec.rb"
+- "./spec/requests/api/graphql/project_query_spec.rb"
+- "./spec/requests/api/issues/issues_spec.rb"
+- "./spec/requests/api/merge_requests_spec.rb"
+- "./spec/requests/api/projects_spec.rb"
+- "./spec/requests/api/resource_access_tokens_spec.rb"
+- "./spec/requests/api/users_spec.rb"
+- "./spec/requests/lfs_http_spec.rb"
+- "./spec/requests/projects/cycle_analytics_events_spec.rb"
+- "./spec/serializers/ci/downloadable_artifact_entity_spec.rb"
+- "./spec/serializers/ci/downloadable_artifact_serializer_spec.rb"
+- "./spec/serializers/ci/pipeline_entity_spec.rb"
+- "./spec/serializers/merge_request_poll_cached_widget_entity_spec.rb"
+- "./spec/serializers/merge_request_poll_widget_entity_spec.rb"
+- "./spec/serializers/merge_request_widget_entity_spec.rb"
+- "./spec/serializers/pipeline_details_entity_spec.rb"
+- "./spec/serializers/pipeline_serializer_spec.rb"
+- "./spec/serializers/stage_entity_spec.rb"
+- "./spec/serializers/stage_serializer_spec.rb"
+- "./spec/serializers/test_report_entity_spec.rb"
+- "./spec/serializers/test_report_summary_entity_spec.rb"
+- "./spec/serializers/test_suite_entity_spec.rb"
+- "./spec/serializers/test_suite_summary_entity_spec.rb"
+- "./spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb"
+- "./spec/services/ci/compare_accessibility_reports_service_spec.rb"
+- "./spec/services/ci/compare_codequality_reports_service_spec.rb"
+- "./spec/services/ci/compare_reports_base_service_spec.rb"
+- "./spec/services/ci/compare_test_reports_service_spec.rb"
+- "./spec/services/ci/create_pipeline_service/environment_spec.rb"
+- "./spec/services/ci/create_pipeline_service_spec.rb"
+- "./spec/services/ci/destroy_pipeline_service_spec.rb"
+- "./spec/services/ci/disable_user_pipeline_schedules_service_spec.rb"
+- "./spec/services/ci/ensure_stage_service_spec.rb"
+- "./spec/services/ci/expire_pipeline_cache_service_spec.rb"
+- "./spec/services/ci/generate_codequality_mr_diff_report_service_spec.rb"
+- "./spec/services/ci/generate_coverage_reports_service_spec.rb"
+- "./spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb"
+- "./spec/services/ci/job_artifacts/destroy_associations_service_spec.rb"
+- "./spec/services/ci/job_artifacts/destroy_batch_service_spec.rb"
+- "./spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb"
+- "./spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb"
+- "./spec/services/ci/pipeline_bridge_status_service_spec.rb"
+- "./spec/services/ci/pipeline_processing/shared_processing_service.rb"
+- "./spec/services/ci/pipelines/add_job_service_spec.rb"
+- "./spec/services/ci/pipeline_schedule_service_spec.rb"
+- "./spec/services/ci/pipeline_trigger_service_spec.rb"
+- "./spec/services/ci/register_job_service_spec.rb"
+- "./spec/services/ci/retry_build_service_spec.rb"
+- "./spec/services/ci/test_failure_history_service_spec.rb"
+- "./spec/services/ci/update_instance_variables_service_spec.rb"
+- "./spec/services/deployments/update_environment_service_spec.rb"
+- "./spec/services/design_management/save_designs_service_spec.rb"
+- "./spec/services/environments/stop_service_spec.rb"
+- "./spec/services/groups/transfer_service_spec.rb"
+- "./spec/services/integrations/test/project_service_spec.rb"
+- "./spec/services/issuable/destroy_service_spec.rb"
+- "./spec/services/issue_links/list_service_spec.rb"
+- "./spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb"
+- "./spec/services/merge_requests/mergeability_check_service_spec.rb"
+- "./spec/services/merge_requests/post_merge_service_spec.rb"
+- "./spec/services/merge_requests/refresh_service_spec.rb"
+- "./spec/services/pages/migrate_from_legacy_storage_service_spec.rb"
+- "./spec/services/projects/destroy_service_spec.rb"
+- "./spec/services/projects/transfer_service_spec.rb"
+- "./spec/services/projects/update_service_spec.rb"
+- "./spec/services/releases/create_service_spec.rb"
+- "./spec/services/resource_access_tokens/revoke_service_spec.rb"
+- "./spec/services/todo_service_spec.rb"
+- "./spec/services/users/activity_service_spec.rb"
+- "./spec/services/users/destroy_service_spec.rb"
+- "./spec/services/users/reject_service_spec.rb"
+- "./spec/support/shared_contexts/email_shared_context.rb"
+- "./spec/support/shared_examples/controllers/access_tokens_controller_shared_examples.rb"
+- "./spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb"
+- "./spec/support/shared_examples/integrations/test_examples.rb"
+- "./spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb"
+- "./spec/support/shared_examples/models/cluster_application_status_shared_examples.rb"
+- "./spec/support/shared_examples/models/cluster_application_version_shared_examples.rb"
+- "./spec/support/shared_examples/models/concerns/cron_schedulable_shared_examples.rb"
+- "./spec/support/shared_examples/models/concerns/limitable_shared_examples.rb"
+- "./spec/support/shared_examples/models/update_highest_role_shared_examples.rb"
+- "./spec/support/shared_examples/models/update_project_statistics_shared_examples.rb"
+- "./spec/support/shared_examples/models/with_uploads_shared_examples.rb"
+- "./spec/support/shared_examples/requests/api/status_shared_examples.rb"
+- "./spec/support/shared_examples/requests/lfs_http_shared_examples.rb"
+- "./spec/support/shared_examples/services/destroy_label_links_shared_examples.rb"
+- "./spec/support/shared_examples/services/issuable/destroy_service_shared_examples.rb"
+- "./spec/support/shared_examples/services/notification_service_shared_examples.rb"
+- "./spec/support/shared_examples/services/wiki_pages/create_service_shared_examples.rb"
+- "./spec/support/shared_examples/services/wiki_pages/destroy_service_shared_examples.rb"
+- "./spec/support/shared_examples/services/wiki_pages/update_service_shared_examples.rb"
+- "./spec/support/shared_examples/workers/idempotency_shared_examples.rb"
+- "./spec/views/projects/artifacts/_artifact.html.haml_spec.rb"
+- "./spec/views/projects/commits/_commit.html.haml_spec.rb"
+- "./spec/views/projects/jobs/_build.html.haml_spec.rb"
+- "./spec/views/projects/jobs/_generic_commit_status.html.haml_spec.rb"
+- "./spec/views/projects/merge_requests/creations/_new_submit.html.haml_spec.rb"
+- "./spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb"
+- "./spec/views/shared/runners/_runner_details.html.haml_spec.rb"
+- "./spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb"
+- "./spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb"
+- "./spec/workers/container_expiration_policy_worker_spec.rb"
+- "./spec/workers/merge_requests/create_pipeline_worker_spec.rb"
+- "./spec/workers/pipeline_metrics_worker_spec.rb"
+- "./spec/workers/pipeline_schedule_worker_spec.rb"
+- "./spec/workers/releases/create_evidence_worker_spec.rb"
+- "./spec/workers/remove_expired_members_worker_spec.rb"
+- "./spec/workers/repository_cleanup_worker_spec.rb"
+- "./spec/workers/stage_update_worker_spec.rb"
+- "./spec/workers/stuck_merge_jobs_worker_spec.rb"
+- "./ee/spec/requests/api/graphql/project/pipelines/dast_profile_spec.rb"
+- "./spec/services/projects/overwrite_project_service_spec.rb"
diff --git a/spec/support/database/cross-join-allowlist.yml b/spec/support/database/cross-join-allowlist.yml
index 45e95cf3262..c209d275fc8 100644
--- a/spec/support/database/cross-join-allowlist.yml
+++ b/spec/support/database/cross-join-allowlist.yml
@@ -1,197 +1,58 @@
-- "./ee/spec/controllers/operations_controller_spec.rb"
-- "./ee/spec/controllers/projects/issues_controller_spec.rb"
-- "./ee/spec/controllers/projects/security/vulnerabilities_controller_spec.rb"
- "./ee/spec/features/ci/ci_minutes_spec.rb"
-- "./ee/spec/features/merge_request/user_merges_immediately_spec.rb"
-- "./ee/spec/features/merge_request/user_sees_merge_widget_spec.rb"
- "./ee/spec/features/merge_trains/two_merge_requests_on_train_spec.rb"
- "./ee/spec/features/merge_trains/user_adds_merge_request_to_merge_train_spec.rb"
-- "./ee/spec/features/merge_trains/user_adds_to_merge_train_when_pipeline_succeeds_spec.rb"
-- "./ee/spec/features/projects/pipelines/pipeline_spec.rb"
-- "./ee/spec/features/projects/settings/auto_rollback_spec.rb"
-- "./ee/spec/features/projects/settings/pipeline_subscriptions_spec.rb"
-- "./ee/spec/features/projects/settings/protected_environments_spec.rb"
- "./ee/spec/finders/ee/namespaces/projects_finder_spec.rb"
-- "./ee/spec/finders/group_projects_finder_spec.rb"
-- "./ee/spec/finders/security/findings_finder_spec.rb"
- "./ee/spec/graphql/ee/resolvers/namespace_projects_resolver_spec.rb"
-- "./ee/spec/lib/analytics/devops_adoption/snapshot_calculator_spec.rb"
-- "./ee/spec/lib/ee/gitlab/background_migration/migrate_approver_to_approval_rules_spec.rb"
-- "./ee/spec/lib/ee/gitlab/background_migration/migrate_security_scans_spec.rb"
-- "./ee/spec/lib/ee/gitlab/background_migration/populate_latest_pipeline_ids_spec.rb"
-- "./ee/spec/lib/ee/gitlab/background_migration/populate_resolved_on_default_branch_column_spec.rb"
-- "./ee/spec/lib/ee/gitlab/background_migration/populate_uuids_for_security_findings_spec.rb"
-- "./ee/spec/lib/ee/gitlab/background_migration/populate_vulnerability_feedback_pipeline_id_spec.rb"
-- "./ee/spec/lib/ee/gitlab/usage_data_spec.rb"
-- "./ee/spec/migrations/schedule_populate_resolved_on_default_branch_column_spec.rb"
-- "./ee/spec/models/ci/build_spec.rb"
- "./ee/spec/models/ci/minutes/project_monthly_usage_spec.rb"
-- "./ee/spec/models/ci/pipeline_spec.rb"
-- "./ee/spec/models/ee/vulnerability_spec.rb"
-- "./ee/spec/models/merge_request_spec.rb"
- "./ee/spec/models/project_spec.rb"
- "./ee/spec/models/security/finding_spec.rb"
- "./ee/spec/models/security/scan_spec.rb"
-- "./ee/spec/presenters/ci/pipeline_presenter_spec.rb"
- "./ee/spec/requests/api/ci/minutes_spec.rb"
- "./ee/spec/requests/api/graphql/ci/minutes/usage_spec.rb"
-- "./ee/spec/requests/api/graphql/mutations/environments/canary_ingress/update_spec.rb"
-- "./ee/spec/requests/api/graphql/mutations/vulnerabilities/create_external_issue_link_spec.rb"
-- "./ee/spec/requests/api/graphql/project/pipeline/security_report_summary_spec.rb"
-- "./ee/spec/requests/api/graphql/vulnerabilities/location_spec.rb"
-- "./ee/spec/requests/api/groups_spec.rb"
- "./ee/spec/requests/api/namespaces_spec.rb"
-- "./ee/spec/requests/api/vulnerability_findings_spec.rb"
-- "./ee/spec/serializers/dashboard_environment_entity_spec.rb"
-- "./ee/spec/serializers/dashboard_environments_serializer_spec.rb"
-- "./ee/spec/services/auto_merge/add_to_merge_train_when_pipeline_succeeds_service_spec.rb"
-- "./ee/spec/services/ci/create_pipeline_service/runnable_builds_spec.rb"
- "./ee/spec/services/ci/minutes/additional_packs/change_namespace_service_spec.rb"
- "./ee/spec/services/ci/minutes/additional_packs/create_service_spec.rb"
- "./ee/spec/services/ci/minutes/refresh_cached_data_service_spec.rb"
-- "./ee/spec/services/ci/process_pipeline_service_spec.rb"
-- "./ee/spec/services/ci/trigger_downstream_subscription_service_spec.rb"
-- "./ee/spec/services/clear_namespace_shared_runners_minutes_service_spec.rb"
-- "./ee/spec/services/deployments/auto_rollback_service_spec.rb"
-- "./ee/spec/services/ee/ci/job_artifacts/destroy_all_expired_service_spec.rb"
-- "./ee/spec/services/ee/ci/job_artifacts/destroy_batch_service_spec.rb"
-- "./ee/spec/services/ee/issues/build_from_vulnerability_service_spec.rb"
-- "./ee/spec/services/ee/merge_requests/create_pipeline_service_spec.rb"
-- "./ee/spec/services/ee/merge_requests/refresh_service_spec.rb"
-- "./ee/spec/services/security/report_summary_service_spec.rb"
-- "./ee/spec/services/security/vulnerability_counting_service_spec.rb"
-- "./ee/spec/workers/scan_security_report_secrets_worker_spec.rb"
-- "./ee/spec/workers/security/store_scans_worker_spec.rb"
- "./spec/controllers/admin/runners_controller_spec.rb"
-- "./spec/controllers/groups/runners_controller_spec.rb"
- "./spec/controllers/groups/settings/ci_cd_controller_spec.rb"
-- "./spec/controllers/projects/logs_controller_spec.rb"
-- "./spec/controllers/projects/merge_requests_controller_spec.rb"
-- "./spec/controllers/projects/runners_controller_spec.rb"
-- "./spec/controllers/projects/serverless/functions_controller_spec.rb"
- "./spec/controllers/projects/settings/ci_cd_controller_spec.rb"
- "./spec/features/admin/admin_runners_spec.rb"
-- "./spec/features/groups/settings/ci_cd_spec.rb"
- "./spec/features/ide/user_opens_merge_request_spec.rb"
-- "./spec/features/merge_request/user_merges_immediately_spec.rb"
-- "./spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb"
-- "./spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb"
-- "./spec/features/merge_request/user_resolves_wip_mr_spec.rb"
-- "./spec/features/merge_request/user_sees_deployment_widget_spec.rb"
- "./spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb"
-- "./spec/features/merge_request/user_sees_merge_widget_spec.rb"
-- "./spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb"
-- "./spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb"
-- "./spec/features/merge_request/user_sees_pipelines_spec.rb"
-- "./spec/features/project_group_variables_spec.rb"
-- "./spec/features/project_variables_spec.rb"
-- "./spec/features/projects/badges/list_spec.rb"
-- "./spec/features/projects/environments_pod_logs_spec.rb"
- "./spec/features/projects/infrastructure_registry_spec.rb"
-- "./spec/features/projects/jobs_spec.rb"
-- "./spec/features/projects/package_files_spec.rb"
-- "./spec/features/projects/pipelines/pipeline_spec.rb"
-- "./spec/features/projects/pipelines/pipelines_spec.rb"
-- "./spec/features/projects/serverless/functions_spec.rb"
-- "./spec/features/projects/settings/pipelines_settings_spec.rb"
-- "./spec/features/runners_spec.rb"
-- "./spec/features/security/project/internal_access_spec.rb"
-- "./spec/features/security/project/private_access_spec.rb"
-- "./spec/features/security/project/public_access_spec.rb"
-- "./spec/features/triggers_spec.rb"
-- "./spec/finders/ci/pipelines_finder_spec.rb"
- "./spec/finders/ci/pipelines_for_merge_request_finder_spec.rb"
- "./spec/finders/ci/runners_finder_spec.rb"
-- "./spec/finders/clusters/knative_services_finder_spec.rb"
-- "./spec/finders/projects/serverless/functions_finder_spec.rb"
- "./spec/frontend/fixtures/runner.rb"
-- "./spec/graphql/mutations/ci/runner/delete_spec.rb"
- "./spec/graphql/resolvers/ci/group_runners_resolver_spec.rb"
-- "./spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb"
-- "./spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb"
-- "./spec/graphql/types/ci/job_token_scope_type_spec.rb"
-- "./spec/helpers/packages_helper_spec.rb"
- "./spec/lib/api/entities/package_spec.rb"
-- "./spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb"
-- "./spec/lib/gitlab/prometheus/query_variables_spec.rb"
-- "./spec/mailers/emails/pipelines_spec.rb"
+- "./spec/lib/gitlab/background_migration/copy_ci_builds_columns_to_security_scans_spec.rb"
+- "./spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb"
- "./spec/migrations/20210907211557_finalize_ci_builds_bigint_conversion_spec.rb"
-- "./spec/migrations/cleanup_legacy_artifact_migration_spec.rb"
-- "./spec/migrations/migrate_protected_attribute_to_pending_builds_spec.rb"
-- "./spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb"
-- "./spec/migrations/schedule_migrate_security_scans_spec.rb"
-- "./spec/models/ci/build_spec.rb"
-- "./spec/models/ci/job_artifact_spec.rb"
-- "./spec/models/ci/job_token/scope_spec.rb"
+- "./spec/migrations/associate_existing_dast_builds_with_variables_spec.rb"
+- "./spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb"
+- "./spec/migrations/schedule_pages_metadata_migration_spec.rb"
- "./spec/models/ci/pipeline_spec.rb"
- "./spec/models/ci/runner_spec.rb"
-- "./spec/models/clusters/applications/runner_spec.rb"
-- "./spec/models/deployment_spec.rb"
-- "./spec/models/environment_spec.rb"
- "./spec/models/merge_request_spec.rb"
- "./spec/models/project_spec.rb"
- "./spec/models/user_spec.rb"
-- "./spec/presenters/ci/build_runner_presenter_spec.rb"
-- "./spec/presenters/ci/pipeline_presenter_spec.rb"
- "./spec/presenters/packages/detail/package_presenter_spec.rb"
-- "./spec/requests/api/ci/pipelines_spec.rb"
-- "./spec/requests/api/ci/runner/jobs_request_post_spec.rb"
- "./spec/requests/api/ci/runner/runners_post_spec.rb"
- "./spec/requests/api/ci/runners_spec.rb"
-- "./spec/requests/api/commit_statuses_spec.rb"
+- "./spec/requests/api/graphql/ci/runner_spec.rb"
- "./spec/requests/api/graphql/group_query_spec.rb"
-- "./spec/requests/api/graphql/merge_request/merge_request_spec.rb"
-- "./spec/requests/api/graphql/mutations/ci/job_token_scope/add_project_spec.rb"
-- "./spec/requests/api/graphql/mutations/ci/job_token_scope/remove_project_spec.rb"
-- "./spec/requests/api/graphql/mutations/environments/canary_ingress/update_spec.rb"
-- "./spec/requests/api/graphql/mutations/merge_requests/create_spec.rb"
- "./spec/requests/api/graphql/packages/composer_spec.rb"
- "./spec/requests/api/graphql/packages/conan_spec.rb"
- "./spec/requests/api/graphql/packages/maven_spec.rb"
- "./spec/requests/api/graphql/packages/nuget_spec.rb"
- "./spec/requests/api/graphql/packages/package_spec.rb"
- "./spec/requests/api/graphql/packages/pypi_spec.rb"
-- "./spec/requests/api/graphql/project/merge_request/pipelines_spec.rb"
-- "./spec/requests/api/graphql/project/merge_request_spec.rb"
-- "./spec/requests/api/graphql/project/merge_requests_spec.rb"
-- "./spec/requests/api/graphql/project/pipeline_spec.rb"
-- "./spec/requests/api/merge_requests_spec.rb"
- "./spec/requests/api/package_files_spec.rb"
-- "./spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb"
-- "./spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb"
-- "./spec/services/ci/create_pipeline_service/needs_spec.rb"
-- "./spec/services/ci/create_pipeline_service_spec.rb"
-- "./spec/services/ci/destroy_pipeline_service_spec.rb"
-- "./spec/services/ci/expire_pipeline_cache_service_spec.rb"
-- "./spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb"
-- "./spec/services/ci/job_artifacts/destroy_associations_service_spec.rb"
-- "./spec/services/ci/job_artifacts/destroy_batch_service_spec.rb"
-- "./spec/services/ci/pipeline_processing/shared_processing_service.rb"
-- "./spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb"
-- "./spec/services/ci/register_job_service_spec.rb"
-- "./spec/services/clusters/applications/prometheus_config_service_spec.rb"
-- "./spec/services/deployments/older_deployments_drop_service_spec.rb"
-- "./spec/services/environments/auto_stop_service_spec.rb"
- "./spec/services/environments/stop_service_spec.rb"
-- "./spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb"
-- "./spec/services/merge_requests/create_service_spec.rb"
- "./spec/services/merge_requests/post_merge_service_spec.rb"
-- "./spec/services/merge_requests/refresh_service_spec.rb"
-- "./spec/support/prometheus/additional_metrics_shared_examples.rb"
-- "./spec/support/shared_examples/ci/pipeline_email_shared_examples.rb"
- "./spec/support/shared_examples/features/packages_shared_examples.rb"
-- "./spec/support/shared_examples/features/search_settings_shared_examples.rb"
-- "./spec/support/shared_examples/features/variable_list_shared_examples.rb"
- "./spec/support/shared_examples/models/concerns/limitable_shared_examples.rb"
-- "./spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb"
- "./spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb"
- "./spec/support/shared_examples/requests/api/graphql/packages/package_details_shared_examples.rb"
-- "./spec/support/shared_examples/requests/api/logging_application_context_shared_examples.rb"
-- "./spec/support/shared_examples/requests/api/status_shared_examples.rb"
- "./spec/support/shared_examples/requests/graphql_shared_examples.rb"
-- "./spec/support/shared_examples/services/onboarding_progress_shared_examples.rb"
- "./spec/support/shared_examples/services/packages_shared_examples.rb"
-- "./spec/support/shared_examples/workers/idempotency_shared_examples.rb"
-- "./spec/tasks/gitlab/generate_sample_prometheus_data_spec.rb"
-- "./spec/workers/pipeline_process_worker_spec.rb"
-- "./spec/workers/pipeline_schedule_worker_spec.rb"
diff --git a/spec/support/database/multiple_databases.rb b/spec/support/database/multiple_databases.rb
index 8ce642a682c..5e1ae60536f 100644
--- a/spec/support/database/multiple_databases.rb
+++ b/spec/support/database/multiple_databases.rb
@@ -5,5 +5,57 @@ module Database
def skip_if_multiple_databases_not_setup
skip 'Skipping because multiple databases not set up' unless Gitlab::Database.has_config?(:ci)
end
+
+ # The usage of this method switches temporarily used `connection_handler`
+ # allowing full manipulation of ActiveRecord::Base connections without
+ # having side effects like:
+ # - misaligned transactions since this is managed by `BeforeAllAdapter`
+ # - removal of primary connections
+ #
+ # The execution within a block ensures safe cleanup of all allocated resources.
+ #
+ # rubocop:disable Database/MultipleDatabases
+ def with_reestablished_active_record_base(reconnect: true)
+ connection_classes = ActiveRecord::Base.connection_handler.connection_pool_names.map(&:constantize).to_h do |klass|
+ [klass, klass.connection_db_config]
+ end
+
+ original_handler = ActiveRecord::Base.connection_handler
+ new_handler = ActiveRecord::ConnectionAdapters::ConnectionHandler.new
+ ActiveRecord::Base.connection_handler = new_handler
+
+ if reconnect
+ connection_classes.each { |klass, db_config| klass.establish_connection(db_config) }
+ end
+
+ yield
+ ensure
+ ActiveRecord::Base.connection_handler = original_handler
+ new_handler&.clear_all_connections!
+ end
+ # rubocop:enable Database/MultipleDatabases
+ end
+
+ module ActiveRecordBaseEstablishConnection
+ def establish_connection(*args)
+ # rubocop:disable Database/MultipleDatabases
+ if connected? && connection&.transaction_open? && ActiveRecord::Base.connection_handler == ActiveRecord::Base.default_connection_handler
+ raise "Cannot re-establish '#{self}.establish_connection' within an open transaction (#{connection&.open_transactions.to_i}). " \
+ "Use `with_reestablished_active_record_base` instead or add `:reestablished_active_record_base` to rspec context."
+ end
+ # rubocop:enable Database/MultipleDatabases
+
+ super
+ end
end
end
+
+RSpec.configure do |config|
+ config.around(:each, :reestablished_active_record_base) do |example|
+ with_reestablished_active_record_base(reconnect: example.metadata.fetch(:reconnect, true)) do
+ example.run
+ end
+ end
+end
+
+ActiveRecord::Base.singleton_class.prepend(::Database::ActiveRecordBaseEstablishConnection) # rubocop:disable Database/MultipleDatabases
diff --git a/spec/support/database/prevent_cross_database_modification.rb b/spec/support/database/prevent_cross_database_modification.rb
index b4c968e3c41..7ded85b65ce 100644
--- a/spec/support/database/prevent_cross_database_modification.rb
+++ b/spec/support/database/prevent_cross_database_modification.rb
@@ -33,8 +33,10 @@ module Database
end
def cleanup_with_cross_database_modification_prevented
- ActiveSupport::Notifications.unsubscribe(PreventCrossDatabaseModification.cross_database_context[:subscriber])
- PreventCrossDatabaseModification.cross_database_context[:enabled] = false
+ if PreventCrossDatabaseModification.cross_database_context
+ ActiveSupport::Notifications.unsubscribe(PreventCrossDatabaseModification.cross_database_context[:subscriber])
+ PreventCrossDatabaseModification.cross_database_context[:enabled] = false
+ end
end
end
@@ -55,8 +57,11 @@ module Database
end
def self.prevent_cross_database_modification!(connection, sql)
+ return unless cross_database_context
return unless cross_database_context[:enabled]
+ return if connection.pool.instance_of?(ActiveRecord::ConnectionAdapters::NullPool)
+
database = connection.pool.db_config.name
if sql.start_with?('SAVEPOINT')
@@ -74,6 +79,8 @@ module Database
return if cross_database_context[:transaction_depth_by_db].values.all?(&:zero?)
+ # PgQuery might fail in some cases due to limited nesting:
+ # https://github.com/pganalyze/pg_query/issues/209
parsed_query = PgQuery.parse(sql)
tables = sql.downcase.include?(' for update') ? parsed_query.tables : parsed_query.dml_tables
@@ -87,7 +94,8 @@ module Database
if schemas.many?
raise Database::PreventCrossDatabaseModification::CrossDatabaseModificationAcrossUnsupportedTablesError,
"Cross-database data modification of '#{schemas.to_a.join(", ")}' were detected within " \
- "a transaction modifying the '#{all_tables.to_a.join(", ")}'"
+              "a transaction modifying the '#{all_tables.to_a.join(", ")}' tables. " \
+ "Please refer to https://docs.gitlab.com/ee/development/database/multiple_databases.html#removing-cross-database-transactions for details on how to resolve this exception."
end
end
end
@@ -96,16 +104,20 @@ end
Gitlab::Database.singleton_class.prepend(
Database::PreventCrossDatabaseModification::GitlabDatabaseMixin)
+CROSS_DB_MODIFICATION_ALLOW_LIST = Set.new(YAML.load_file(File.join(__dir__, 'cross-database-modification-allowlist.yml'))).freeze
+
RSpec.configure do |config|
config.include(::Database::PreventCrossDatabaseModification::SpecHelpers)
# Using before and after blocks because the around block causes problems with the let_it_be
# record creations. It makes an extra savepoint which breaks the transaction count logic.
- config.before(:each, :prevent_cross_database_modification) do
- with_cross_database_modification_prevented
+ config.before do |example_file|
+ if CROSS_DB_MODIFICATION_ALLOW_LIST.exclude?(example_file.file_path)
+ with_cross_database_modification_prevented
+ end
end
- config.after(:each, :prevent_cross_database_modification) do
+ config.after do |example_file|
cleanup_with_cross_database_modification_prevented
end
end
diff --git a/spec/support/database/prevent_cross_joins.rb b/spec/support/database/prevent_cross_joins.rb
index 4b78aa9014c..f5ed2a8f22e 100644
--- a/spec/support/database/prevent_cross_joins.rb
+++ b/spec/support/database/prevent_cross_joins.rb
@@ -22,9 +22,10 @@ module Database
CrossJoinAcrossUnsupportedTablesError = Class.new(StandardError)
ALLOW_THREAD_KEY = :allow_cross_joins_across_databases
+ ALLOW_ANNOTATE_KEY = ALLOW_THREAD_KEY.to_s.freeze
def self.validate_cross_joins!(sql)
- return if Thread.current[ALLOW_THREAD_KEY]
+ return if Thread.current[ALLOW_THREAD_KEY] || sql.include?(ALLOW_ANNOTATE_KEY)
# Allow spec/support/database_cleaner.rb queries to disable/enable triggers for many tables
# See https://gitlab.com/gitlab-org/gitlab/-/issues/339396
@@ -32,21 +33,14 @@ module Database
# PgQuery might fail in some cases due to limited nesting:
# https://github.com/pganalyze/pg_query/issues/209
- #
- # Also, we disable GC while parsing because of https://github.com/pganalyze/pg_query/issues/226
- begin
- GC.disable
- tables = PgQuery.parse(sql).tables
- ensure
- GC.enable
- end
+ tables = PgQuery.parse(sql).tables
schemas = Database::GitlabSchema.table_schemas(tables)
if schemas.include?(:gitlab_ci) && schemas.include?(:gitlab_main)
Thread.current[:has_cross_join_exception] = true
raise CrossJoinAcrossUnsupportedTablesError,
- "Unsupported cross-join across '#{tables.join(", ")}' modifying '#{schemas.to_a.join(", ")}' discovered " \
+ "Unsupported cross-join across '#{tables.join(", ")}' querying '#{schemas.to_a.join(", ")}' discovered " \
"when executing query '#{sql}'. Please refer to https://docs.gitlab.com/ee/development/database/multiple_databases.html#removing-joins-between-ci_-and-non-ci_-tables for details on how to resolve this exception."
end
end
@@ -63,6 +57,10 @@ module Database
ensure
ActiveSupport::Notifications.unsubscribe(subscriber) if subscriber
end
+
+ def allow_cross_joins_across_databases(url:, &block)
+ ::Gitlab::Database.allow_cross_joins_across_databases(url: url, &block)
+ end
end
module GitlabDatabaseMixin
@@ -75,12 +73,21 @@ module Database
Thread.current[ALLOW_THREAD_KEY] = old_value
end
end
+
+ module ActiveRecordRelationMixin
+ def allow_cross_joins_across_databases(url:)
+ super.annotate(ALLOW_ANNOTATE_KEY)
+ end
+ end
end
end
Gitlab::Database.singleton_class.prepend(
Database::PreventCrossJoins::GitlabDatabaseMixin)
+ActiveRecord::Relation.prepend(
+ Database::PreventCrossJoins::ActiveRecordRelationMixin)
+
ALLOW_LIST = Set.new(YAML.load_file(File.join(__dir__, 'cross-join-allowlist.yml'))).freeze
RSpec.configure do |config|
diff --git a/spec/support/database_cleaner.rb b/spec/support/database_cleaner.rb
index b31881e3082..8f706fdebc9 100644
--- a/spec/support/database_cleaner.rb
+++ b/spec/support/database_cleaner.rb
@@ -17,32 +17,9 @@ RSpec.configure do |config|
delete_from_all_tables!(except: ['work_item_types'])
# Postgres maximum number of columns in a table is 1600 (https://github.com/postgres/postgres/blob/de41869b64d57160f58852eab20a27f248188135/src/include/access/htup_details.h#L23-L47).
- # And since:
- # "The DROP COLUMN form does not physically remove the column, but simply makes
- # it invisible to SQL operations. Subsequent insert and update operations in the
- # table will store a null value for the column. Thus, dropping a column is quick
- # but it will not immediately reduce the on-disk size of your table, as the space
- # occupied by the dropped column is not reclaimed.
- # The space will be reclaimed over time as existing rows are updated."
- # according to https://www.postgresql.org/docs/current/sql-altertable.html.
# We drop and recreate the database if any table has more than 1200 columns, just to be safe.
- max_allowed_columns = 1200
- tables_with_more_than_allowed_columns =
- ApplicationRecord.connection.execute("SELECT attrelid::regclass::text AS table, COUNT(*) AS column_count FROM pg_attribute GROUP BY attrelid HAVING COUNT(*) > #{max_allowed_columns}")
-
- if tables_with_more_than_allowed_columns.any?
- tables_with_more_than_allowed_columns.each do |result|
- puts "The #{result['table']} table has #{result['column_count']} columns."
- end
- puts "Recreating the database"
- start = Gitlab::Metrics::System.monotonic_time
-
- ActiveRecord::Tasks::DatabaseTasks.drop_current
- ActiveRecord::Tasks::DatabaseTasks.create_current
- ActiveRecord::Tasks::DatabaseTasks.load_schema_current
- ActiveRecord::Tasks::DatabaseTasks.migrate
-
- puts "Database re-creation done in #{Gitlab::Metrics::System.monotonic_time - start}"
+ if any_connection_class_with_more_than_allowed_columns?
+ recreate_all_databases!
end
end
diff --git a/spec/support/database_load_balancing.rb b/spec/support/database_load_balancing.rb
index f22c69ea613..014575e8a82 100644
--- a/spec/support/database_load_balancing.rb
+++ b/spec/support/database_load_balancing.rb
@@ -1,22 +1,30 @@
# frozen_string_literal: true
RSpec.configure do |config|
- config.before(:each, :db_load_balancing) do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
+ config.around(:each, :database_replica) do |example|
+ old_proxies = []
- config = Gitlab::Database::LoadBalancing::Configuration
- .new(ActiveRecord::Base, [Gitlab::Database.main.config['host']])
- lb = ::Gitlab::Database::LoadBalancing::LoadBalancer.new(config)
- proxy = ::Gitlab::Database::LoadBalancing::ConnectionProxy.new(lb)
+ Gitlab::Database::LoadBalancing.base_models.each do |model|
+ config = Gitlab::Database::LoadBalancing::Configuration
+ .new(model, [model.connection_db_config.configuration_hash[:host]])
+ lb = Gitlab::Database::LoadBalancing::LoadBalancer.new(config)
- allow(ActiveRecord::Base).to receive(:load_balancing_proxy).and_return(proxy)
+ old_proxies << [model, model.connection]
- ::Gitlab::Database::LoadBalancing::Session.clear_session
+ model.connection =
+ Gitlab::Database::LoadBalancing::ConnectionProxy.new(lb)
+ end
+
+ Gitlab::Database::LoadBalancing::Session.clear_session
redis_shared_state_cleanup!
- end
- config.after(:each, :db_load_balancing) do
- ::Gitlab::Database::LoadBalancing::Session.clear_session
+ example.run
+
+ Gitlab::Database::LoadBalancing::Session.clear_session
redis_shared_state_cleanup!
+
+ old_proxies.each do |(model, proxy)|
+ model.connection = proxy
+ end
end
end
diff --git a/spec/support/db_cleaner.rb b/spec/support/db_cleaner.rb
index 940ff2751d3..316d645f99f 100644
--- a/spec/support/db_cleaner.rb
+++ b/spec/support/db_cleaner.rb
@@ -2,7 +2,7 @@
module DbCleaner
def all_connection_classes
- ::ActiveRecord::Base.connection_handler.connection_pool_names.map(&:constantize)
+ ::BeforeAllAdapter.all_connection_classes
end
def delete_from_all_tables!(except: [])
@@ -20,6 +20,79 @@ module DbCleaner
DatabaseCleaner[:active_record, { connection: connection_class }]
end
end
+
+ def any_connection_class_with_more_than_allowed_columns?
+ all_connection_classes.any? do |connection_class|
+ more_than_allowed_columns?(connection_class)
+ end
+ end
+
+ def more_than_allowed_columns?(connection_class)
+ # Postgres maximum number of columns in a table is 1600 (https://github.com/postgres/postgres/blob/de41869b64d57160f58852eab20a27f248188135/src/include/access/htup_details.h#L23-L47).
+ # And since:
+ # "The DROP COLUMN form does not physically remove the column, but simply makes
+ # it invisible to SQL operations. Subsequent insert and update operations in the
+ # table will store a null value for the column. Thus, dropping a column is quick
+ # but it will not immediately reduce the on-disk size of your table, as the space
+ # occupied by the dropped column is not reclaimed.
+ # The space will be reclaimed over time as existing rows are updated."
+ # according to https://www.postgresql.org/docs/current/sql-altertable.html.
+ # We drop and recreate the database if any table has more than 1200 columns, just to be safe.
+ max_allowed_columns = 1200
+ tables_with_more_than_allowed_columns = connection_class.connection.execute(<<-SQL)
+ SELECT attrelid::regclass::text AS table, COUNT(*) AS column_count
+ FROM pg_attribute
+ GROUP BY attrelid
+ HAVING COUNT(*) > #{max_allowed_columns}
+ SQL
+
+ tables_with_more_than_allowed_columns.each do |result|
+ puts "The #{result['table']} (#{connection_class.connection_db_config.name}) table has #{result['column_count']} columns."
+ end
+
+ tables_with_more_than_allowed_columns.any?
+ end
+
+ def recreate_all_databases!
+ start = Gitlab::Metrics::System.monotonic_time
+
+ puts "Recreating the database"
+
+ force_disconnect_all_connections!
+
+ ActiveRecord::Tasks::DatabaseTasks.drop_current
+ ActiveRecord::Tasks::DatabaseTasks.create_current
+ ActiveRecord::Tasks::DatabaseTasks.load_schema_current
+
+ # Migrate each database individually
+ with_reestablished_active_record_base do
+ all_connection_classes.each do |connection_class|
+ ActiveRecord::Base.establish_connection(connection_class.connection_db_config)
+
+ ActiveRecord::Tasks::DatabaseTasks.migrate
+ end
+ end
+
+ puts "Databases re-creation done in #{Gitlab::Metrics::System.monotonic_time - start}"
+ end
+
+ def force_disconnect_all_connections!
+ all_connection_classes.each do |connection_class|
+ # We use `connection_pool` to avoid going through
+ # Load Balancer since it does retry ops
+ pool = connection_class.connection_pool
+
+ # Force disconnect https://www.cybertec-postgresql.com/en/terminating-database-connections-in-postgresql/
+ pool.connection.execute(<<-SQL)
+ SELECT pg_terminate_backend(pid)
+ FROM pg_stat_activity
+ WHERE datname = #{pool.connection.quote(pool.db_config.database)}
+ AND pid != pg_backend_pid();
+ SQL
+
+ connection_class.connection_pool.disconnect!
+ end
+ end
end
DbCleaner.prepend_mod_with('DbCleaner')
diff --git a/spec/support/helpers/dependency_proxy_helpers.rb b/spec/support/helpers/dependency_proxy_helpers.rb
index 9413cb93199..75dc09ec159 100644
--- a/spec/support/helpers/dependency_proxy_helpers.rb
+++ b/spec/support/helpers/dependency_proxy_helpers.rb
@@ -34,12 +34,20 @@ module DependencyProxyHelpers
def build_jwt(user = nil, expire_time: nil)
JSONWebToken::HMACToken.new(::Auth::DependencyProxyAuthenticationService.secret).tap do |jwt|
- jwt['user_id'] = user.id if user.is_a?(User)
- jwt['deploy_token'] = user.token if user.is_a?(DeployToken)
- jwt.expire_time = expire_time || jwt.issued_at + 1.minute
+ if block_given?
+ yield(jwt)
+ else
+ jwt['user_id'] = user.id if user.is_a?(User)
+ jwt['deploy_token'] = user.token if user.is_a?(DeployToken)
+ jwt.expire_time = expire_time || jwt.issued_at + 1.minute
+ end
end
end
+ def jwt_token_authorization_headers(jwt)
+ { 'AUTHORIZATION' => "Bearer #{jwt.encoded}" }
+ end
+
private
def registry
diff --git a/spec/support/helpers/feature_flag_helpers.rb b/spec/support/helpers/feature_flag_helpers.rb
index 51ba9039b70..4e57002a7c6 100644
--- a/spec/support/helpers/feature_flag_helpers.rb
+++ b/spec/support/helpers/feature_flag_helpers.rb
@@ -71,7 +71,7 @@ module FeatureFlagHelpers
end
def add_linked_issue_button
- find('.js-issue-count-badge-add-button')
+ find_button 'Add a related issue'
end
def remove_linked_issue_button
diff --git a/spec/support/helpers/javascript_fixtures_helpers.rb b/spec/support/helpers/javascript_fixtures_helpers.rb
index 5174c145a93..fb909008f12 100644
--- a/spec/support/helpers/javascript_fixtures_helpers.rb
+++ b/spec/support/helpers/javascript_fixtures_helpers.rb
@@ -3,6 +3,8 @@
require 'action_dispatch/testing/test_request'
require 'fileutils'
+require_relative '../../../lib/gitlab/popen'
+
module JavaScriptFixturesHelpers
extend ActiveSupport::Concern
include Gitlab::Popen
@@ -25,17 +27,6 @@ module JavaScriptFixturesHelpers
'tmp/tests/frontend/fixtures' + (Gitlab.ee? ? '-ee' : '')
end
- # Public: Removes all fixture files from given directory
- #
- # directory_name - directory of the fixtures (relative to .fixture_root_path)
- #
- def clean_frontend_fixtures(directory_name)
- full_directory_name = File.expand_path(directory_name, fixture_root_path)
- Dir[File.expand_path('*.{html,json,md}', full_directory_name)].each do |file_name|
- FileUtils.rm(file_name)
- end
- end
-
def remove_repository(project)
Gitlab::Shell.new.remove_repository(project.repository_storage, project.disk_path)
end
diff --git a/spec/support/helpers/navbar_structure_helper.rb b/spec/support/helpers/navbar_structure_helper.rb
index 826108a63a5..96e79427278 100644
--- a/spec/support/helpers/navbar_structure_helper.rb
+++ b/spec/support/helpers/navbar_structure_helper.rb
@@ -52,4 +52,12 @@ module NavbarStructureHelper
new_sub_nav_item_name: _('Infrastructure Registry')
)
end
+
+ def insert_infrastructure_google_cloud_nav
+ insert_after_sub_nav_item(
+ _('Terraform'),
+ within: _('Infrastructure'),
+ new_sub_nav_item_name: _('Google Cloud')
+ )
+ end
end
diff --git a/spec/support/helpers/stub_gitlab_calls.rb b/spec/support/helpers/stub_gitlab_calls.rb
index 5ab778c11cb..6f530d57caf 100644
--- a/spec/support/helpers/stub_gitlab_calls.rb
+++ b/spec/support/helpers/stub_gitlab_calls.rb
@@ -79,6 +79,18 @@ module StubGitlabCalls
end
end
+ def stub_container_registry_info(info: {})
+ allow(ContainerRegistry::Client)
+ .to receive(:registry_info)
+ .and_return(info)
+ end
+
+ def stub_container_registry_network_error(client_method:)
+ allow_next_instance_of(ContainerRegistry::Client) do |client|
+ allow(client).to receive(client_method).and_raise(::Faraday::Error, nil, nil)
+ end
+ end
+
def stub_commonmark_sourcepos_disabled
allow_any_instance_of(Banzai::Filter::MarkdownEngines::CommonMark)
.to receive(:render_options)
diff --git a/spec/support/helpers/usage_data_helpers.rb b/spec/support/helpers/usage_data_helpers.rb
index b1a9aade043..5ead1813439 100644
--- a/spec/support/helpers/usage_data_helpers.rb
+++ b/spec/support/helpers/usage_data_helpers.rb
@@ -54,15 +54,8 @@ module UsageDataHelpers
clusters_platforms_eks
clusters_platforms_gke
clusters_platforms_user
- clusters_applications_helm
- clusters_applications_ingress
- clusters_applications_cert_managers
- clusters_applications_prometheus
- clusters_applications_crossplane
- clusters_applications_runner
- clusters_applications_knative
- clusters_applications_elastic_stack
- clusters_applications_jupyter
+ clusters_integrations_elastic_stack
+ clusters_integrations_prometheus
clusters_management_project
in_review_folder
grafana_integrated_projects
@@ -112,6 +105,7 @@ module UsageDataHelpers
projects_with_expiration_policy_enabled_with_older_than_set_to_7d
projects_with_expiration_policy_enabled_with_older_than_set_to_14d
projects_with_expiration_policy_enabled_with_older_than_set_to_30d
+ projects_with_expiration_policy_enabled_with_older_than_set_to_60d
projects_with_expiration_policy_enabled_with_older_than_set_to_90d
projects_with_expiration_policy_enabled_with_cadence_set_to_1d
projects_with_expiration_policy_enabled_with_cadence_set_to_7d
diff --git a/spec/support/matchers/be_request_urgency.rb b/spec/support/matchers/be_request_urgency.rb
new file mode 100644
index 00000000000..e88f157366b
--- /dev/null
+++ b/spec/support/matchers/be_request_urgency.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+RSpec::Matchers.define :be_request_urgency do |expected|
+ match do |actual|
+ actual.is_a?(::Gitlab::EndpointAttributes::Config::RequestUrgency) &&
+ actual.name == expected
+ end
+end
diff --git a/spec/support/matchers/graphql_matchers.rb b/spec/support/matchers/graphql_matchers.rb
index 904b7efdd7f..dcaec176687 100644
--- a/spec/support/matchers/graphql_matchers.rb
+++ b/spec/support/matchers/graphql_matchers.rb
@@ -3,14 +3,30 @@
RSpec::Matchers.define_negated_matcher :be_nullable, :be_non_null
RSpec::Matchers.define :require_graphql_authorizations do |*expected|
+ def permissions_for(klass)
+ if klass.respond_to?(:required_permissions)
+ klass.required_permissions
+ else
+ [klass.to_graphql.metadata[:authorize]]
+ end
+ end
+
match do |klass|
- permissions = if klass.respond_to?(:required_permissions)
- klass.required_permissions
- else
- [klass.to_graphql.metadata[:authorize]]
- end
+ actual = permissions_for(klass)
+
+ expect(actual).to match_array(expected)
+ end
+
+ failure_message do |klass|
+ actual = permissions_for(klass)
+ missing = actual - expected
+ extra = expected - actual
- expect(permissions).to eq(expected)
+ message = []
+ message << "is missing permissions: #{missing.inspect}" if missing.any?
+ message << "contained unexpected permissions: #{extra.inspect}" if extra.any?
+
+ message.join("\n")
end
end
diff --git a/spec/support/matchers/markdown_matchers.rb b/spec/support/matchers/markdown_matchers.rb
index dfdb5bc01ae..f01c4075eeb 100644
--- a/spec/support/matchers/markdown_matchers.rb
+++ b/spec/support/matchers/markdown_matchers.rb
@@ -41,7 +41,7 @@ module MarkdownMatchers
set_default_markdown_messages
match do |actual|
- expect(actual).to have_selector('gl-emoji', count: 10)
+ expect(actual).to have_selector('gl-emoji', count: 12)
emoji_element = actual.at_css('gl-emoji')
expect(emoji_element['data-name'].to_s).not_to be_empty
diff --git a/spec/support/redis.rb b/spec/support/redis.rb
index eeeb93fa811..421079af8e0 100644
--- a/spec/support/redis.rb
+++ b/spec/support/redis.rb
@@ -38,4 +38,20 @@ RSpec.configure do |config|
redis_trace_chunks_cleanup!
end
+
+ config.around(:each, :clean_gitlab_redis_rate_limiting) do |example|
+ redis_rate_limiting_cleanup!
+
+ example.run
+
+ redis_rate_limiting_cleanup!
+ end
+
+ config.around(:each, :clean_gitlab_redis_sessions) do |example|
+ redis_sessions_cleanup!
+
+ example.run
+
+ redis_sessions_cleanup!
+ end
end
diff --git a/spec/support/redis/redis_helpers.rb b/spec/support/redis/redis_helpers.rb
index 3511d906203..f27d873eb31 100644
--- a/spec/support/redis/redis_helpers.rb
+++ b/spec/support/redis/redis_helpers.rb
@@ -22,4 +22,14 @@ module RedisHelpers
def redis_trace_chunks_cleanup!
Gitlab::Redis::TraceChunks.with(&:flushdb)
end
+
+ # Usage: rate limiting state (for Rack::Attack)
+ def redis_rate_limiting_cleanup!
+ Gitlab::Redis::RateLimiting.with(&:flushdb)
+ end
+
+ # Usage: session state
+ def redis_sessions_cleanup!
+ Gitlab::Redis::Sessions.with(&:flushdb)
+ end
end
diff --git a/spec/support/redis/redis_new_instance_shared_examples.rb b/spec/support/redis/redis_new_instance_shared_examples.rb
new file mode 100644
index 00000000000..e9b1e3e4da1
--- /dev/null
+++ b/spec/support/redis/redis_new_instance_shared_examples.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples "redis_new_instance_shared_examples" do |name, fallback_class|
+ let(:instance_specific_config_file) { "config/redis.#{name}.yml" }
+ let(:environment_config_file_name) { "GITLAB_REDIS_#{name.upcase}_CONFIG_FILE" }
+ let(:fallback_config_file) { nil }
+
+ before do
+ allow(fallback_class).to receive(:config_file_name).and_return(fallback_config_file)
+ end
+
+ include_examples "redis_shared_examples"
+
+ describe '.config_file_name' do
+ subject { described_class.config_file_name }
+
+ let(:rails_root) { Dir.mktmpdir('redis_shared_examples') }
+
+ before do
+ # Undo top-level stub of config_file_name because we are testing that method now.
+ allow(described_class).to receive(:config_file_name).and_call_original
+
+ allow(described_class).to receive(:rails_root).and_return(rails_root)
+ FileUtils.mkdir_p(File.join(rails_root, 'config'))
+ end
+
+ after do
+ FileUtils.rm_rf(rails_root)
+ end
+
+ context 'when there is only a resque.yml' do
+ before do
+ FileUtils.touch(File.join(rails_root, 'config/resque.yml'))
+ end
+
+ it { expect(subject).to eq("#{rails_root}/config/resque.yml") }
+
+ context 'and there is a global env override' do
+ before do
+ stub_env('GITLAB_REDIS_CONFIG_FILE', 'global override')
+ end
+
+ it { expect(subject).to eq('global override') }
+
+ context "and #{fallback_class.name.demodulize} has a different config file" do
+ let(:fallback_config_file) { 'fallback config file' }
+
+ it { expect(subject).to eq('fallback config file') }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/redis/redis_shared_examples.rb b/spec/support/redis/redis_shared_examples.rb
index 25eab5fd6e4..dd916aea3e8 100644
--- a/spec/support/redis/redis_shared_examples.rb
+++ b/spec/support/redis/redis_shared_examples.rb
@@ -255,6 +255,28 @@ RSpec.shared_examples "redis_shared_examples" do
end
end
+ describe '#db' do
+ let(:rails_env) { 'development' }
+
+ subject { described_class.new(rails_env).db }
+
+ context 'with old format' do
+ let(:config_file_name) { config_old_format_host }
+
+ it 'returns the correct db' do
+ expect(subject).to eq(redis_database)
+ end
+ end
+
+ context 'with new format' do
+ let(:config_file_name) { config_new_format_host }
+
+ it 'returns the correct db' do
+ expect(subject).to eq(redis_database)
+ end
+ end
+ end
+
describe '#sentinels' do
subject { described_class.new(rails_env).sentinels }
@@ -327,6 +349,12 @@ RSpec.shared_examples "redis_shared_examples" do
expect(subject.send(:fetch_config)).to eq false
end
+
+ it 'has a value for the legacy default URL' do
+ allow(subject).to receive(:fetch_config) { false }
+
+ expect(subject.send(:raw_config_hash)).to include(url: a_string_matching(%r{\Aredis://localhost:638[012]\Z}))
+ end
end
def clear_raw_config
diff --git a/spec/support/shared_contexts/bulk_imports_requests_shared_context.rb b/spec/support/shared_contexts/bulk_imports_requests_shared_context.rb
new file mode 100644
index 00000000000..62d708420c3
--- /dev/null
+++ b/spec/support/shared_contexts/bulk_imports_requests_shared_context.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'bulk imports requests context' do |url|
+ let(:page_response_headers) do
+ {
+ 'Content-Type' => 'application/json',
+ 'X-Next-Page' => 2,
+ 'X-Page' => 1,
+ 'X-Per-Page' => 20,
+ 'X-Total' => 42,
+ 'X-Total-Pages' => 2
+ }
+ end
+
+ let(:request_headers) { { 'Authorization' => 'Bearer demo-pat', 'Content-Type' => 'application/json' } }
+
+ before do
+ stub_request(:get, "#{url}/api/v4/version")
+ .with(headers: request_headers)
+ .to_return(
+ status: 200,
+ body: { version: ::BulkImport.min_gl_version_for_project_migration.to_s }.to_json,
+ headers: { 'Content-Type' => 'application/json' })
+
+ stub_request(:get, "https://gitlab.example.com/api/v4/groups?min_access_level=50&page=1&per_page=20&search=test&top_level_only=true")
+ .with(headers: request_headers)
+ .to_return(status: 200,
+ body: [{
+ id: 2595440,
+ web_url: 'https://gitlab.com/groups/test',
+ name: 'Test',
+ path: 'stub-test-group',
+ full_name: 'Test',
+ full_path: 'stub-test-group'
+ }].to_json,
+ headers: page_response_headers
+ )
+
+ stub_request(:get, "%{url}/api/v4/groups?page=1&per_page=20&top_level_only=true&min_access_level=50&search=" % { url: url })
+ .to_return(
+ body: [{
+ id: 2595438,
+ web_url: 'https://gitlab.com/groups/auto-breakfast',
+ name: 'Stub',
+ path: 'stub-group',
+ full_name: 'Stub',
+ full_path: 'stub-group'
+ }].to_json,
+ headers: page_response_headers
+ )
+ end
+end
diff --git a/spec/support/shared_contexts/lib/gitlab/import_export/relation_tree_restorer_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/import_export/relation_tree_restorer_shared_context.rb
index 6b9ddc70691..7176e38bf7c 100644
--- a/spec/support/shared_contexts/lib/gitlab/import_export/relation_tree_restorer_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/import_export/relation_tree_restorer_shared_context.rb
@@ -3,7 +3,7 @@
RSpec.shared_context 'relation tree restorer shared context' do
include ImportExport::CommonUtil
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let(:shared) { Gitlab::ImportExport::Shared.new(importable) }
let(:attributes) { relation_reader.consume_attributes(importable_name) }
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
index 5a72b330707..b7966e25b38 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
@@ -39,17 +39,25 @@ RSpec.shared_context 'structured_logger' do
)
end
+ let(:db_payload_defaults) do
+ metrics =
+ ::Gitlab::Metrics::Subscribers::ActiveRecord.load_balancing_metric_counter_keys +
+ ::Gitlab::Metrics::Subscribers::ActiveRecord.load_balancing_metric_duration_keys +
+ ::Gitlab::Metrics::Subscribers::ActiveRecord.db_counter_keys +
+ [:db_duration_s]
+
+ metrics.each_with_object({}) do |key, result|
+ result[key.to_s] = 0
+ end
+ end
+
let(:end_payload) do
- start_payload.merge(
+ start_payload.merge(db_payload_defaults).merge(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: done: 0.0 sec',
'job_status' => 'done',
'duration_s' => 0.0,
'completed_at' => timestamp.to_f,
- 'cpu_s' => 1.111112,
- 'db_duration_s' => 0.0,
- 'db_cached_count' => 0,
- 'db_count' => 0,
- 'db_write_count' => 0
+ 'cpu_s' => 1.111112
)
end
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
index 73de631e293..0d992f33c61 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
@@ -15,8 +15,12 @@ RSpec.shared_context 'server metrics with mocked prometheus' do
let(:redis_seconds_metric) { double('redis seconds metric') }
let(:elasticsearch_seconds_metric) { double('elasticsearch seconds metric') }
let(:elasticsearch_requests_total) { double('elasticsearch calls total metric') }
+ let(:load_balancing_metric) { double('load balancing metric') }
before do
+ allow(Gitlab::Metrics).to receive(:histogram).and_call_original
+ allow(Gitlab::Metrics).to receive(:counter).and_call_original
+
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_queue_duration_seconds, anything, anything, anything).and_return(queue_duration_seconds)
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_completion_seconds, anything, anything, anything).and_return(completion_seconds_metric)
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_cpu_seconds, anything, anything, anything).and_return(user_execution_seconds_metric)
@@ -28,6 +32,7 @@ RSpec.shared_context 'server metrics with mocked prometheus' do
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_retried_total, anything).and_return(retried_total_metric)
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_redis_requests_total, anything).and_return(redis_requests_total)
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_elasticsearch_requests_total, anything).and_return(elasticsearch_requests_total)
+ allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_load_balancing_count, anything).and_return(load_balancing_metric)
allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_running_jobs, anything, {}, :all).and_return(running_jobs_metric)
allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_concurrency, anything, {}, :all).and_return(concurrency_metric)
diff --git a/spec/support/shared_contexts/policies/project_policy_shared_context.rb b/spec/support/shared_contexts/policies/project_policy_shared_context.rb
index de1b46c65ad..d7e4864cb08 100644
--- a/spec/support/shared_contexts/policies/project_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/project_policy_shared_context.rb
@@ -49,6 +49,7 @@ RSpec.shared_context 'ProjectPolicy context' do
resolve_note update_build update_commit_status update_container_image
update_deployment update_environment update_merge_request
update_metrics_dashboard_annotation update_pipeline update_release destroy_release
+ read_resource_group update_resource_group
]
end
diff --git a/spec/support/shared_examples/ci/stuck_builds_shared_examples.rb b/spec/support/shared_examples/ci/stuck_builds_shared_examples.rb
new file mode 100644
index 00000000000..4fcea18393c
--- /dev/null
+++ b/spec/support/shared_examples/ci/stuck_builds_shared_examples.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'job is dropped with failure reason' do |failure_reason|
+ it 'changes status' do
+ service.execute
+ job.reload
+
+ expect(job).to be_failed
+ expect(job.failure_reason).to eq(failure_reason)
+ end
+
+ context 'when job has data integrity problem' do
+ it 'drops the job and logs the reason' do
+ job.update_columns(yaml_variables: '[{"key" => "value"}]')
+
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(anything, a_hash_including(build_id: job.id))
+ .once
+ .and_call_original
+
+ service.execute
+ job.reload
+
+ expect(job).to be_failed
+ expect(job.failure_reason).to eq('data_integrity_failure')
+ end
+ end
+end
+
+RSpec.shared_examples 'job is unchanged' do
+ it 'does not change status' do
+ expect { service.execute }.not_to change(job, :status)
+ end
+end
diff --git a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
index e8f7e62d0d7..30710e43357 100644
--- a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
@@ -299,7 +299,7 @@ RSpec.shared_examples 'wiki controller actions' do
expect(response.headers['Content-Disposition']).to match(/^inline/)
expect(response.headers[Gitlab::Workhorse::DETECT_HEADER]).to eq('true')
expect(response.cache_control[:public]).to be(false)
- expect(response.headers['Cache-Control']).to eq('no-store')
+ expect(response.headers['Cache-Control']).to eq('private, no-store')
end
end
end
diff --git a/spec/support/shared_examples/features/container_registry_shared_examples.rb b/spec/support/shared_examples/features/container_registry_shared_examples.rb
new file mode 100644
index 00000000000..06b2b8c621c
--- /dev/null
+++ b/spec/support/shared_examples/features/container_registry_shared_examples.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'handling feature network errors with the container registry' do
+ it 'displays the error message' do
+ visit_container_registry
+
+ expect(page).to have_content 'We are having trouble connecting to the Container Registry'
+ end
+end
diff --git a/spec/support/shared_examples/features/discussion_comments_shared_example.rb b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
index 318ba67b9e9..6c06cbf9082 100644
--- a/spec/support/shared_examples/features/discussion_comments_shared_example.rb
+++ b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
@@ -3,9 +3,9 @@
RSpec.shared_examples 'thread comments for commit and snippet' do |resource_name|
let(:form_selector) { '.js-main-target-form' }
let(:dropdown_selector) { "#{form_selector} .comment-type-dropdown" }
- let(:toggle_selector) { "#{dropdown_selector} .dropdown-toggle" }
+ let(:toggle_selector) { "#{dropdown_selector} .gl-dropdown-toggle" }
let(:menu_selector) { "#{dropdown_selector} .dropdown-menu" }
- let(:submit_selector) { "#{form_selector} .js-comment-submit-button" }
+ let(:submit_selector) { "#{form_selector} .js-comment-submit-button > button:first-child" }
let(:close_selector) { "#{form_selector} .btn-comment-and-close" }
let(:comments_selector) { '.timeline > .note.timeline-entry:not(.being-posted)' }
let(:comment) { 'My comment' }
@@ -43,13 +43,11 @@ RSpec.shared_examples 'thread comments for commit and snippet' do |resource_name
expect(items.first).to have_content 'Comment'
expect(items.first).to have_content "Add a general comment to this #{resource_name}."
- expect(items.first).to have_selector '[data-testid="check-icon"]'
- expect(items.first['class']).to match 'droplab-item-selected'
+ expect(items.first).to have_selector '[data-testid="dropdown-item-checkbox"]'
expect(items.last).to have_content 'Start thread'
expect(items.last).to have_content "Discuss a specific suggestion or question#{' that needs to be resolved' if resource_name == 'merge request'}."
- expect(items.last).not_to have_selector '[data-testid="check-icon"]'
- expect(items.last['class']).not_to match 'droplab-item-selected'
+ expect(items.last).not_to have_selector '[data-testid="dropdown-item-checkbox"]'
end
it 'closes the menu when clicking the toggle or body' do
@@ -75,14 +73,14 @@ RSpec.shared_examples 'thread comments for commit and snippet' do |resource_name
expect(find(dropdown_selector)).to have_content 'Comment'
find(toggle_selector).click
- execute_script("document.querySelector('#{menu_selector} .divider').click()")
+ execute_script("document.querySelector('#{menu_selector} .dropdown-divider').click()")
else
execute_script("document.querySelector('#{menu_selector}').click()")
expect(page).to have_selector menu_selector
expect(find(dropdown_selector)).to have_content 'Comment'
- execute_script("document.querySelector('#{menu_selector} .divider').click()")
+ execute_script("document.querySelector('#{menu_selector} .dropdown-divider').click()")
expect(page).to have_selector menu_selector
end
@@ -97,7 +95,7 @@ RSpec.shared_examples 'thread comments for commit and snippet' do |resource_name
end
it 'updates the submit button text and closes the dropdown' do
- expect(find(submit_selector).value).to eq 'Start thread'
+ expect(find(submit_selector).text).to eq 'Start thread'
expect(page).not_to have_selector menu_selector
end
@@ -137,12 +135,10 @@ RSpec.shared_examples 'thread comments for commit and snippet' do |resource_name
items = all("#{menu_selector} li")
expect(items.first).to have_content 'Comment'
- expect(items.first).not_to have_selector '[data-testid="check-icon"]'
- expect(items.first['class']).not_to match 'droplab-item-selected'
+ expect(items.first).not_to have_selector '[data-testid="dropdown-item-checkbox"]'
expect(items.last).to have_content 'Start thread'
- expect(items.last).to have_selector '[data-testid="check-icon"]'
- expect(items.last['class']).to match 'droplab-item-selected'
+ expect(items.last).to have_selector '[data-testid="dropdown-item-checkbox"]'
end
describe 'when selecting "Comment"' do
@@ -153,7 +149,7 @@ RSpec.shared_examples 'thread comments for commit and snippet' do |resource_name
it 'updates the submit button text and closes the dropdown' do
button = find(submit_selector)
- expect(button.value).to eq 'Comment'
+ expect(button.text).to eq 'Comment'
expect(page).not_to have_selector menu_selector
end
@@ -166,12 +162,10 @@ RSpec.shared_examples 'thread comments for commit and snippet' do |resource_name
aggregate_failures do
expect(items.first).to have_content 'Comment'
- expect(items.first).to have_selector '[data-testid="check-icon"]'
- expect(items.first['class']).to match 'droplab-item-selected'
+ expect(items.first).to have_selector '[data-testid="dropdown-item-checkbox"]'
expect(items.last).to have_content 'Start thread'
- expect(items.last).not_to have_selector '[data-testid="check-icon"]'
- expect(items.last['class']).not_to match 'droplab-item-selected'
+ expect(items.last).not_to have_selector '[data-testid="dropdown-item-checkbox"]'
end
end
end
diff --git a/spec/support/shared_examples/features/project_upload_files_shared_examples.rb b/spec/support/shared_examples/features/project_upload_files_shared_examples.rb
index 7adf303bde4..85434ba7afd 100644
--- a/spec/support/shared_examples/features/project_upload_files_shared_examples.rb
+++ b/spec/support/shared_examples/features/project_upload_files_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples 'it uploads and commits a new text file' do
+RSpec.shared_examples 'it uploads and commits a new text file' do |drop: false|
it 'uploads and commits a new text file', :js do
find('.add-to-tree').click
@@ -10,7 +10,11 @@ RSpec.shared_examples 'it uploads and commits a new text file' do
wait_for_requests
end
- attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'), make_visible: true)
+ if drop
+ find(".upload-dropzone-card").drop(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
+ else
+ attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'), make_visible: true)
+ end
page.within('#modal-upload-blob') do
fill_in(:commit_message, with: 'New commit message')
@@ -32,7 +36,7 @@ RSpec.shared_examples 'it uploads and commits a new text file' do
end
end
-RSpec.shared_examples 'it uploads and commits a new image file' do
+RSpec.shared_examples 'it uploads and commits a new image file' do |drop: false|
it 'uploads and commits a new image file', :js do
find('.add-to-tree').click
@@ -42,7 +46,11 @@ RSpec.shared_examples 'it uploads and commits a new image file' do
wait_for_requests
end
- attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'logo_sample.svg'), make_visible: true)
+ if drop
+ find(".upload-dropzone-card").drop(File.join(Rails.root, 'spec', 'fixtures', 'logo_sample.svg'))
+ else
+ attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'logo_sample.svg'), make_visible: true)
+ end
page.within('#modal-upload-blob') do
fill_in(:commit_message, with: 'New commit message')
@@ -58,7 +66,7 @@ RSpec.shared_examples 'it uploads and commits a new image file' do
end
end
-RSpec.shared_examples 'it uploads and commits a new pdf file' do
+RSpec.shared_examples 'it uploads and commits a new pdf file' do |drop: false|
it 'uploads and commits a new pdf file', :js do
find('.add-to-tree').click
@@ -68,7 +76,11 @@ RSpec.shared_examples 'it uploads and commits a new pdf file' do
wait_for_requests
end
- attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'git-cheat-sheet.pdf'), make_visible: true)
+ if drop
+ find(".upload-dropzone-card").drop(File.join(Rails.root, 'spec', 'fixtures', 'git-cheat-sheet.pdf'))
+ else
+ attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'git-cheat-sheet.pdf'), make_visible: true)
+ end
page.within('#modal-upload-blob') do
fill_in(:commit_message, with: 'New commit message')
@@ -84,7 +96,7 @@ RSpec.shared_examples 'it uploads and commits a new pdf file' do
end
end
-RSpec.shared_examples 'it uploads and commits a new file to a forked project' do
+RSpec.shared_examples 'it uploads and commits a new file to a forked project' do |drop: false|
let(:fork_message) do
"You're not allowed to make changes to this project directly. "\
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
@@ -100,7 +112,12 @@ RSpec.shared_examples 'it uploads and commits a new file to a forked project' do
find('.add-to-tree').click
click_link('Upload file')
- attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'), make_visible: true)
+
+ if drop
+ find(".upload-dropzone-card").drop(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
+ else
+ attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'), make_visible: true)
+ end
page.within('#modal-upload-blob') do
fill_in(:commit_message, with: 'New commit message')
@@ -123,7 +140,7 @@ RSpec.shared_examples 'it uploads and commits a new file to a forked project' do
end
end
-RSpec.shared_examples 'it uploads a file to a sub-directory' do
+RSpec.shared_examples 'it uploads a file to a sub-directory' do |drop: false|
it 'uploads a file to a sub-directory', :js do
click_link 'files'
@@ -133,7 +150,12 @@ RSpec.shared_examples 'it uploads a file to a sub-directory' do
find('.add-to-tree').click
click_link('Upload file')
- attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'), make_visible: true)
+
+ if drop
+ find(".upload-dropzone-card").drop(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
+ else
+ attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'), make_visible: true)
+ end
page.within('#modal-upload-blob') do
fill_in(:commit_message, with: 'New commit message')
@@ -150,11 +172,15 @@ RSpec.shared_examples 'it uploads a file to a sub-directory' do
end
end
-RSpec.shared_examples 'uploads and commits a new text file via "upload file" button' do
+RSpec.shared_examples 'uploads and commits a new text file via "upload file" button' do |drop: false|
it 'uploads and commits a new text file via "upload file" button', :js do
find('[data-testid="upload-file-button"]').click
- attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'), make_visible: true)
+ if drop
+ find(".upload-dropzone-card").drop(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
+ else
+ attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'), make_visible: true)
+ end
page.within('#details-modal-upload-blob') do
fill_in(:commit_message, with: 'New commit message')
diff --git a/spec/support/shared_examples/graphql/connection_shared_examples.rb b/spec/support/shared_examples/graphql/connection_shared_examples.rb
index 4cba5b5a69d..895bab1f51a 100644
--- a/spec/support/shared_examples/graphql/connection_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/connection_shared_examples.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
RSpec.shared_examples 'a connection with collection methods' do
- %i[to_a size include? empty?].each do |method_name|
+ %i[to_a size map include? empty?].each do |method_name|
it "responds to #{method_name}" do
expect(connection).to respond_to(method_name)
end
diff --git a/spec/support/shared_examples/graphql/sorted_paginated_query_shared_examples.rb b/spec/support/shared_examples/graphql/sorted_paginated_query_shared_examples.rb
index eaeb5faee3b..37a805902a9 100644
--- a/spec/support/shared_examples/graphql/sorted_paginated_query_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/sorted_paginated_query_shared_examples.rb
@@ -9,7 +9,7 @@
# data_path: the keys necessary to dig into the return GraphQL data to get the
# returned results
# first_param: number of items expected (like a page size)
-# expected_results: array of comparison data of all items sorted correctly
+# all_records: array of comparison data of all items sorted correctly
# pagination_query: method that specifies the GraphQL query
# pagination_results_data: method that extracts the sorted data used to compare against
# the expected results
@@ -38,9 +38,9 @@
# let(:ordered_issues) { issues.sort_by(&:weight) }
#
# it_behaves_like 'sorted paginated query' do
-# let(:sort_param) { :WEIGHT_ASC }
-# let(:first_param) { 2 }
-# let(:expected_results) { ordered_issues.map(&:iid) }
+# let(:sort_param) { :WEIGHT_ASC }
+# let(:first_param) { 2 }
+# let(:all_records) { ordered_issues.map(&:iid) }
# end
# end
#
@@ -51,7 +51,7 @@ RSpec.shared_examples 'sorted paginated query' do |conditions = {}|
let(:node_path) { ['id'] }
it_behaves_like 'requires variables' do
- let(:required_variables) { [:sort_param, :first_param, :expected_results, :data_path, :current_user] }
+ let(:required_variables) { [:sort_param, :first_param, :all_records, :data_path, :current_user] }
end
describe do
@@ -101,13 +101,13 @@ RSpec.shared_examples 'sorted paginated query' do |conditions = {}|
context 'when sorting' do
it 'sorts correctly' do
- expect(results).to eq expected_results
+ expect(results).to eq all_records
end
context 'when paginating' do
let(:params) { sort_argument.merge(first: first_param) }
- let(:first_page) { expected_results.first(first_param) }
- let(:rest) { expected_results.drop(first_param) }
+ let(:first_page) { all_records.first(first_param) }
+ let(:rest) { all_records.drop(first_param) }
it 'paginates correctly' do
expect(results).to eq first_page
@@ -130,7 +130,7 @@ RSpec.shared_examples 'sorted paginated query' do |conditions = {}|
it 'fetches last elements without error' do
post_graphql(pagination_query(params), current_user: current_user)
- expect(results.first).to eq(expected_results.last)
+ expect(results.first).to eq(all_records.last)
end
end
end
diff --git a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
index 3760325675a..8b4ecd7d5ae 100644
--- a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
@@ -35,8 +35,8 @@ RSpec.shared_examples 'common trace features' do
stub_feature_flags(gitlab_ci_archived_trace_consistent_reads: trace.job.project)
end
- it 'calls ::Gitlab::Database::LoadBalancing::Sticking.unstick_or_continue_sticking' do
- expect(::Gitlab::Database::LoadBalancing::Sticking).to receive(:unstick_or_continue_sticking)
+ it 'calls ::ApplicationRecord.sticking.unstick_or_continue_sticking' do
+ expect(::ApplicationRecord.sticking).to receive(:unstick_or_continue_sticking)
.with(described_class::LOAD_BALANCING_STICKING_NAMESPACE, trace.job.id)
.and_call_original
@@ -49,8 +49,8 @@ RSpec.shared_examples 'common trace features' do
stub_feature_flags(gitlab_ci_archived_trace_consistent_reads: false)
end
- it 'does not call ::Gitlab::Database::LoadBalancing::Sticking.unstick_or_continue_sticking' do
- expect(::Gitlab::Database::LoadBalancing::Sticking).not_to receive(:unstick_or_continue_sticking)
+ it 'does not call ::ApplicationRecord.sticking.unstick_or_continue_sticking' do
+ expect(::ApplicationRecord.sticking).not_to receive(:unstick_or_continue_sticking)
trace.read { |stream| stream }
end
@@ -305,8 +305,8 @@ RSpec.shared_examples 'common trace features' do
stub_feature_flags(gitlab_ci_archived_trace_consistent_reads: trace.job.project)
end
- it 'calls ::Gitlab::Database::LoadBalancing::Sticking.stick' do
- expect(::Gitlab::Database::LoadBalancing::Sticking).to receive(:stick)
+ it 'calls ::ApplicationRecord.sticking.stick' do
+ expect(::ApplicationRecord.sticking).to receive(:stick)
.with(described_class::LOAD_BALANCING_STICKING_NAMESPACE, trace.job.id)
.and_call_original
@@ -319,8 +319,8 @@ RSpec.shared_examples 'common trace features' do
stub_feature_flags(gitlab_ci_archived_trace_consistent_reads: false)
end
- it 'does not call ::Gitlab::Database::LoadBalancing::Sticking.stick' do
- expect(::Gitlab::Database::LoadBalancing::Sticking).not_to receive(:stick)
+ it 'does not call ::ApplicationRecord.sticking.stick' do
+ expect(::ApplicationRecord.sticking).not_to receive(:stick)
subject
end
@@ -497,7 +497,7 @@ RSpec.shared_examples 'trace with disabled live trace feature' do
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(File.exist?(src_path)).to be_falsy
expect(src_checksum)
- .to eq(described_class.hexdigest(build.job_artifacts_trace.file.path))
+ .to eq(described_class.sha256_hexdigest(build.job_artifacts_trace.file.path))
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
@@ -523,7 +523,7 @@ RSpec.shared_examples 'trace with disabled live trace feature' do
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(build.old_trace).to be_nil
expect(src_checksum)
- .to eq(described_class.hexdigest(build.job_artifacts_trace.file.path))
+ .to eq(described_class.sha256_hexdigest(build.job_artifacts_trace.file.path))
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
@@ -861,7 +861,7 @@ RSpec.shared_examples 'trace with enabled live trace feature' do
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
expect(src_checksum)
- .to eq(described_class.hexdigest(build.job_artifacts_trace.file.path))
+ .to eq(described_class.sha256_hexdigest(build.job_artifacts_trace.file.path))
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
diff --git a/spec/support/shared_examples/lib/gitlab/cycle_analytics/event_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/cycle_analytics/event_shared_examples.rb
index 6e12b5a0e85..bd8bdd70ce5 100644
--- a/spec/support/shared_examples/lib/gitlab/cycle_analytics/event_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/cycle_analytics/event_shared_examples.rb
@@ -33,3 +33,38 @@ RSpec.shared_examples_for 'value stream analytics event' do
end
end
end
+
+RSpec.shared_examples_for 'LEFT JOIN-able value stream analytics event' do
+ let(:params) { {} }
+ let(:instance) { described_class.new(params) }
+ let(:record_with_data) { nil }
+ let(:record_without_data) { nil }
+ let(:scope) { instance.object_type.all }
+
+ let(:records) do
+ scope_with_left_join = instance.include_in(scope)
+ scope_with_left_join.select(scope.model.arel_table[:id], instance.timestamp_projection.as('timestamp_column_data')).to_a
+ end
+
+ it 'can use the event as LEFT JOIN' do
+ expected_record_count = record_without_data.nil? ? 1 : 2
+
+ expect(records.count).to eq(expected_record_count)
+ end
+
+ context 'when looking at the record with data' do
+ subject(:record) { records.to_a.find { |r| r.id == record_with_data.id } }
+
+ it 'contains the timestamp expression' do
+ expect(record.timestamp_column_data).not_to eq(nil)
+ end
+ end
+
+ context 'when looking at the record without data' do
+ subject(:record) { records.to_a.find { |r| r.id == record_without_data.id } }
+
+ it 'returns nil for the timestamp expression' do
+ expect(record.timestamp_column_data).to eq(nil) if record_without_data
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/import_export/attributes_permitter_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/import_export/attributes_permitter_shared_examples.rb
new file mode 100644
index 00000000000..5ce698c4701
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/import_export/attributes_permitter_shared_examples.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+RSpec.shared_examples 'a permitted attribute' do |relation_sym, permitted_attributes|
+ let(:prohibited_attributes) { %i[remote_url my_attributes my_ids token my_id test] }
+
+ let(:import_export_config) { Gitlab::ImportExport::Config.new.to_h }
+ let(:project_relation_factory) { Gitlab::ImportExport::Project::RelationFactory }
+
+ let(:relation_hash) { (permitted_attributes + prohibited_attributes).map(&:to_s).zip([]).to_h }
+ let(:relation_name) { project_relation_factory.overrides[relation_sym]&.to_sym || relation_sym }
+ let(:relation_class) { project_relation_factory.relation_class(relation_name) }
+ let(:excluded_keys) { import_export_config.dig(:excluded_keys, relation_sym) || [] }
+
+ let(:cleaned_hash) do
+ Gitlab::ImportExport::AttributeCleaner.new(
+ relation_hash: relation_hash,
+ relation_class: relation_class,
+ excluded_keys: excluded_keys
+ ).clean
+ end
+
+ let(:permitted_hash) { subject.permit(relation_sym, relation_hash) }
+
+ if described_class.new.permitted_attributes_defined?(relation_sym)
+ it 'contains only attributes that are defined as permitted in the import/export config' do
+ expect(permitted_hash.keys).to contain_exactly(*permitted_attributes.map(&:to_s))
+ end
+
+ it 'does not contain attributes that would be cleaned with AttributeCleaner' do
+ expect(cleaned_hash.keys).to include(*permitted_hash.keys)
+ end
+
+ it 'does not contain prohibited attributes that are not related to given relation' do
+ expect(permitted_hash.keys).not_to include(*prohibited_attributes.map(&:to_s))
+ end
+ else
+ it 'is disabled' do
+ expect(subject).not_to be_permitted_attributes_defined(relation_sym)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
index c6d6ff6bc1d..c06083ba952 100644
--- a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
+++ b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
@@ -4,14 +4,20 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role|
let(:db_config_name) { ::Gitlab::Database.db_config_names.first }
let(:expected_payload_defaults) do
+ result = {}
metrics =
::Gitlab::Metrics::Subscribers::ActiveRecord.load_balancing_metric_counter_keys +
- ::Gitlab::Metrics::Subscribers::ActiveRecord.load_balancing_metric_duration_keys +
::Gitlab::Metrics::Subscribers::ActiveRecord.db_counter_keys
- metrics.each_with_object({}) do |key, result|
+ metrics.each do |key|
result[key] = 0
end
+
+ ::Gitlab::Metrics::Subscribers::ActiveRecord.load_balancing_metric_duration_keys.each do |key|
+ result[key] = 0.0
+ end
+
+ result
end
def transform_hash(hash, another_hash)
@@ -36,8 +42,8 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role|
"db_primary_#{db_config_name}_cached_count": record_cached_query ? 1 : 0,
db_primary_count: record_query ? 1 : 0,
"db_primary_#{db_config_name}_count": record_query ? 1 : 0,
- db_primary_duration_s: record_query ? 0.002 : 0,
- "db_primary_#{db_config_name}_duration_s": record_query ? 0.002 : 0,
+ db_primary_duration_s: record_query ? 0.002 : 0.0,
+ "db_primary_#{db_config_name}_duration_s": record_query ? 0.002 : 0.0,
db_primary_wal_count: record_wal_query ? 1 : 0,
"db_primary_#{db_config_name}_wal_count": record_wal_query ? 1 : 0,
db_primary_wal_cached_count: record_wal_query && record_cached_query ? 1 : 0,
@@ -52,19 +58,29 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role|
"db_replica_#{db_config_name}_cached_count": record_cached_query ? 1 : 0,
db_replica_count: record_query ? 1 : 0,
"db_replica_#{db_config_name}_count": record_query ? 1 : 0,
- db_replica_duration_s: record_query ? 0.002 : 0,
- "db_replica_#{db_config_name}_duration_s": record_query ? 0.002 : 0,
+ db_replica_duration_s: record_query ? 0.002 : 0.0,
+ "db_replica_#{db_config_name}_duration_s": record_query ? 0.002 : 0.0,
db_replica_wal_count: record_wal_query ? 1 : 0,
"db_replica_#{db_config_name}_wal_count": record_wal_query ? 1 : 0,
db_replica_wal_cached_count: record_wal_query && record_cached_query ? 1 : 0,
"db_replica_#{db_config_name}_wal_cached_count": record_wal_query && record_cached_query ? 1 : 0
})
else
- {
+ transform_hash(expected_payload_defaults, {
db_count: record_query ? 1 : 0,
db_write_count: record_write_query ? 1 : 0,
- db_cached_count: record_cached_query ? 1 : 0
- }
+ db_cached_count: record_cached_query ? 1 : 0,
+ db_primary_cached_count: 0,
+ "db_primary_#{db_config_name}_cached_count": 0,
+ db_primary_count: 0,
+ "db_primary_#{db_config_name}_count": 0,
+ db_primary_duration_s: 0.0,
+ "db_primary_#{db_config_name}_duration_s": 0.0,
+ db_primary_wal_count: 0,
+ "db_primary_#{db_config_name}_wal_count": 0,
+ db_primary_wal_cached_count: 0,
+ "db_primary_#{db_config_name}_wal_cached_count": 0
+ })
end
expect(described_class.db_counter_payload).to eq(expected)
@@ -89,7 +105,7 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role|
end
RSpec.shared_examples 'record ActiveRecord metrics in a metrics transaction' do |db_role|
- let(:db_config_name) { ::Gitlab::Database.db_config_name(ApplicationRecord.connection) }
+ let(:db_config_name) { ::Gitlab::Database.db_config_name(ApplicationRecord.retrieve_connection) }
it 'increments only db counters' do
if record_query
diff --git a/spec/support/shared_examples/models/concerns/analytics/cycle_analytics/stage_event_model_examples.rb b/spec/support/shared_examples/models/concerns/analytics/cycle_analytics/stage_event_model_examples.rb
new file mode 100644
index 00000000000..f928fb1eb43
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/analytics/cycle_analytics/stage_event_model_examples.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'StageEventModel' do
+ describe '.upsert_data' do
+ let(:time) { Time.parse(Time.current.to_s(:db)) } # truncating the timestamp so we can compare it with the timestamp loaded from the DB
+ let(:input_data) do
+ [
+ {
+ stage_event_hash_id: 1,
+ issuable_id: 2,
+ group_id: 3,
+ project_id: 4,
+ author_id: 5,
+ milestone_id: 6,
+ start_event_timestamp: time,
+ end_event_timestamp: time
+ },
+ {
+ stage_event_hash_id: 7,
+ issuable_id: 8,
+ group_id: 10,
+ project_id: 11,
+ author_id: 12,
+ milestone_id: 13,
+ start_event_timestamp: time,
+ end_event_timestamp: time
+ }
+ ]
+ end
+
+ let(:column_order) do
+ [
+ :stage_event_hash_id,
+ described_class.issuable_id_column,
+ :group_id,
+ :project_id,
+ :milestone_id,
+ :author_id,
+ :start_event_timestamp,
+ :end_event_timestamp
+ ]
+ end
+
+ subject(:upsert_data) { described_class.upsert_data(input_data) }
+
+ it 'inserts the data' do
+ upsert_data
+
+ expect(described_class.count).to eq(input_data.count)
+ end
+
+ it 'does not produce duplicate rows' do
+ 2.times { upsert_data }
+
+ expect(described_class.count).to eq(input_data.count)
+ end
+
+ it 'inserts the data correctly' do
+ upsert_data
+
+ output_data = described_class.all.map do |record|
+ column_order.map { |column| record[column] }
+ end.sort
+
+ expect(input_data.map(&:values).sort).to eq(output_data)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/concerns/ttl_expirable_shared_examples.rb b/spec/support/shared_examples/models/concerns/ttl_expirable_shared_examples.rb
new file mode 100644
index 00000000000..a4e0d6c871e
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/ttl_expirable_shared_examples.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'ttl_expirable' do
+ let_it_be(:class_symbol) { described_class.model_name.param_key.to_sym }
+
+ it_behaves_like 'having unique enum values'
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:status) }
+ end
+
+ describe '.updated_before' do
+ # rubocop:disable Rails/SaveBang
+ let_it_be_with_reload(:item1) { create(class_symbol) }
+ let_it_be(:item2) { create(class_symbol) }
+ # rubocop:enable Rails/SaveBang
+
+ before do
+ item1.update_column(:updated_at, 1.month.ago)
+ end
+
+ it 'returns items with created at older than the supplied number of days' do
+ expect(described_class.updated_before(10)).to contain_exactly(item1)
+ end
+ end
+
+ describe '.active' do
+ # rubocop:disable Rails/SaveBang
+ let_it_be(:item1) { create(class_symbol) }
+ let_it_be(:item2) { create(class_symbol, :expired) }
+ let_it_be(:item3) { create(class_symbol, status: :error) }
+ # rubocop:enable Rails/SaveBang
+
+ it 'returns only active items' do
+ expect(described_class.active).to contain_exactly(item1)
+ end
+ end
+
+ describe '.lock_next_by' do
+ let_it_be(:item1) { create(class_symbol, created_at: 1.month.ago, updated_at: 1.day.ago) }
+ let_it_be(:item2) { create(class_symbol, created_at: 1.year.ago, updated_at: 1.year.ago) }
+ let_it_be(:item3) { create(class_symbol, created_at: 2.years.ago, updated_at: 1.month.ago) }
+
+ it 'returns the first item sorted by the argument' do
+ expect(described_class.lock_next_by(:updated_at)).to contain_exactly(item2)
+ expect(described_class.lock_next_by(:created_at)).to contain_exactly(item3)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb b/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
index 274fbae3dfd..750d3dd11e3 100644
--- a/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
+++ b/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
@@ -193,42 +193,6 @@ RSpec.shared_examples 'Debian Distribution' do |factory, container, can_freeze|
end
end
- describe '#needs_update?' do
- subject { distribution.needs_update? }
-
- context 'with new distribution' do
- let(:distribution) { create(factory, container: distribution_with_suite.container) }
-
- it { is_expected.to be_truthy }
- end
-
- context 'with file' do
- context 'without valid_time_duration_seconds' do
- let(:distribution) { create(factory, :with_file, container: distribution_with_suite.container) }
-
- it { is_expected.to be_falsey }
- end
-
- context 'with valid_time_duration_seconds' do
- let(:distribution) { create(factory, :with_file, container: distribution_with_suite.container, valid_time_duration_seconds: 2.days.to_i) }
-
- context 'when not yet expired' do
- it { is_expected.to be_falsey }
- end
-
- context 'when expired' do
- it do
- distribution
-
- travel_to(4.days.from_now) do
- is_expected.to be_truthy
- end
- end
- end
- end
- end
- end
-
if container == :project
describe 'project distribution specifics' do
describe 'relationships' do
diff --git a/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb
index b86c0529338..e45be21f152 100644
--- a/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb
@@ -85,7 +85,18 @@ RSpec.shared_examples 'Composer package creation' do |user_type, status, add_mem
expect(response).to have_gitlab_http_status(status)
end
+
it_behaves_like 'a package tracking event', described_class.name, 'push_package'
+
+ context 'when package creation fails' do
+ before do
+ allow_next_instance_of(::Packages::Composer::CreatePackageService) do |create_package_service|
+ allow(create_package_service).to receive(:execute).and_raise(StandardError)
+ end
+ end
+
+ it_behaves_like 'not a package tracking event'
+ end
end
end
diff --git a/spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb b/spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb
index e776cf13217..e1e75be2494 100644
--- a/spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb
@@ -79,3 +79,40 @@ RSpec.shared_examples 'returns repositories for allowed users' do |user_type, sc
end
end
end
+
+RSpec.shared_examples 'handling network errors with the container registry' do
+ before do
+ stub_container_registry_network_error(client_method: :repository_tags)
+ end
+
+ it 'returns a connection error' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:service_unavailable)
+ expect(json_response['message']).to include('We are having trouble connecting to the Container Registry')
+ end
+end
+
+RSpec.shared_examples 'handling graphql network errors with the container registry' do
+ before do
+ stub_container_registry_network_error(client_method: :repository_tags)
+ end
+
+ it 'returns a connection error' do
+ subject
+
+ expect_graphql_errors_to_include('We are having trouble connecting to the Container Registry')
+ end
+end
+
+RSpec.shared_examples 'not hitting graphql network errors with the container registry' do
+ before do
+ stub_container_registry_network_error(client_method: :repository_tags)
+ end
+
+ it 'does not return any error' do
+ subject
+
+ expect_graphql_errors_to_be_empty
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
index 274516cd87b..01ed6c26576 100644
--- a/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
@@ -62,9 +62,10 @@ RSpec.shared_examples 'group and project boards query' do
context 'when ascending' do
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { }
- let(:first_param) { 2 }
- let(:expected_results) do
+ let(:sort_param) { }
+ let(:first_param) { 2 }
+
+ let(:all_records) do
if board_parent.multiple_issue_boards_available?
boards.map { |board| global_id_of(board) }
else
diff --git a/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb
index af4c9286e7c..367c6d4fa3a 100644
--- a/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb
@@ -17,9 +17,11 @@ RSpec.shared_examples 'group and project packages query' do
let(:package_names) { graphql_data_at(resource_type, :packages, :nodes, :name) }
let(:target_shas) { graphql_data_at(resource_type, :packages, :nodes, :metadata, :target_sha) }
let(:packages) { graphql_data_at(resource_type, :packages, :nodes) }
+ let(:packages_count) { graphql_data_at(resource_type, :packages, :count) }
let(:fields) do
<<~QUERY
+ count
nodes {
#{all_graphql_fields_for('packages'.classify, excluded: ['project'])}
metadata { #{query_graphql_fragment('ComposerMetadata')} }
@@ -55,6 +57,10 @@ RSpec.shared_examples 'group and project packages query' do
it 'deals with metadata' do
expect(target_shas).to contain_exactly(composer_metadatum.target_sha)
end
+
+ it 'returns the count of the packages' do
+ expect(packages_count).to eq(4)
+ end
end
context 'when the user does not have access to the resource' do
@@ -95,7 +101,7 @@ RSpec.shared_examples 'group and project packages query' do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { order }
let(:first_param) { 4 }
- let(:expected_results) { ascending_packages }
+ let(:all_records) { ascending_packages }
end
end
end
@@ -105,7 +111,7 @@ RSpec.shared_examples 'group and project packages query' do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { order }
let(:first_param) { 4 }
- let(:expected_results) { ascending_packages.reverse }
+ let(:all_records) { ascending_packages.reverse }
end
end
end
diff --git a/spec/support/shared_examples/requests/api/issuable_search_shared_examples.rb b/spec/support/shared_examples/requests/api/issuable_search_shared_examples.rb
new file mode 100644
index 00000000000..9f67bd69560
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/issuable_search_shared_examples.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'issuable anonymous search' do
+ context 'with anonymous user' do
+ context 'with disable_anonymous_search disabled' do
+ before do
+ stub_feature_flags(disable_anonymous_search: false)
+ end
+
+ it 'returns issuables matching given search string for title' do
+ get api(url), params: { scope: 'all', search: issuable.title }
+
+ expect_paginated_array_response(result)
+ end
+
+ it 'returns issuables matching given search string for description' do
+ get api(url), params: { scope: 'all', search: issuable.description }
+
+ expect_paginated_array_response(result)
+ end
+ end
+
+ context 'with disable_anonymous_search enabled' do
+ before do
+ stub_feature_flags(disable_anonymous_search: true)
+ end
+
+ it "returns 422 error" do
+ get api(url), params: { scope: 'all', search: issuable.title }
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']).to eq('User must be authenticated to use search')
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/logging_application_context_shared_examples.rb b/spec/support/shared_examples/requests/api/logging_application_context_shared_examples.rb
index cb06c9fa596..3e9c4a5eb68 100644
--- a/spec/support/shared_examples/requests/api/logging_application_context_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/logging_application_context_shared_examples.rb
@@ -15,6 +15,22 @@ RSpec.shared_examples 'storing arguments in the application context' do
end
end
+# For the API we need a slightly different approach as `client_id` is
+# calculated in API logging code.
+RSpec.shared_examples 'storing arguments in the application context for the API' do
+ it 'places the expected params in the application context' do
+ expect(::API::API::LOG_FORMATTER).to receive(:call) do
+ expect(Gitlab::ApplicationContext.current).to include(log_hash(expected_params))
+ end
+
+ subject
+ end
+
+ def log_hash(hash)
+ hash.transform_keys! { |key| "meta.#{key}" }
+ end
+end
+
RSpec.shared_examples 'not executing any extra queries for the application context' do |expected_extra_queries = 0|
it 'does not execute more queries than without adding anything to the application context' do
# Call the subject once to memoize all factories being used for the spec, so they won't
diff --git a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
index 2a19ff6f590..b294467d482 100644
--- a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
+++ b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
#
# Requires let variables:
-# * throttle_setting_prefix: "throttle_authenticated_api", "throttle_authenticated_web", "throttle_protected_paths", "throttle_authenticated_packages_api", "throttle_authenticated_git_lfs", "throttle_authenticated_files_api"
+# * throttle_setting_prefix: "throttle_authenticated_api", "throttle_authenticated_web", "throttle_protected_paths", "throttle_authenticated_packages_api", "throttle_authenticated_git_lfs", "throttle_authenticated_files_api", "throttle_authenticated_deprecated_api"
# * request_method
# * request_args
# * other_user_request_args
@@ -16,7 +16,8 @@ RSpec.shared_examples 'rate-limited token-authenticated requests' do
"throttle_authenticated_web" => "throttle_authenticated_web",
"throttle_authenticated_packages_api" => "throttle_authenticated_packages_api",
"throttle_authenticated_git_lfs" => "throttle_authenticated_git_lfs",
- "throttle_authenticated_files_api" => "throttle_authenticated_files_api"
+ "throttle_authenticated_files_api" => "throttle_authenticated_files_api",
+ "throttle_authenticated_deprecated_api" => "throttle_authenticated_deprecated_api"
}
end
diff --git a/spec/support/shared_examples/services/dependency_proxy_settings_shared_examples.rb b/spec/support/shared_examples/services/dependency_proxy_settings_shared_examples.rb
new file mode 100644
index 00000000000..2c1dc2da560
--- /dev/null
+++ b/spec/support/shared_examples/services/dependency_proxy_settings_shared_examples.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'updating the dependency proxy group settings attributes' do |from: {}, to: {}|
+ it 'updates the dependency proxy settings' do
+ expect { subject }
+ .to change { group_settings.reload.enabled }.from(from[:enabled]).to(to[:enabled])
+ end
+end
diff --git a/spec/support/shared_examples/workers/concerns/dependency_proxy/cleanup_worker_shared_examples.rb b/spec/support/shared_examples/workers/concerns/dependency_proxy/cleanup_worker_shared_examples.rb
new file mode 100644
index 00000000000..c9014ad549c
--- /dev/null
+++ b/spec/support/shared_examples/workers/concerns/dependency_proxy/cleanup_worker_shared_examples.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'dependency_proxy_cleanup_worker' do
+ let_it_be(:group) { create(:group) }
+
+ let(:worker) { described_class.new }
+
+ describe '#perform_work' do
+ subject(:perform_work) { worker.perform_work }
+
+ context 'with no work to do' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'with work to do' do
+ let_it_be(:artifact1) { create(factory_type, :expired, group: group) }
+ let_it_be(:artifact2) { create(factory_type, :expired, group: group, updated_at: 6.months.ago, created_at: 2.years.ago) }
+ let_it_be_with_reload(:artifact3) { create(factory_type, :expired, group: group, updated_at: 1.year.ago, created_at: 1.year.ago) }
+ let_it_be(:artifact4) { create(factory_type, group: group, updated_at: 2.years.ago, created_at: 2.years.ago) }
+
+ it 'deletes the oldest expired artifact based on updated_at', :aggregate_failures do
+ expect(worker).to receive(:log_extra_metadata_on_done).with("#{factory_type}_id".to_sym, artifact3.id)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:group_id, group.id)
+
+ expect { perform_work }.to change { artifact1.class.count }.by(-1)
+ end
+ end
+ end
+
+ describe '#max_running_jobs' do
+ let(:capacity) { 5 }
+
+ subject { worker.max_running_jobs }
+
+ before do
+ stub_application_setting(dependency_proxy_ttl_group_policy_worker_capacity: capacity)
+ end
+
+ it { is_expected.to eq(capacity) }
+ end
+
+ describe '#remaining_work_count' do
+ let_it_be(:expired_artifacts) do
+ (1..3).map do |_|
+ create(factory_type, :expired, group: group)
+ end
+ end
+
+ subject { worker.remaining_work_count }
+
+ it { is_expected.to eq(3) }
+ end
+end
diff --git a/spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb b/spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb
index 1b09b5fe613..d6e96ef37d6 100644
--- a/spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb
+++ b/spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb
@@ -1,11 +1,14 @@
# frozen_string_literal: true
-# Expects `subject` to be a job/worker instance
+# Expects `subject` to be a job/worker instance and
+# `job_args` to be arguments to #perform if it takes arguments
RSpec.shared_examples 'reenqueuer' do
before do
allow(subject).to receive(:sleep) # faster tests
end
+ let(:subject_perform) { defined?(job_args) ? subject.perform(job_args) : subject.perform }
+
it 'implements lease_timeout' do
expect(subject.lease_timeout).to be_a(ActiveSupport::Duration)
end
@@ -18,12 +21,13 @@ RSpec.shared_examples 'reenqueuer' do
it 'tries to obtain a lease' do
expect_to_obtain_exclusive_lease(subject.lease_key)
- subject.perform
+ subject_perform
end
end
end
-# Expects `subject` to be a job/worker instance
+# Expects `subject` to be a job/worker instance and
+# `job_args` to be arguments to #perform if it takes arguments
RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_duration|
before do
# Allow Timecop freeze and travel without the block form
@@ -38,13 +42,15 @@ RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_durat
Timecop.safe_mode = true
end
+ let(:subject_perform) { defined?(job_args) ? subject.perform(job_args) : subject.perform }
+
context 'when the work finishes in 0 seconds' do
let(:actual_duration) { 0 }
it 'sleeps exactly the minimum duration' do
expect(subject).to receive(:sleep).with(a_value_within(0.01).of(minimum_duration))
- subject.perform
+ subject_perform
end
end
@@ -54,7 +60,7 @@ RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_durat
it 'sleeps 90% of minimum duration' do
expect(subject).to receive(:sleep).with(a_value_within(0.01).of(0.9 * minimum_duration))
- subject.perform
+ subject_perform
end
end
@@ -64,7 +70,7 @@ RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_durat
it 'sleeps 10% of minimum duration' do
expect(subject).to receive(:sleep).with(a_value_within(0.01).of(0.1 * minimum_duration))
- subject.perform
+ subject_perform
end
end
@@ -74,7 +80,7 @@ RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_durat
it 'does not sleep' do
expect(subject).not_to receive(:sleep)
- subject.perform
+ subject_perform
end
end
@@ -84,7 +90,7 @@ RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_durat
it 'does not sleep' do
expect(subject).not_to receive(:sleep)
- subject.perform
+ subject_perform
end
end
@@ -94,7 +100,7 @@ RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_durat
it 'does not sleep' do
expect(subject).not_to receive(:sleep)
- subject.perform
+ subject_perform
end
end
diff --git a/spec/support_specs/database/multiple_databases_spec.rb b/spec/support_specs/database/multiple_databases_spec.rb
new file mode 100644
index 00000000000..6ad15fd6594
--- /dev/null
+++ b/spec/support_specs/database/multiple_databases_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Database::MultipleDatabases' do
+ describe '.with_reestablished_active_record_base' do
+ context 'when doing establish_connection' do
+ context 'on ActiveRecord::Base' do
+ it 'raises exception' do
+ expect { ActiveRecord::Base.establish_connection(:main) }.to raise_error /Cannot re-establish/
+ end
+
+ context 'when using with_reestablished_active_record_base' do
+ it 'does not raise exception' do
+ with_reestablished_active_record_base do
+ expect { ActiveRecord::Base.establish_connection(:main) }.not_to raise_error
+ end
+ end
+ end
+ end
+
+ context 'on Ci::CiDatabaseRecord' do
+ before do
+ skip_if_multiple_databases_not_setup
+ end
+
+ it 'raises exception' do
+ expect { Ci::CiDatabaseRecord.establish_connection(:ci) }.to raise_error /Cannot re-establish/
+ end
+
+ context 'when using with_reestablished_active_record_base' do
+ it 'does not raise exception' do
+ with_reestablished_active_record_base do
+ expect { Ci::CiDatabaseRecord.establish_connection(:main) }.not_to raise_error
+ end
+ end
+ end
+ end
+ end
+
+ context 'when trying to access connection' do
+ context 'when reconnect is true' do
+ it 'does not raise exception' do
+ with_reestablished_active_record_base(reconnect: true) do
+ expect { ActiveRecord::Base.connection.execute("SELECT 1") }.not_to raise_error # rubocop:disable Database/MultipleDatabases
+ end
+ end
+ end
+
+ context 'when reconnect is false' do
+ it 'does raise exception' do
+ with_reestablished_active_record_base(reconnect: false) do
+ expect { ActiveRecord::Base.connection.execute("SELECT 1") }.to raise_error(ActiveRecord::ConnectionNotEstablished) # rubocop:disable Database/MultipleDatabases
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support_specs/database/prevent_cross_joins_spec.rb b/spec/support_specs/database/prevent_cross_joins_spec.rb
index e9a95fe77a5..0fbcd190c2c 100644
--- a/spec/support_specs/database/prevent_cross_joins_spec.rb
+++ b/spec/support_specs/database/prevent_cross_joins_spec.rb
@@ -22,6 +22,12 @@ RSpec.describe Database::PreventCrossJoins do
described_class::CrossJoinAcrossUnsupportedTablesError)
end
+ context 'when annotation is used' do
+ it 'does not raise exception' do
+ expect { main_and_ci_allowed_via_annotate }.not_to raise_error
+ end
+ end
+
context 'when allow_cross_joins_across_databases is used' do
it 'does not raise exception' do
expect { main_and_ci_query_allowlisted }.not_to raise_error
@@ -52,6 +58,12 @@ RSpec.describe Database::PreventCrossJoins do
end
end
+ def main_and_ci_allowed_via_annotate
+ main_and_ci_query do |relation|
+ relation.allow_cross_joins_across_databases(url: 'http://issue-url')
+ end
+ end
+
def main_only_query
Issue.joins(:project).last
end
@@ -61,6 +73,8 @@ RSpec.describe Database::PreventCrossJoins do
end
def main_and_ci_query
- Ci::Build.joins(:project).last
+ relation = Ci::Build.joins(:project)
+ relation = yield(relation) if block_given?
+ relation.last
end
end
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index 91cd09fc6e6..ad4ada9a9f1 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -293,53 +293,24 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
end
describe '#migrate_with_instrumentation' do
- subject { run_rake_task('gitlab:db:migration_testing') }
+ describe '#up' do
+ subject { run_rake_task('gitlab:db:migration_testing:up') }
- let(:ctx) { double('ctx', migrations: all_migrations, schema_migration: double, get_all_versions: existing_versions) }
- let(:instrumentation) { instance_double(Gitlab::Database::Migrations::Instrumentation, observations: observations) }
- let(:existing_versions) { [1] }
- let(:all_migrations) { [double('migration1', version: 1, name: 'test'), pending_migration] }
- let(:pending_migration) { double('migration2', version: 2, name: 'test') }
- let(:filename) { Gitlab::Database::Migrations::Instrumentation::STATS_FILENAME }
- let(:result_dir) { Dir.mktmpdir }
- let(:observations) { %w[some data] }
+ it 'delegates to the migration runner' do
+ expect(::Gitlab::Database::Migrations::Runner).to receive_message_chain(:up, :run)
- before do
- allow(ActiveRecord::Base.connection).to receive(:migration_context).and_return(ctx)
- allow(Gitlab::Database::Migrations::Instrumentation).to receive(:new).and_return(instrumentation)
- allow(ActiveRecord::Migrator).to receive_message_chain('new.run').with(any_args).with(no_args)
-
- allow(instrumentation).to receive(:observe).and_yield
-
- stub_const('Gitlab::Database::Migrations::Instrumentation::RESULT_DIR', result_dir)
- end
-
- after do
- FileUtils.rm_rf(result_dir)
- end
-
- it 'creates result directory when one does not exist' do
- FileUtils.rm_rf(result_dir)
-
- expect { subject }.to change { Dir.exist?(result_dir) }.from(false).to(true)
- end
-
- it 'instruments the pending migration' do
- expect(instrumentation).to receive(:observe).with(version: 2, name: 'test').and_yield
-
- subject
+ subject
+ end
end
- it 'executes the pending migration' do
- expect(ActiveRecord::Migrator).to receive_message_chain('new.run').with(:up, ctx.migrations, ctx.schema_migration, pending_migration.version).with(no_args)
+ describe '#down' do
+ subject { run_rake_task('gitlab:db:migration_testing:down') }
- subject
- end
-
- it 'writes observations out to JSON file' do
- subject
+ it 'delegates to the migration runner' do
+ expect(::Gitlab::Database::Migrations::Runner).to receive_message_chain(:down, :run)
- expect(File.read(File.join(result_dir, filename))).to eq(observations.to_json)
+ subject
+ end
end
end
diff --git a/spec/tasks/gitlab/packages/composer_rake_spec.rb b/spec/tasks/gitlab/packages/composer_rake_spec.rb
deleted file mode 100644
index f4f43bf77d8..00000000000
--- a/spec/tasks/gitlab/packages/composer_rake_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'rake_helper'
-
-RSpec.describe 'gitlab:packages:build_composer_cache namespace rake task', :silence_stdout do
- let_it_be(:package_name) { 'sample-project' }
- let_it_be(:package_name2) { 'sample-project2' }
- let_it_be(:json) { { 'name' => package_name } }
- let_it_be(:json2) { { 'name' => package_name2 } }
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json }, group: group) }
- let_it_be(:project2) { create(:project, :custom_repo, files: { 'composer.json' => json2.to_json }, group: group) }
-
- let!(:package) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
- let!(:package2) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '2.0.0', json: json) }
- let!(:package3) { create(:composer_package, :with_metadatum, project: project2, name: package_name2, version: '3.0.0', json: json2) }
-
- before :all do
- Rake.application.rake_require 'tasks/gitlab/packages/composer'
- end
-
- subject do
- run_rake_task("gitlab:packages:build_composer_cache")
- end
-
- it 'generates the cache files' do
- expect { subject }.to change { Packages::Composer::CacheFile.count }.by(2)
- end
-end
diff --git a/spec/tooling/danger/product_intelligence_spec.rb b/spec/tooling/danger/product_intelligence_spec.rb
index 4ab911b6590..5fd44ef5de0 100644
--- a/spec/tooling/danger/product_intelligence_spec.rb
+++ b/spec/tooling/danger/product_intelligence_spec.rb
@@ -59,6 +59,14 @@ RSpec.describe Tooling::Danger::ProductIntelligence do
it { is_expected.to be_empty }
end
+
+ context 'with growth experiment label' do
+ before do
+ allow(fake_helper).to receive(:mr_has_labels?).with('growth experiment').and_return(true)
+ end
+
+ it { is_expected.to be_empty }
+ end
end
describe '#matching_changed_files' do
@@ -74,7 +82,7 @@ RSpec.describe Tooling::Danger::ProductIntelligence do
context 'with snowplow files changed' do
context 'when vue file changed' do
- let(:changed_lines) { ['+data-track-event'] }
+ let(:changed_lines) { ['+data-track-action'] }
it { is_expected.to match_array(['components/welcome.vue']) }
end
diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb
index c7715eb43fc..5edd9e54cc5 100644
--- a/spec/tooling/danger/project_helper_spec.rb
+++ b/spec/tooling/danger/project_helper_spec.rb
@@ -93,6 +93,9 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'ee/spec/foo' | [:backend]
'ee/spec/foo/bar' | [:backend]
+ 'spec/migrations/foo' | [:database]
+ 'ee/spec/migrations/foo' | [:database]
+
'spec/features/foo' | [:test]
'ee/spec/features/foo' | [:test]
'spec/support/shared_examples/features/foo' | [:test]
@@ -277,4 +280,17 @@ RSpec.describe Tooling::Danger::ProjectHelper do
is_expected.to eq('gitlab-foss')
end
end
+
+ describe '#file_lines' do
+ let(:filename) { 'spec/foo_spec.rb' }
+ let(:file_spy) { spy }
+
+ it 'returns the chomped file lines' do
+ expect(project_helper).to receive(:read_file).with(filename).and_return(file_spy)
+
+ project_helper.file_lines(filename)
+
+ expect(file_spy).to have_received(:lines).with(chomp: true)
+ end
+ end
end
diff --git a/spec/tooling/danger/specs_spec.rb b/spec/tooling/danger/specs_spec.rb
new file mode 100644
index 00000000000..a5978020c9d
--- /dev/null
+++ b/spec/tooling/danger/specs_spec.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'rspec-parameterized'
+require 'gitlab-dangerfiles'
+require 'danger'
+require 'danger/plugins/helper'
+require 'gitlab/dangerfiles/spec_helper'
+
+require_relative '../../../tooling/danger/specs'
+require_relative '../../../tooling/danger/project_helper'
+
+RSpec.describe Tooling::Danger::Specs do
+ include_context "with dangerfile"
+
+ let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
+ let(:fake_project_helper) { double('fake-project-helper', helper: fake_helper).tap { |h| h.class.include(Tooling::Danger::ProjectHelper) } }
+ let(:file_lines) do
+ [
+ " describe 'foo' do",
+ " expect(foo).to match(['bar'])",
+ " end",
+ " expect(foo).to match(['bar'])", # same line as line 1 above, we expect two different suggestions
+ " ",
+ " expect(foo).to match ['bar']",
+ " expect(foo).to eq(['bar'])",
+ " expect(foo).to eq ['bar']",
+ " expect(foo).to(match(['bar']))",
+ " expect(foo).to(eq(['bar']))",
+ " foo.eq(['bar'])"
+ ]
+ end
+
+ let(:matching_lines) do
+ [
+ "+ expect(foo).to match(['bar'])",
+ "+ expect(foo).to match(['bar'])",
+ "+ expect(foo).to match ['bar']",
+ "+ expect(foo).to eq(['bar'])",
+ "+ expect(foo).to eq ['bar']",
+ "+ expect(foo).to(match(['bar']))",
+ "+ expect(foo).to(eq(['bar']))"
+ ]
+ end
+
+ subject(:specs) { fake_danger.new(helper: fake_helper) }
+
+ before do
+ allow(specs).to receive(:project_helper).and_return(fake_project_helper)
+ end
+
+ describe '#add_suggestions_for_match_with_array' do
+ let(:filename) { 'spec/foo_spec.rb' }
+
+ before do
+ expect(specs).to receive(:added_line_matching_match_with_array).and_return(matching_lines)
+ allow(specs.project_helper).to receive(:file_lines).and_return(file_lines)
+ end
+
+ it 'adds suggestions at the correct lines' do
+ expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to match_array(['bar'])"), file: filename, line: 2)
+ expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to match_array(['bar'])"), file: filename, line: 4)
+ expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to match_array ['bar']"), file: filename, line: 6)
+ expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to match_array(['bar'])"), file: filename, line: 7)
+ expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to match_array ['bar']"), file: filename, line: 8)
+ expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to(match_array(['bar']))"), file: filename, line: 9)
+ expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to(match_array(['bar']))"), file: filename, line: 10)
+
+ specs.add_suggestions_for_match_with_array(filename)
+ end
+ end
+
+ describe '#changed_specs_files' do
+ let(:base_expected_files) { %w[spec/foo_spec.rb ee/spec/foo_spec.rb spec/bar_spec.rb ee/spec/bar_spec.rb spec/zab_spec.rb ee/spec/zab_spec.rb] }
+
+ before do
+ all_changed_files = %w[
+ app/workers/a.rb
+ app/workers/b.rb
+ app/workers/e.rb
+ spec/foo_spec.rb
+ ee/spec/foo_spec.rb
+ spec/bar_spec.rb
+ ee/spec/bar_spec.rb
+ spec/zab_spec.rb
+ ee/spec/zab_spec.rb
+ ]
+
+ allow(specs.helper).to receive(:all_changed_files).and_return(all_changed_files)
+ end
+
+ it 'returns added, modified, and renamed_after files by default' do
+ expect(specs.changed_specs_files).to match_array(base_expected_files)
+ end
+
+ context 'with include_ee: :exclude' do
+ it 'returns spec files without EE-specific files' do
+ expect(specs.changed_specs_files(ee: :exclude)).not_to include(%w[ee/spec/foo_spec.rb ee/spec/bar_spec.rb ee/spec/zab_spec.rb])
+ end
+ end
+
+ context 'with include_ee: :only' do
+ it 'returns EE-specific spec files only' do
+ expect(specs.changed_specs_files(ee: :only)).to match_array(%w[ee/spec/foo_spec.rb ee/spec/bar_spec.rb ee/spec/zab_spec.rb])
+ end
+ end
+ end
+
+ describe '#added_line_matching_match_with_array' do
+ let(:filename) { 'spec/foo_spec.rb' }
+ let(:changed_lines) do
+ [
+ " expect(foo).to match(['bar'])",
+ " expect(foo).to match(['bar'])",
+ " expect(foo).to match ['bar']",
+ " expect(foo).to eq(['bar'])",
+ " expect(foo).to eq ['bar']",
+ "- expect(foo).to match(['bar'])",
+ "- expect(foo).to match(['bar'])",
+ "- expect(foo).to match ['bar']",
+ "- expect(foo).to eq(['bar'])",
+ "- expect(foo).to eq ['bar']"
+ ] + matching_lines
+ end
+
+ before do
+ allow(specs.helper).to receive(:changed_lines).with(filename).and_return(changed_lines)
+ end
+
+ it 'returns added, modified, and renamed_after files by default' do
+ expect(specs.added_line_matching_match_with_array(filename)).to match_array(matching_lines)
+ end
+ end
+end
diff --git a/spec/tooling/quality/test_level_spec.rb b/spec/tooling/quality/test_level_spec.rb
index 89abe337347..0623a67a60e 100644
--- a/spec/tooling/quality/test_level_spec.rb
+++ b/spec/tooling/quality/test_level_spec.rb
@@ -63,7 +63,14 @@ RSpec.describe Quality::TestLevel do
context 'with a prefix' do
it 'returns a pattern' do
expect(described_class.new('ee/').pattern(:system))
- .to eq("ee/spec/{features}{,/**/}*_spec.rb")
+ .to eq("{ee/}spec/{features}{,/**/}*_spec.rb")
+ end
+ end
+
+ context 'with several prefixes' do
+ it 'returns a pattern' do
+ expect(described_class.new(['', 'ee/', 'jh/']).pattern(:system))
+ .to eq("{,ee/,jh/}spec/{features}{,/**/}*_spec.rb")
end
end
@@ -138,7 +145,14 @@ RSpec.describe Quality::TestLevel do
context 'with a prefix' do
it 'returns a regexp' do
expect(described_class.new('ee/').regexp(:system))
- .to eq(%r{ee/spec/(features)})
+ .to eq(%r{(ee/)spec/(features)})
+ end
+ end
+
+ context 'with several prefixes' do
+ it 'returns a regexp' do
+ expect(described_class.new(['', 'ee/', 'jh/']).regexp(:system))
+ .to eq(%r{(|ee/|jh/)spec/(features)})
end
end
diff --git a/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
index 2c37565328a..d4e97d96dfd 100644
--- a/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
@@ -58,6 +58,15 @@ RSpec.describe 'layouts/nav/sidebar/_admin' do
it_behaves_like 'page has active sub tab', 'Users'
end
+ context 'on topics' do
+ before do
+ allow(controller).to receive(:controller_name).and_return('admin/topics')
+ end
+
+ it_behaves_like 'page has active tab', 'Overview'
+ it_behaves_like 'page has active sub tab', 'Topics'
+ end
+
context 'on messages' do
before do
allow(controller).to receive(:controller_name).and_return('broadcast_messages')
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index adfe1cee6d6..20c5d9992be 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -580,6 +580,23 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
end
+
+ describe 'Google Cloud' do
+ it 'has a link to the google cloud page' do
+ render
+ expect(rendered).to have_link('Google Cloud', href: project_google_cloud_index_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the google cloud page' do
+ render
+
+ expect(rendered).not_to have_link('Google Cloud')
+ end
+ end
+ end
end
describe 'Packages and Registries' do
diff --git a/spec/views/projects/branches/index.html.haml_spec.rb b/spec/views/projects/branches/index.html.haml_spec.rb
new file mode 100644
index 00000000000..9954d9ecaec
--- /dev/null
+++ b/spec/views/projects/branches/index.html.haml_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'projects/branches/index.html.haml' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository }
+
+ let(:branches) { repository.branches }
+ let(:active_branch) { branches.find { |b| b.name == 'master' } }
+ let(:stale_branch) { branches.find { |b| b.name == 'feature' } }
+
+ before do
+ assign(:project, project)
+ assign(:repository, repository)
+ assign(:mode, 'overview')
+ assign(:active_branches, [active_branch])
+ assign(:stale_branches, [stale_branch])
+ assign(:overview_max_branches, 5)
+ assign(:branch_pipeline_statuses, {})
+ assign(:refs_pipelines, {})
+ end
+
+ it 'renders list of active and stale branches' do
+ content = render
+
+ expect(content).to include(active_branch.name)
+ expect(content).to include(stale_branch.name)
+ end
+
+ context 'when Gitaly is unavailable' do
+ it 'renders an error' do
+ assign(:gitaly_unavailable, true)
+
+ content = render
+
+ expect(content).to include('Unable to load branches')
+ expect(content).to include(
+ 'The git server, Gitaly, is not available at this time. Please contact your administrator.'
+ )
+ end
+ end
+end
diff --git a/spec/views/projects/commits/_commit.html.haml_spec.rb b/spec/views/projects/commits/_commit.html.haml_spec.rb
index abbb3a168c3..ed93240abc1 100644
--- a/spec/views/projects/commits/_commit.html.haml_spec.rb
+++ b/spec/views/projects/commits/_commit.html.haml_spec.rb
@@ -11,6 +11,24 @@ RSpec.describe 'projects/commits/_commit.html.haml' do
allow(view).to receive(:current_application_settings).and_return(Gitlab::CurrentSettings.current_application_settings)
end
+ context 'with different committer' do
+ let(:ref) { 'master' }
+ let(:committer) { create(:user) }
+
+ it 'renders committed by user' do
+ allow(commit).to receive(:different_committer?).and_return(true)
+ allow(commit).to receive(:committer).and_return(committer)
+
+ render partial: template, locals: {
+ project: project,
+ ref: ref,
+ commit: commit
+ }
+
+ expect(rendered).to have_text("#{committer.name} committed")
+ end
+ end
+
context 'with a signed commit' do
let(:ref) { GpgHelpers::SIGNED_COMMIT_SHA }
diff --git a/spec/views/projects/services/edit.html.haml_spec.rb b/spec/views/projects/services/edit.html.haml_spec.rb
index a5460adbd2c..372ccf82a68 100644
--- a/spec/views/projects/services/edit.html.haml_spec.rb
+++ b/spec/views/projects/services/edit.html.haml_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'projects/services/edit' do
it do
render
- expect(rendered).not_to have_text('Recent Deliveries')
+ expect(rendered).not_to have_text('Recent events')
end
context 'integration using WebHooks' do
@@ -25,7 +25,7 @@ RSpec.describe 'projects/services/edit' do
it do
render
- expect(rendered).to have_text('Recent Deliveries')
+ expect(rendered).to have_text('Recent events')
end
end
end
diff --git a/spec/views/projects/tags/index.html.haml_spec.rb b/spec/views/projects/tags/index.html.haml_spec.rb
index 2702ab9e2a9..ebd526284d1 100644
--- a/spec/views/projects/tags/index.html.haml_spec.rb
+++ b/spec/views/projects/tags/index.html.haml_spec.rb
@@ -3,10 +3,11 @@
require 'spec_helper'
RSpec.describe 'projects/tags/index.html.haml' do
- let(:project) { create(:project, :repository) }
- let(:tags) { TagsFinder.new(project.repository, {}).execute }
- let(:git_tag) { project.repository.tags.last }
- let(:release) { create(:release, project: project, sha: git_tag.target_commit.sha) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:tags) { project.repository.tags }
+ let_it_be(:git_tag) { project.repository.tags.last }
+ let_it_be(:release) { create(:release, project: project, sha: git_tag.target_commit.sha) }
+
let(:pipeline) { create(:ci_pipeline, :success, project: project, ref: git_tag.name, sha: release.sha) }
before do
@@ -86,4 +87,17 @@ RSpec.describe 'projects/tags/index.html.haml' do
expect(page.all('.tags .content-list li')).not_to have_css 'svg.s24'
end
end
+
+ context 'when Gitaly is unavailable' do
+ it 'renders an error' do
+ assign(:tags_loading_error, GRPC::Unavailable.new)
+
+ content = render
+
+ expect(content).to include("Unable to load tags")
+ expect(content).to include(
+ "The git server, Gitaly, is not available at this time. Please contact your administrator."
+ )
+ end
+ end
end
diff --git a/spec/workers/authorized_project_update/project_recalculate_per_user_worker_spec.rb b/spec/workers/authorized_project_update/project_recalculate_per_user_worker_spec.rb
new file mode 100644
index 00000000000..57a0726000f
--- /dev/null
+++ b/spec/workers/authorized_project_update/project_recalculate_per_user_worker_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker do
+ include ExclusiveLeaseHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ subject(:worker) { described_class.new }
+
+ include_examples 'an idempotent worker' do
+ let(:job_args) { [project.id, user.id] }
+
+ it 'does not change authorizations when run twice' do
+ project.add_developer(user)
+
+ user.project_authorizations.delete_all
+
+ expect { worker.perform(project.id, user.id) }.to change { project.project_authorizations.reload.size }.by(1)
+ expect { worker.perform(project.id, user.id) }.not_to change { project.project_authorizations.reload.size }
+ end
+ end
+
+ describe '#perform' do
+ it 'does not fail if the project does not exist' do
+ expect do
+ worker.perform(non_existing_record_id, user.id)
+ end.not_to raise_error
+ end
+
+ it 'does not fail if the user does not exist' do
+ expect do
+ worker.perform(project.id, non_existing_record_id)
+ end.not_to raise_error
+ end
+
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserService' do
+ expect_next_instance_of(AuthorizedProjectUpdate::ProjectRecalculatePerUserService, project, user) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ worker.perform(project.id, user.id)
+ end
+
+ context 'exclusive lease' do
+ let(:lock_key) { "#{described_class.superclass.name.underscore}/projects/#{project.id}" }
+ let(:timeout) { 10.seconds }
+
+ context 'when exclusive lease has not been taken' do
+ it 'obtains a new exclusive lease' do
+ expect_to_obtain_exclusive_lease(lock_key, timeout: timeout)
+
+ worker.perform(project.id, user.id)
+ end
+ end
+
+ context 'when exclusive lease has already been taken' do
+ before do
+ stub_exclusive_lease_taken(lock_key, timeout: timeout)
+ end
+
+ it 'raises an error' do
+ expect { worker.perform(project.id, user.id) }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb b/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb
index 0da58343773..da4b726c0b5 100644
--- a/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb
+++ b/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe AuthorizedProjectUpdate::UserRefreshFromReplicaWorker do
end
end
- context 'with load balancing enabled', :db_load_balancing do
+ context 'with load balancing enabled' do
it 'reads from the replica database' do
expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
diff --git a/spec/workers/build_finished_worker_spec.rb b/spec/workers/build_finished_worker_spec.rb
index 4e34d2348d6..2ca7837066b 100644
--- a/spec/workers/build_finished_worker_spec.rb
+++ b/spec/workers/build_finished_worker_spec.rb
@@ -3,7 +3,9 @@
require 'spec_helper'
RSpec.describe BuildFinishedWorker do
- subject { described_class.new.perform(build.id) }
+ let(:worker) { described_class.new }
+
+ subject { worker.perform(build.id) }
describe '#perform' do
context 'when build exists' do
@@ -63,6 +65,30 @@ RSpec.describe BuildFinishedWorker do
subject
end
end
+
+ context 'when project is deleted' do
+ before do
+ allow(build).to receive(:project).and_return(nil)
+ end
+
+ it 'does no processing' do
+ expect(worker).not_to receive(:process_build)
+
+ subject
+ end
+ end
+
+ context 'when project is pending_delete' do
+ before do
+ build.project.update_attribute(:pending_delete, true)
+ end
+
+ it 'does no processing' do
+ expect(worker).not_to receive(:process_build)
+
+ subject
+ end
+ end
end
context 'when build does not exist' do
diff --git a/spec/workers/bulk_import_worker_spec.rb b/spec/workers/bulk_import_worker_spec.rb
index b67c5c62f76..12e29573156 100644
--- a/spec/workers/bulk_import_worker_spec.rb
+++ b/spec/workers/bulk_import_worker_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe BulkImportWorker do
expect { subject.perform(bulk_import.id) }
.to change(BulkImports::Tracker, :count)
- .by(BulkImports::Groups::Stage.pipelines.size * 2)
+ .by(BulkImports::Groups::Stage.new(bulk_import).pipelines.size * 2)
expect(entity_1.trackers).not_to be_empty
expect(entity_2.trackers).not_to be_empty
@@ -111,10 +111,10 @@ RSpec.describe BulkImportWorker do
end
context 'when there are project entities to process' do
- it 'does not enqueue ExportRequestWorker' do
+ it 'enqueues ExportRequestWorker' do
create(:bulk_import_entity, :created, :project_entity, bulk_import: bulk_import)
- expect(BulkImports::ExportRequestWorker).not_to receive(:perform_async)
+ expect(BulkImports::ExportRequestWorker).to receive(:perform_async).once
subject.perform(bulk_import.id)
end
diff --git a/spec/workers/bulk_imports/export_request_worker_spec.rb b/spec/workers/bulk_imports/export_request_worker_spec.rb
index cb280c6d263..f838bff528c 100644
--- a/spec/workers/bulk_imports/export_request_worker_spec.rb
+++ b/spec/workers/bulk_imports/export_request_worker_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe BulkImports::ExportRequestWorker do
let_it_be(:bulk_import) { create(:bulk_import) }
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import) }
- let_it_be(:entity) { create(:bulk_import_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
let_it_be(:version_url) { 'https://gitlab.example/api/v4/version' }
let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }
@@ -20,16 +19,30 @@ RSpec.describe BulkImports::ExportRequestWorker do
allow(Gitlab::HTTP).to receive(:post).and_return(response_double)
end
- include_examples 'an idempotent worker' do
- it 'requests relations export' do
- expected = "/groups/foo%2Fbar/export_relations"
+ shared_examples 'requests relations export for api resource' do
+ include_examples 'an idempotent worker' do
+ it 'requests relations export' do
+ expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ expect(client).to receive(:post).with(expected).twice
+ end
- expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
- expect(client).to receive(:post).with(expected).twice
+ perform_multiple(job_args)
end
-
- perform_multiple(job_args)
end
end
+
+ context 'when entity is group' do
+ let(:entity) { create(:bulk_import_entity, :group_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
+ let(:expected) { '/groups/foo%2Fbar/export_relations'}
+
+ include_examples 'requests relations export for api resource'
+ end
+
+ context 'when entity is project' do
+ let(:entity) { create(:bulk_import_entity, :project_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
+ let(:expected) { '/projects/foo%2Fbar/export_relations' }
+
+ include_examples 'requests relations export for api resource'
+ end
end
end
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
index 56f28654ac5..c902d1f2034 100644
--- a/spec/workers/bulk_imports/pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -22,47 +22,65 @@ RSpec.describe BulkImports::PipelineWorker do
before do
stub_const('FakePipeline', pipeline_class)
- allow(BulkImports::Groups::Stage)
- .to receive(:pipelines)
- .and_return([[0, pipeline_class]])
+ allow_next_instance_of(BulkImports::Groups::Stage) do |instance|
+ allow(instance).to receive(:pipelines)
+ .and_return([[0, pipeline_class]])
+ end
end
- it 'runs the given pipeline successfully' do
- pipeline_tracker = create(
- :bulk_import_tracker,
- entity: entity,
- pipeline_name: 'FakePipeline'
- )
+ shared_examples 'successfully runs the pipeline' do
+ it 'runs the given pipeline successfully' do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ worker: described_class.name,
+ pipeline_name: 'FakePipeline',
+ entity_id: entity.id
+ )
+ end
+
+ expect(BulkImports::EntityWorker)
+ .to receive(:perform_async)
+ .with(entity.id, pipeline_tracker.stage)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- worker: described_class.name,
- pipeline_name: 'FakePipeline',
- entity_id: entity.id
- )
- end
+ expect(subject).to receive(:jid).and_return('jid')
- expect(BulkImports::EntityWorker)
- .to receive(:perform_async)
- .with(entity.id, pipeline_tracker.stage)
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
- expect(subject).to receive(:jid).and_return('jid')
+ pipeline_tracker.reload
- subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+ expect(pipeline_tracker.status_name).to eq(:finished)
+ expect(pipeline_tracker.jid).to eq('jid')
+ end
+ end
- pipeline_tracker.reload
+ it_behaves_like 'successfully runs the pipeline' do
+ let(:pipeline_tracker) do
+ create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'FakePipeline'
+ )
+ end
+ end
- expect(pipeline_tracker.status_name).to eq(:finished)
- expect(pipeline_tracker.jid).to eq('jid')
+ it_behaves_like 'successfully runs the pipeline' do
+ let(:pipeline_tracker) do
+ create(
+ :bulk_import_tracker,
+ :started,
+ entity: entity,
+ pipeline_name: 'FakePipeline'
+ )
+ end
end
context 'when the pipeline cannot be found' do
it 'logs the error' do
pipeline_tracker = create(
:bulk_import_tracker,
- :started,
+ :finished,
entity: entity,
pipeline_name: 'FakePipeline'
)
@@ -126,6 +144,39 @@ RSpec.describe BulkImports::PipelineWorker do
expect(pipeline_tracker.status_name).to eq(:failed)
expect(pipeline_tracker.jid).to eq('jid')
end
+
+ context 'when it is a network error' do
+ it 'reenqueue on retriable network errors' do
+ pipeline_tracker = create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'FakePipeline'
+ )
+
+ exception = BulkImports::NetworkError.new(
+ response: double(code: 429, headers: {})
+ )
+
+ expect_next_instance_of(pipeline_class) do |pipeline|
+ expect(pipeline)
+ .to receive(:run)
+ .and_raise(exception)
+ end
+
+ expect(subject).to receive(:jid).and_return('jid')
+
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(
+ 60.seconds,
+ pipeline_tracker.id,
+ pipeline_tracker.stage,
+ pipeline_tracker.entity.id
+ )
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+ end
+ end
end
context 'when ndjson pipeline' do
@@ -156,9 +207,10 @@ RSpec.describe BulkImports::PipelineWorker do
before do
stub_const('NdjsonPipeline', ndjson_pipeline)
- allow(BulkImports::Groups::Stage)
- .to receive(:pipelines)
- .and_return([[0, ndjson_pipeline]])
+ allow_next_instance_of(BulkImports::Groups::Stage) do |instance|
+ allow(instance).to receive(:pipelines)
+ .and_return([[0, ndjson_pipeline]])
+ end
end
it 'runs the pipeline successfully' do
diff --git a/spec/workers/ci/create_downstream_pipeline_worker_spec.rb b/spec/workers/ci/create_downstream_pipeline_worker_spec.rb
new file mode 100644
index 00000000000..7a75da850d9
--- /dev/null
+++ b/spec/workers/ci/create_downstream_pipeline_worker_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CreateDownstreamPipelineWorker do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
+ let(:bridge) { create(:ci_bridge, user: user, pipeline: pipeline) }
+
+ let(:service) { double('pipeline creation service') }
+
+ describe '#perform' do
+ context 'when bridge exists' do
+ it 'calls cross project pipeline creation service' do
+ expect(Ci::CreateDownstreamPipelineService)
+ .to receive(:new)
+ .with(project, user)
+ .and_return(service)
+
+ expect(service).to receive(:execute).with(bridge)
+
+ described_class.new.perform(bridge.id)
+ end
+ end
+
+ context 'when bridge does not exist' do
+ it 'does nothing' do
+ expect(Ci::CreateDownstreamPipelineService)
+ .not_to receive(:new)
+
+ described_class.new.perform(non_existing_record_id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/stuck_builds/drop_running_worker_spec.rb b/spec/workers/ci/stuck_builds/drop_running_worker_spec.rb
new file mode 100644
index 00000000000..6d3aa71fe81
--- /dev/null
+++ b/spec/workers/ci/stuck_builds/drop_running_worker_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::StuckBuilds::DropRunningWorker do
+ include ExclusiveLeaseHelpers
+
+ let(:worker) { described_class.new }
+ let(:lease_uuid) { SecureRandom.uuid }
+
+ describe '#perform' do
+ subject { worker.perform }
+
+ it_behaves_like 'an idempotent worker'
+
+ it 'executes an instance of Ci::StuckBuilds::DropRunningService' do
+ expect_to_obtain_exclusive_lease(worker.lease_key, lease_uuid)
+
+ expect_next_instance_of(Ci::StuckBuilds::DropRunningService) do |service|
+ expect(service).to receive(:execute).exactly(:once)
+ end
+
+ expect_to_cancel_exclusive_lease(worker.lease_key, lease_uuid)
+
+ subject
+ end
+ end
+end
diff --git a/spec/workers/ci/stuck_builds/drop_scheduled_worker_spec.rb b/spec/workers/ci/stuck_builds/drop_scheduled_worker_spec.rb
new file mode 100644
index 00000000000..57be799d890
--- /dev/null
+++ b/spec/workers/ci/stuck_builds/drop_scheduled_worker_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::StuckBuilds::DropScheduledWorker do
+ include ExclusiveLeaseHelpers
+
+ let(:worker) { described_class.new }
+ let(:lease_uuid) { SecureRandom.uuid }
+
+ describe '#perform' do
+ subject { worker.perform }
+
+ it_behaves_like 'an idempotent worker'
+
+ it 'executes an instance of Ci::StuckBuilds::DropScheduledService with an exclusive lease' do
+ expect_to_obtain_exclusive_lease(worker.lease_key, lease_uuid)
+
+ expect_next_instance_of(Ci::StuckBuilds::DropScheduledService) do |service|
+ expect(service).to receive(:execute).exactly(:once)
+ end
+
+ expect_to_cancel_exclusive_lease(worker.lease_key, lease_uuid)
+
+ subject
+ end
+ end
+end
diff --git a/spec/workers/cleanup_container_repository_worker_spec.rb b/spec/workers/cleanup_container_repository_worker_spec.rb
index 9cf8974a2a1..6ae4308bd46 100644
--- a/spec/workers/cleanup_container_repository_worker_spec.rb
+++ b/spec/workers/cleanup_container_repository_worker_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe CleanupContainerRepositoryWorker, :clean_gitlab_redis_shared_stat
it 'executes the destroy service' do
expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
- .with(project, user, params.merge('container_expiration_policy' => false))
+ .with(repository, user, params.merge('container_expiration_policy' => false))
.and_return(service)
expect(service).to receive(:execute)
@@ -49,7 +49,7 @@ RSpec.describe CleanupContainerRepositoryWorker, :clean_gitlab_redis_shared_stat
expect(repository).to receive(:start_expiration_policy!).and_call_original
expect(repository).to receive(:reset_expiration_policy_started_at!).and_call_original
expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
- .with(project, nil, params.merge('container_expiration_policy' => true))
+ .with(repository, nil, params.merge('container_expiration_policy' => true))
.and_return(service)
expect(service).to receive(:execute).and_return(status: :success)
@@ -62,7 +62,7 @@ RSpec.describe CleanupContainerRepositoryWorker, :clean_gitlab_redis_shared_stat
expect(repository).to receive(:start_expiration_policy!).and_call_original
expect(repository).not_to receive(:reset_expiration_policy_started_at!)
expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
- .with(project, nil, params.merge('container_expiration_policy' => true))
+ .with(repository, nil, params.merge('container_expiration_policy' => true))
.and_return(service)
expect(service).to receive(:execute).and_return(status: :error, message: 'timeout while deleting tags')
diff --git a/spec/workers/concerns/application_worker_spec.rb b/spec/workers/concerns/application_worker_spec.rb
index ac4e4a682c8..af038c81b9e 100644
--- a/spec/workers/concerns/application_worker_spec.rb
+++ b/spec/workers/concerns/application_worker_spec.rb
@@ -248,6 +248,10 @@ RSpec.describe ApplicationWorker do
end
describe '.perform_async' do
+ before do
+ stub_const(worker.name, worker)
+ end
+
shared_examples_for 'worker utilizes load balancing capabilities' do |data_consistency|
before do
worker.data_consistency(data_consistency)
@@ -282,6 +286,10 @@ RSpec.describe ApplicationWorker do
end
describe '.bulk_perform_async' do
+ before do
+ stub_const(worker.name, worker)
+ end
+
it 'enqueues jobs in bulk' do
Sidekiq::Testing.fake! do
worker.bulk_perform_async([['Foo', [1]], ['Foo', [2]]])
@@ -293,6 +301,10 @@ RSpec.describe ApplicationWorker do
end
describe '.bulk_perform_in' do
+ before do
+ stub_const(worker.name, worker)
+ end
+
context 'when delay is valid' do
it 'correctly schedules jobs' do
Sidekiq::Testing.fake! do
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index c1ac5ffebe8..b5252294b27 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::ObjectImporter do
+RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures do
let(:worker) do
Class.new do
def self.name
@@ -26,9 +26,15 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
let(:importer_class) { double(:importer_class, name: 'klass_name') }
let(:importer_instance) { double(:importer_instance) }
let(:client) { double(:client) }
+ let(:github_identifiers) do
+ {
+ some_id: 1,
+ some_type: '_some_type_'
+ }
+ end
- before do
- stub_const('MockRepresantation', Class.new do
+ let(:representation_class) do
+ Class.new do
include Gitlab::GithubImport::Representation::ToHash
include Gitlab::GithubImport::Representation::ExposeAttribute
@@ -41,7 +47,20 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
def initialize(attributes)
@attributes = attributes
end
- end)
+
+ def github_identifiers
+ {
+ some_id: 1,
+ some_type: '_some_type_'
+ }
+ end
+ end
+ end
+
+ let(:stubbed_representation) { representation_class }
+
+ before do
+ stub_const('MockRepresantation', stubbed_representation)
end
describe '#import', :clean_gitlab_redis_cache do
@@ -64,7 +83,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
- github_id: 1,
+ github_identifiers: github_identifiers,
message: 'starting importer',
project_id: project.id,
importer: 'klass_name'
@@ -73,7 +92,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
- github_id: 1,
+ github_identifiers: github_identifiers,
message: 'importer finished',
project_id: project.id,
importer: 'klass_name'
@@ -101,7 +120,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
- github_id: 1,
+ github_identifiers: github_identifiers,
message: 'starting importer',
project_id: project.id,
importer: 'klass_name'
@@ -125,21 +144,25 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(project.import_failures.last.exception_message).to eq('some error')
end
- it 'logs error when representation does not have a github_id' do
- expect(importer_class).not_to receive(:new)
+ context 'without github_identifiers defined' do
+ let(:stubbed_representation) { representation_class.instance_eval { undef_method :github_identifiers } }
- expect(Gitlab::Import::ImportFailureService)
- .to receive(:track)
- .with(
- project_id: project.id,
- exception: a_kind_of(KeyError),
- error_source: 'klass_name',
- fail_import: true
- )
- .and_call_original
+ it 'logs error when representation does not have a github_id' do
+ expect(importer_class).not_to receive(:new)
- expect { worker.import(project, client, { 'number' => 10 }) }
- .to raise_error(KeyError, 'key not found: :github_id')
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
+ .with(
+ project_id: project.id,
+ exception: a_kind_of(NoMethodError),
+ error_source: 'klass_name',
+ fail_import: true
+ )
+ .and_call_original
+
+ expect { worker.import(project, client, { 'number' => 10 }) }
+ .to raise_error(NoMethodError, /^undefined method `github_identifiers/)
+ end
end
end
end
diff --git a/spec/workers/concerns/worker_context_spec.rb b/spec/workers/concerns/worker_context_spec.rb
index ebdb752d900..80b427b2b42 100644
--- a/spec/workers/concerns/worker_context_spec.rb
+++ b/spec/workers/concerns/worker_context_spec.rb
@@ -13,6 +13,10 @@ RSpec.describe WorkerContext do
end
end
+ before do
+ stub_const(worker.name, worker)
+ end
+
describe '.worker_context' do
it 'allows modifying the context for the entire worker' do
worker.worker_context(user: build_stubbed(:user))
diff --git a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
index fdba67638c1..d4126fe688a 100644
--- a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
+++ b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
@@ -74,6 +74,30 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
end
end
+
+ context 'the cache hit ratio field' do
+ where(:after_truncate_size, :cached_tags_count, :ratio) do
+ nil | nil | nil
+ 10 | nil | nil
+ nil | 10 | nil
+ 0 | 5 | nil
+ 10 | 0 | 0
+ 10 | 5 | 0.5
+ 3 | 10 | (10 / 3.to_f)
+ end
+
+ with_them do
+ it 'is logged properly' do
+ service_response = cleanup_service_response(status: :unfinished, repository: repository, cleanup_tags_service_before_truncate_size: after_truncate_size, cleanup_tags_service_after_truncate_size: after_truncate_size, cleanup_tags_service_cached_tags_count: cached_tags_count)
+ expect(ContainerExpirationPolicies::CleanupService)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response, cleanup_status: :unfinished, truncated: false, cache_hit_ratio: ratio)
+ expect_log_info(project_id: project.id, container_repository_id: repository.id)
+
+ subject
+ end
+ end
+ end
end
context 'with an erroneous cleanup' do
@@ -372,7 +396,16 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
end
- def cleanup_service_response(status: :finished, repository:, cleanup_tags_service_original_size: 100, cleanup_tags_service_before_truncate_size: 80, cleanup_tags_service_after_truncate_size: 80, cleanup_tags_service_before_delete_size: 50, cleanup_tags_service_deleted_size: 50)
+ def cleanup_service_response(
+ status: :finished,
+ repository:,
+ cleanup_tags_service_original_size: 100,
+ cleanup_tags_service_before_truncate_size: 80,
+ cleanup_tags_service_after_truncate_size: 80,
+ cleanup_tags_service_before_delete_size: 50,
+ cleanup_tags_service_deleted_size: 50,
+ cleanup_tags_service_cached_tags_count: 0
+ )
ServiceResponse.success(
message: "cleanup #{status}",
payload: {
@@ -381,21 +414,35 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
cleanup_tags_service_original_size: cleanup_tags_service_original_size,
cleanup_tags_service_before_truncate_size: cleanup_tags_service_before_truncate_size,
cleanup_tags_service_after_truncate_size: cleanup_tags_service_after_truncate_size,
- cleanup_tags_service_before_delete_size: cleanup_tags_service_before_delete_size
+ cleanup_tags_service_before_delete_size: cleanup_tags_service_before_delete_size,
+ cleanup_tags_service_cached_tags_count: cleanup_tags_service_cached_tags_count
}.compact
)
end
- def expect_log_extra_metadata(service_response:, cleanup_status: :finished, truncated: false)
+ def expect_log_extra_metadata(service_response:, cleanup_status: :finished, truncated: false, cache_hit_ratio: 0)
expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
expect(worker).to receive(:log_extra_metadata_on_done).with(:project_id, repository.project.id)
expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, cleanup_status)
- %i[cleanup_tags_service_original_size cleanup_tags_service_before_truncate_size cleanup_tags_service_after_truncate_size cleanup_tags_service_before_delete_size cleanup_tags_service_deleted_size].each do |field|
+ %i[
+ cleanup_tags_service_original_size
+ cleanup_tags_service_before_truncate_size
+ cleanup_tags_service_after_truncate_size
+ cleanup_tags_service_before_delete_size
+ cleanup_tags_service_deleted_size
+ cleanup_tags_service_cached_tags_count
+ ].each do |field|
value = service_response.payload[field]
expect(worker).to receive(:log_extra_metadata_on_done).with(field, value) unless value.nil?
end
expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_tags_service_truncated, truncated)
+
+ after_truncate_size = service_response.payload[:cleanup_tags_service_after_truncate_size]
+ if cache_hit_ratio && after_truncate_size && after_truncate_size != 0
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_tags_service_cache_hit_ratio, cache_hit_ratio)
+ end
+
expect(worker).to receive(:log_extra_metadata_on_done).with(:running_jobs_count, 0)
if service_response.error?
diff --git a/spec/workers/container_expiration_policy_worker_spec.rb b/spec/workers/container_expiration_policy_worker_spec.rb
index 9f370b10f6a..ebf80041151 100644
--- a/spec/workers/container_expiration_policy_worker_spec.rb
+++ b/spec/workers/container_expiration_policy_worker_spec.rb
@@ -156,7 +156,7 @@ RSpec.describe ContainerExpirationPolicyWorker do
subject
end
- context 'with load balancing enabled', :db_load_balancing do
+ context 'with load balancing enabled' do
it 'reads the counts from the replica' do
expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
diff --git a/spec/workers/create_note_diff_file_worker_spec.rb b/spec/workers/create_note_diff_file_worker_spec.rb
index 4c1df8ade06..6d1d6d93e44 100644
--- a/spec/workers/create_note_diff_file_worker_spec.rb
+++ b/spec/workers/create_note_diff_file_worker_spec.rb
@@ -14,5 +14,23 @@ RSpec.describe CreateNoteDiffFileWorker do
described_class.new.perform(diff_note.id)
end
+
+ context "when the supplied diff_note_id doesn't belong to an existing DiffNote" do
+ it "returns nil without raising an error" do
+ expect_any_instance_of(DiffNote).not_to receive(:create_diff_file)
+ .and_call_original
+
+ described_class.new.perform(non_existing_record_id)
+ end
+ end
+
+ context "when called with a missing diff_note id" do
+ it "returns nil without creating diff file" do
+ expect_any_instance_of(DiffNote).not_to receive(:create_diff_file)
+ .and_call_original
+
+ described_class.new.perform(nil)
+ end
+ end
end
end
diff --git a/spec/workers/database/drop_detached_partitions_worker_spec.rb b/spec/workers/database/drop_detached_partitions_worker_spec.rb
index 42c3fa3c188..8693878ddd5 100644
--- a/spec/workers/database/drop_detached_partitions_worker_spec.rb
+++ b/spec/workers/database/drop_detached_partitions_worker_spec.rb
@@ -6,16 +6,15 @@ RSpec.describe Database::DropDetachedPartitionsWorker do
describe '#perform' do
subject { described_class.new.perform }
- let(:dropper) { instance_double('DropDetachedPartitions', perform: nil) }
let(:monitoring) { instance_double('PartitionMonitoring', report_metrics: nil) }
before do
- allow(Gitlab::Database::Partitioning::DetachedPartitionDropper).to receive(:new).and_return(dropper)
+ allow(Gitlab::Database::Partitioning).to receive(:drop_detached_partitions)
allow(Gitlab::Database::Partitioning::PartitionMonitoring).to receive(:new).and_return(monitoring)
end
- it 'delegates to DropPartitionsPendingDrop' do
- expect(dropper).to receive(:perform)
+ it 'delegates to Partitioning.drop_detached_partitions' do
+ expect(Gitlab::Database::Partitioning).to receive(:drop_detached_partitions)
subject
end
diff --git a/spec/workers/dependency_proxy/cleanup_blob_worker_spec.rb b/spec/workers/dependency_proxy/cleanup_blob_worker_spec.rb
new file mode 100644
index 00000000000..b67a56cca7b
--- /dev/null
+++ b/spec/workers/dependency_proxy/cleanup_blob_worker_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DependencyProxy::CleanupBlobWorker do
+ let_it_be(:factory_type) { :dependency_proxy_blob }
+
+ it_behaves_like 'dependency_proxy_cleanup_worker'
+end
diff --git a/spec/workers/dependency_proxy/cleanup_manifest_worker_spec.rb b/spec/workers/dependency_proxy/cleanup_manifest_worker_spec.rb
new file mode 100644
index 00000000000..d53b3e6a1fd
--- /dev/null
+++ b/spec/workers/dependency_proxy/cleanup_manifest_worker_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DependencyProxy::CleanupManifestWorker do
+ let_it_be(:factory_type) { :dependency_proxy_manifest }
+
+ it_behaves_like 'dependency_proxy_cleanup_worker'
+end
diff --git a/spec/workers/dependency_proxy/image_ttl_group_policy_worker_spec.rb b/spec/workers/dependency_proxy/image_ttl_group_policy_worker_spec.rb
new file mode 100644
index 00000000000..d3234f4c212
--- /dev/null
+++ b/spec/workers/dependency_proxy/image_ttl_group_policy_worker_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DependencyProxy::ImageTtlGroupPolicyWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ let_it_be(:policy) { create(:image_ttl_group_policy) }
+ let_it_be(:group) { policy.group }
+
+ subject { worker.perform }
+
+ context 'when there are images to expire' do
+ let_it_be_with_reload(:old_blob) { create(:dependency_proxy_blob, group: group, updated_at: 1.year.ago) }
+ let_it_be_with_reload(:old_manifest) { create(:dependency_proxy_manifest, group: group, updated_at: 1.year.ago) }
+ let_it_be_with_reload(:new_blob) { create(:dependency_proxy_blob, group: group) }
+ let_it_be_with_reload(:new_manifest) { create(:dependency_proxy_manifest, group: group) }
+
+ it 'calls the limited capacity workers', :aggregate_failures do
+ expect(DependencyProxy::CleanupBlobWorker).to receive(:perform_with_capacity)
+ expect(DependencyProxy::CleanupManifestWorker).to receive(:perform_with_capacity)
+
+ subject
+ end
+
+ it 'updates the old images to expired' do
+ expect { subject }
+ .to change { old_blob.reload.status }.from('default').to('expired')
+ .and change { old_manifest.reload.status }.from('default').to('expired')
+ .and not_change { new_blob.reload.status }
+ .and not_change { new_manifest.reload.status }
+ end
+ end
+
+ context 'when there are no images to expire' do
+ it 'does not do anything', :aggregate_failures do
+ expect(DependencyProxy::CleanupBlobWorker).not_to receive(:perform_with_capacity)
+ expect(DependencyProxy::CleanupManifestWorker).not_to receive(:perform_with_capacity)
+
+ subject
+ end
+ end
+
+ context 'counts logging' do
+ let_it_be(:expired_blob) { create(:dependency_proxy_blob, :expired, group: group) }
+ let_it_be(:expired_blob2) { create(:dependency_proxy_blob, :expired, group: group) }
+ let_it_be(:expired_manifest) { create(:dependency_proxy_manifest, :expired, group: group) }
+ let_it_be(:processing_blob) { create(:dependency_proxy_blob, status: :processing, group: group) }
+ let_it_be(:processing_manifest) { create(:dependency_proxy_manifest, status: :processing, group: group) }
+ let_it_be(:error_blob) { create(:dependency_proxy_blob, status: :error, group: group) }
+ let_it_be(:error_manifest) { create(:dependency_proxy_manifest, status: :error, group: group) }
+
+ it 'logs all the counts', :aggregate_failures do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:expired_dependency_proxy_blob_count, 2)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:expired_dependency_proxy_manifest_count, 1)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:processing_dependency_proxy_blob_count, 1)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:processing_dependency_proxy_manifest_count, 1)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:error_dependency_proxy_blob_count, 1)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:error_dependency_proxy_manifest_count, 1)
+
+ subject
+ end
+
+ context 'with load balancing enabled', :db_load_balancing do
+ it 'reads the counts from the replica' do
+ expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
+
+ subject
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/email_receiver_worker_spec.rb b/spec/workers/email_receiver_worker_spec.rb
index d26c08fb221..83e13ded7b3 100644
--- a/spec/workers/email_receiver_worker_spec.rb
+++ b/spec/workers/email_receiver_worker_spec.rb
@@ -80,6 +80,21 @@ RSpec.describe EmailReceiverWorker, :mailer do
expect(email).to be_nil
end
end
+
+ context 'when the error is RateLimitedService::RateLimitedError' do
+ let(:error) { RateLimitedService::RateLimitedError.new(key: :issues_create, rate_limiter: Gitlab::ApplicationRateLimiter) }
+
+ it 'does not report the error to the sender' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(error).and_call_original
+
+ perform_enqueued_jobs do
+ described_class.new.perform(raw_message)
+ end
+
+ email = ActionMailer::Base.deliveries.last
+ expect(email).to be_nil
+ end
+ end
end
end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 235a1f6e3dd..9a4b27997e9 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe 'Every Sidekiq worker' do
describe "feature category declarations" do
let(:feature_categories) do
- YAML.load_file(Rails.root.join('config', 'feature_categories.yml')).map(&:to_sym).to_set
+ Gitlab::FeatureCategories.default.categories.map(&:to_sym).to_set
end
# All Sidekiq worker classes should declare a valid `feature_category`
@@ -155,11 +155,13 @@ RSpec.describe 'Every Sidekiq worker' do
'Ci::BuildScheduleWorker' => 3,
'Ci::BuildTraceChunkFlushWorker' => 3,
'Ci::CreateCrossProjectPipelineWorker' => 3,
+ 'Ci::CreateDownstreamPipelineWorker' => 3,
'Ci::DailyBuildGroupReportResultsWorker' => 3,
'Ci::DeleteObjectsWorker' => 0,
'Ci::DropPipelineWorker' => 3,
'Ci::InitialPipelineProcessWorker' => 3,
'Ci::MergeRequests::AddTodoWhenBuildFailsWorker' => 3,
+ 'Ci::Minutes::UpdateProjectAndNamespaceUsageWorker' => 3,
'Ci::PipelineArtifacts::CoverageReportWorker' => 3,
'Ci::PipelineArtifacts::CreateQualityReportWorker' => 3,
'Ci::PipelineBridgeStatusWorker' => 3,
@@ -197,6 +199,8 @@ RSpec.describe 'Every Sidekiq worker' do
'DeleteMergedBranchesWorker' => 3,
'DeleteStoredFilesWorker' => 3,
'DeleteUserWorker' => 3,
+ 'DependencyProxy::CleanupBlobWorker' => 3,
+ 'DependencyProxy::CleanupManifestWorker' => 3,
'Deployments::AutoRollbackWorker' => 3,
'Deployments::DropOlderDeploymentsWorker' => 3,
'Deployments::FinishedWorker' => 3,
@@ -233,29 +237,27 @@ RSpec.describe 'Every Sidekiq worker' do
'FlushCounterIncrementsWorker' => 3,
'Geo::Batch::ProjectRegistrySchedulerWorker' => 3,
'Geo::Batch::ProjectRegistryWorker' => 3,
- 'Geo::ContainerRepositorySyncWorker' => 3,
+ 'Geo::ContainerRepositorySyncWorker' => 1,
'Geo::DesignRepositoryShardSyncWorker' => false,
- 'Geo::DesignRepositorySyncWorker' => 3,
+ 'Geo::DesignRepositorySyncWorker' => 1,
'Geo::DestroyWorker' => 3,
'Geo::EventWorker' => 3,
'Geo::FileDownloadWorker' => 3,
'Geo::FileRegistryRemovalWorker' => 3,
'Geo::FileRemovalWorker' => 3,
- 'Geo::HashedStorageAttachmentsMigrationWorker' => 3,
- 'Geo::HashedStorageMigrationWorker' => 3,
- 'Geo::ProjectSyncWorker' => 3,
+ 'Geo::ProjectSyncWorker' => 1,
'Geo::RenameRepositoryWorker' => 3,
- 'Geo::RepositoriesCleanUpWorker' => 3,
'Geo::RepositoryCleanupWorker' => 3,
'Geo::RepositoryShardSyncWorker' => false,
'Geo::RepositoryVerification::Primary::ShardWorker' => false,
'Geo::RepositoryVerification::Primary::SingleWorker' => false,
'Geo::RepositoryVerification::Secondary::SingleWorker' => false,
'Geo::ReverificationBatchWorker' => 0,
- 'Geo::Scheduler::Primary::SchedulerWorker' => 3,
- 'Geo::Scheduler::SchedulerWorker' => 3,
- 'Geo::Scheduler::Secondary::SchedulerWorker' => 3,
+ 'Geo::Scheduler::Primary::SchedulerWorker' => false,
+ 'Geo::Scheduler::SchedulerWorker' => false,
+ 'Geo::Scheduler::Secondary::SchedulerWorker' => false,
'Geo::VerificationBatchWorker' => 0,
+ 'Geo::VerificationStateBackfillWorker' => false,
'Geo::VerificationTimeoutWorker' => false,
'Geo::VerificationWorker' => 3,
'GeoRepositoryDestroyWorker' => 3,
@@ -357,14 +359,13 @@ RSpec.describe 'Every Sidekiq worker' do
'ObjectPool::ScheduleJoinWorker' => 3,
'ObjectStorage::BackgroundMoveWorker' => 5,
'ObjectStorage::MigrateUploadsWorker' => 3,
- 'Packages::Composer::CacheUpdateWorker' => 3,
+ 'Packages::Composer::CacheUpdateWorker' => false,
'Packages::Go::SyncPackagesWorker' => 3,
'Packages::Maven::Metadata::SyncWorker' => 3,
'Packages::Nuget::ExtractionWorker' => 3,
'Packages::Rubygems::ExtractionWorker' => 3,
'PagesDomainSslRenewalWorker' => 3,
'PagesDomainVerificationWorker' => 3,
- 'PagesRemoveWorker' => 3,
'PagesTransferWorker' => 3,
'PagesUpdateConfigurationWorker' => 3,
'PagesWorker' => 3,
diff --git a/spec/workers/expire_job_cache_worker_spec.rb b/spec/workers/expire_job_cache_worker_spec.rb
index 6b14ccea105..e9af39ed2df 100644
--- a/spec/workers/expire_job_cache_worker_spec.rb
+++ b/spec/workers/expire_job_cache_worker_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe ExpireJobCacheWorker do
let(:job_args) { job.id }
+ it_behaves_like 'an idempotent worker'
+
it_behaves_like 'worker with data consistency',
described_class,
data_consistency: :delayed
diff --git a/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
index 132fe1dc618..dd976eef28b 100644
--- a/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
@@ -7,39 +7,27 @@ RSpec.describe Gitlab::GithubImport::Stage::FinishImportWorker do
let(:worker) { described_class.new }
describe '#perform' do
- it 'marks the import as finished' do
+ it 'marks the import as finished and reports import statistics' do
expect(project).to receive(:after_import)
- expect(worker).to receive(:report_import_time).with(project)
-
- worker.import(double(:client), project)
- end
- end
-
- describe '#report_import_time' do
- it 'reports the total import time' do
- expect(worker.histogram)
- .to receive(:observe)
- .with({ project: project.path_with_namespace }, a_kind_of(Numeric))
- .and_call_original
-
- expect(worker.counter)
- .to receive(:increment)
- .and_call_original
+ expect_next_instance_of(Gitlab::Import::Metrics) do |instance|
+ expect(instance).to receive(:track_finished_import)
+ expect(instance).to receive(:duration).and_return(3.005)
+ end
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
- .with(
- message: 'GitHub project import finished',
- import_stage: 'Gitlab::GithubImport::Stage::FinishImportWorker',
- object_counts: {
- 'fetched' => {},
- 'imported' => {}
- },
- project_id: project.id,
- duration_s: a_kind_of(Numeric)
- )
+ .with(
+ message: 'GitHub project import finished',
+ import_stage: 'Gitlab::GithubImport::Stage::FinishImportWorker',
+ object_counts: {
+ 'fetched' => {},
+ 'imported' => {}
+ },
+ project_id: project.id,
+ duration_s: 3.01
+ )
- worker.report_import_time(project)
+ worker.import(double(:client), project)
end
end
end
diff --git a/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb
index f68d0838501..7b2218b1725 100644
--- a/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb
@@ -3,15 +3,15 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportBaseDataWorker do
- let(:project) { create(:project) }
- let(:import_state) { create(:import_state, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:import_state) { create(:import_state, project: project) }
+
let(:worker) { described_class.new }
+ let(:importer) { double(:importer) }
+ let(:client) { double(:client) }
describe '#import' do
it 'imports the base data of a project' do
- importer = double(:importer)
- client = double(:client)
-
described_class::IMPORTERS.each do |klass|
expect(klass)
.to receive(:new)
@@ -29,5 +29,23 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportBaseDataWorker do
worker.import(client, project)
end
+
+ it 'raises an error' do
+ exception = StandardError.new('_some_error_')
+
+ expect_next_instance_of(Gitlab::GithubImport::Importer::LabelsImporter) do |importer|
+ expect(importer).to receive(:execute).and_raise(exception)
+ end
+ expect(Gitlab::Import::ImportFailureService).to receive(:track)
+ .with(
+ project_id: project.id,
+ exception: exception,
+ error_source: described_class.name,
+ fail_import: true,
+ metrics: true
+ ).and_call_original
+
+ expect { worker.import(client, project) }.to raise_error(StandardError)
+ end
end
end
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
index 29578f9bf37..b18b5ce64d1 100644
--- a/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
@@ -3,14 +3,15 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsWorker do
- let(:project) { create(:project) }
- let(:import_state) { create(:import_state, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:import_state) { create(:import_state, project: project) }
+
let(:worker) { described_class.new }
+ let(:importer) { double(:importer) }
+ let(:client) { double(:client) }
describe '#import' do
it 'imports all the pull requests' do
- importer = double(:importer)
- client = double(:client)
waiter = Gitlab::JobWaiter.new(2, '123')
expect(Gitlab::GithubImport::Importer::PullRequestsImporter)
@@ -32,4 +33,22 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsWorker do
worker.import(client, project)
end
end
+
+ it 'raises an error' do
+ exception = StandardError.new('_some_error_')
+
+ expect_next_instance_of(Gitlab::GithubImport::Importer::PullRequestsImporter) do |importer|
+ expect(importer).to receive(:execute).and_raise(exception)
+ end
+ expect(Gitlab::Import::ImportFailureService).to receive(:track)
+ .with(
+ project_id: project.id,
+ exception: exception,
+ error_source: described_class.name,
+ fail_import: true,
+ metrics: true
+ ).and_call_original
+
+ expect { worker.import(client, project) }.to raise_error(StandardError)
+ end
end
diff --git a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
index 875fc082975..582cb76a6cd 100644
--- a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do
- let(:project) { double(:project, id: 4) }
+ let_it_be(:project) { create(:project, :import_started) }
let(:worker) { described_class.new }
@@ -43,6 +43,15 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do
expect(instance).to receive(:execute).and_raise(exception_class)
end
+ expect(Gitlab::Import::ImportFailureService).to receive(:track)
+ .with(
+ project_id: project.id,
+ exception: exception_class,
+ error_source: described_class.name,
+ fail_import: true,
+ metrics: true
+ ).and_call_original
+
expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
.not_to receive(:perform_async)
diff --git a/spec/workers/issue_placement_worker_spec.rb b/spec/workers/issue_placement_worker_spec.rb
index 780790dbb1b..50b9d58a5b0 100644
--- a/spec/workers/issue_placement_worker_spec.rb
+++ b/spec/workers/issue_placement_worker_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe IssuePlacementWorker do
it 'places all issues created at most 5 minutes before this one at the end, most recent last' do
expect { run_worker }.not_to change { irrelevant.reset.relative_position }
- expect(project.issues.order_relative_position_asc)
+ expect(project.issues.order_by_relative_position)
.to eq([issue_e, issue_b, issue_a, issue, issue_c, issue_f, issue_d])
expect(project.issues.where(relative_position: nil)).not_to exist
end
diff --git a/spec/workers/packages/composer/cache_cleanup_worker_spec.rb b/spec/workers/packages/composer/cache_cleanup_worker_spec.rb
index e69fe55acc2..39eac4e4ae1 100644
--- a/spec/workers/packages/composer/cache_cleanup_worker_spec.rb
+++ b/spec/workers/packages/composer/cache_cleanup_worker_spec.rb
@@ -18,12 +18,8 @@ RSpec.describe Packages::Composer::CacheCleanupWorker, type: :worker do
cache_file4.update_columns(namespace_id: nil)
end
- it 'deletes expired packages' do
- expect { subject }.to change { Packages::Composer::CacheFile.count }.by(-2)
- expect { cache_file1.reload }.not_to raise_error ActiveRecord::RecordNotFound
- expect { cache_file2.reload }.not_to raise_error ActiveRecord::RecordNotFound
- expect { cache_file3.reload }.to raise_error ActiveRecord::RecordNotFound
- expect { cache_file4.reload }.to raise_error ActiveRecord::RecordNotFound
+ it 'does nothing' do
+ expect { subject }.not_to change { Packages::Composer::CacheFile.count }
end
end
end
diff --git a/spec/workers/packages/composer/cache_update_worker_spec.rb b/spec/workers/packages/composer/cache_update_worker_spec.rb
index a0d8aa5d375..6c17d49e986 100644
--- a/spec/workers/packages/composer/cache_update_worker_spec.rb
+++ b/spec/workers/packages/composer/cache_update_worker_spec.rb
@@ -21,8 +21,8 @@ RSpec.describe Packages::Composer::CacheUpdateWorker, type: :worker do
include_examples 'an idempotent worker' do
context 'creating a package' do
- it 'updates the cache' do
- expect { subject }.to change { Packages::Composer::CacheFile.count }.by(1)
+ it 'does nothing' do
+ expect { subject }.to change { Packages::Composer::CacheFile.count }.by(0)
end
end
@@ -36,12 +36,12 @@ RSpec.describe Packages::Composer::CacheUpdateWorker, type: :worker do
package.destroy!
end
- it 'marks the file for deletion' do
+ it 'does nothing' do
expect { subject }.not_to change { Packages::Composer::CacheFile.count }
cache_file = Packages::Composer::CacheFile.last
- expect(cache_file.reload.delete_at).not_to be_nil
+ expect(cache_file.reload.delete_at).to be_nil
end
end
end
diff --git a/spec/workers/pages_remove_worker_spec.rb b/spec/workers/pages_remove_worker_spec.rb
deleted file mode 100644
index 9d49088b371..00000000000
--- a/spec/workers/pages_remove_worker_spec.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe PagesRemoveWorker do
- it 'does not raise error' do
- expect do
- described_class.new.perform(create(:project).id)
- end.not_to raise_error
- end
-end
diff --git a/spec/workers/pipeline_hooks_worker_spec.rb b/spec/workers/pipeline_hooks_worker_spec.rb
index 0ed00c0c66a..13a86c3d4fe 100644
--- a/spec/workers/pipeline_hooks_worker_spec.rb
+++ b/spec/workers/pipeline_hooks_worker_spec.rb
@@ -8,8 +8,10 @@ RSpec.describe PipelineHooksWorker do
let(:pipeline) { create(:ci_pipeline) }
it 'executes hooks for the pipeline' do
- expect_any_instance_of(Ci::Pipeline)
- .to receive(:execute_hooks)
+ hook_service = double
+
+ expect(Ci::Pipelines::HookService).to receive(:new).and_return(hook_service)
+ expect(hook_service).to receive(:execute)
described_class.new.perform(pipeline.id)
end
@@ -17,6 +19,8 @@ RSpec.describe PipelineHooksWorker do
context 'when pipeline does not exist' do
it 'does not raise exception' do
+ expect(Ci::Pipelines::HookService).not_to receive(:new)
+
expect { described_class.new.perform(123) }
.not_to raise_error
end
diff --git a/spec/workers/pipeline_process_worker_spec.rb b/spec/workers/pipeline_process_worker_spec.rb
index f8140d11f2e..6e95b7a4753 100644
--- a/spec/workers/pipeline_process_worker_spec.rb
+++ b/spec/workers/pipeline_process_worker_spec.rb
@@ -29,16 +29,6 @@ RSpec.describe PipelineProcessWorker do
end
end
- context 'when the FF ci_idempotent_pipeline_process_worker is disabled' do
- before do
- stub_feature_flags(ci_idempotent_pipeline_process_worker: false)
- end
-
- it 'is not deduplicated' do
- expect(described_class).not_to be_deduplication_enabled
- end
- end
-
describe '#perform' do
context 'when pipeline exists' do
it 'processes pipeline' do
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index ddd295215a1..039f86f1911 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -428,7 +428,7 @@ RSpec.describe PostReceive do
end
it 'expires the status cache' do
- expect(snippet.repository).to receive(:empty?).and_return(true)
+ expect(snippet.repository).to receive(:empty?).at_least(:once).and_return(true)
expect(snippet.repository).to receive(:expire_status_cache)
perform
diff --git a/spec/workers/run_pipeline_schedule_worker_spec.rb b/spec/workers/run_pipeline_schedule_worker_spec.rb
index fc572c0d9c3..bb11d1dbb58 100644
--- a/spec/workers/run_pipeline_schedule_worker_spec.rb
+++ b/spec/workers/run_pipeline_schedule_worker_spec.rb
@@ -68,5 +68,20 @@ RSpec.describe RunPipelineScheduleWorker do
worker.perform(pipeline_schedule.id, user.id)
end
end
+
+ context 'when pipeline cannot be created' do
+ before do
+ allow(Ci::CreatePipelineService).to receive(:new) { raise Ci::CreatePipelineService::CreateError }
+ end
+
+ it 'logging a pipeline error' do
+ expect(worker)
+ .to receive(:log_extra_metadata_on_done)
+ .with(:pipeline_creation_error, an_instance_of(Ci::CreatePipelineService::CreateError))
+ .and_call_original
+
+ worker.perform(pipeline_schedule.id, user.id)
+ end
+ end
end
end
diff --git a/spec/workers/stuck_ci_jobs_worker_spec.rb b/spec/workers/stuck_ci_jobs_worker_spec.rb
index e0a5d3c6c1c..19ff8ec55c2 100644
--- a/spec/workers/stuck_ci_jobs_worker_spec.rb
+++ b/spec/workers/stuck_ci_jobs_worker_spec.rb
@@ -5,50 +5,34 @@ require 'spec_helper'
RSpec.describe StuckCiJobsWorker do
include ExclusiveLeaseHelpers
- let(:worker_lease_key) { StuckCiJobsWorker::EXCLUSIVE_LEASE_KEY }
- let(:worker_lease_uuid) { SecureRandom.uuid }
- let(:worker2) { described_class.new }
-
- subject(:worker) { described_class.new }
-
- before do
- stub_exclusive_lease(worker_lease_key, worker_lease_uuid)
- end
+ let(:worker) { described_class.new }
+ let(:lease_uuid) { SecureRandom.uuid }
describe '#perform' do
- it 'executes an instance of Ci::StuckBuildsDropService' do
- expect_next_instance_of(Ci::StuckBuilds::DropService) do |service|
- expect(service).to receive(:execute).exactly(:once)
- end
-
- worker.perform
- end
+ subject { worker.perform }
- context 'with an exclusive lease' do
- it 'does not execute concurrently' do
- expect(worker).to receive(:remove_lease).exactly(:once)
- expect(worker2).not_to receive(:remove_lease)
+ it 'enqueues a Ci::StuckBuilds::DropRunningWorker job' do
+ expect(Ci::StuckBuilds::DropRunningWorker).to receive(:perform_in).with(20.minutes).exactly(:once)
- worker.perform
+ subject
+ end
- stub_exclusive_lease_taken(worker_lease_key)
+ it 'enqueues a Ci::StuckBuilds::DropScheduledWorker job' do
+ expect(Ci::StuckBuilds::DropScheduledWorker).to receive(:perform_in).with(40.minutes).exactly(:once)
- worker2.perform
- end
+ subject
+ end
- it 'can execute in sequence' do
- expect(worker).to receive(:remove_lease).at_least(:once)
- expect(worker2).to receive(:remove_lease).at_least(:once)
+ it 'executes an instance of Ci::StuckBuilds::DropPendingService' do
+ expect_to_obtain_exclusive_lease(worker.lease_key, lease_uuid)
- worker.perform
- worker2.perform
+ expect_next_instance_of(Ci::StuckBuilds::DropPendingService) do |service|
+ expect(service).to receive(:execute).exactly(:once)
end
- it 'cancels exclusive leases after worker perform' do
- expect_to_cancel_exclusive_lease(worker_lease_key, worker_lease_uuid)
+ expect_to_cancel_exclusive_lease(worker.lease_key, lease_uuid)
- worker.perform
- end
+ subject
end
end
end