summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
Diffstat (limited to 'spec')
-rw-r--r--spec/channels/application_cable/connection_spec.rb4
-rw-r--r--spec/channels/awareness_channel_spec.rb80
-rw-r--r--spec/components/pajamas/alert_component_spec.rb54
-rw-r--r--spec/components/pajamas/banner_component_spec.rb42
-rw-r--r--spec/components/pajamas/button_component_spec.rb74
-rw-r--r--spec/components/pajamas/card_component_spec.rb30
-rw-r--r--spec/components/pajamas/checkbox_component_spec.rb10
-rw-r--r--spec/components/pajamas/radio_component_spec.rb6
-rw-r--r--spec/components/pajamas/spinner_component_spec.rb74
-rw-r--r--spec/components/pajamas/toggle_component_spec.rb32
-rw-r--r--spec/contracts/consumer/endpoints/project/pipelines.js16
-rw-r--r--spec/contracts/consumer/fixtures/project/pipeline/get_list_project_pipelines.fixture.js243
-rw-r--r--spec/contracts/consumer/fixtures/project/pipeline/get_pipeline_header_data.fixture.js99
-rw-r--r--spec/contracts/consumer/helpers/common_regex_patterns.js24
-rw-r--r--spec/contracts/consumer/helpers/graphql_query_extractor.js8
-rw-r--r--spec/contracts/consumer/resources/graphql/pipelines.js25
-rw-r--r--spec/contracts/consumer/specs/project/pipeline/index.spec.js44
-rw-r--r--spec/contracts/consumer/specs/project/pipeline/show.spec.js53
-rw-r--r--spec/contracts/consumer/yarn.lock4775
-rw-r--r--spec/contracts/contracts/project/pipeline/index/pipelines#index-get_list_project_pipelines.json472
-rw-r--r--spec/contracts/contracts/project/pipeline/show/pipelines#show-get_pipeline_header_data.json152
-rw-r--r--spec/contracts/provider/pact_helpers/project/pipeline/get_list_project_pipelines_helper.rb16
-rw-r--r--spec/contracts/provider/pact_helpers/project/pipeline/get_pipeline_header_data_helper.rb16
-rw-r--r--spec/contracts/provider/states/project/pipeline/pipeline_state.rb27
-rw-r--r--spec/contracts/provider/states/project/pipeline/pipelines_state.rb26
-rw-r--r--spec/controllers/admin/application_settings_controller_spec.rb18
-rw-r--r--spec/controllers/admin/hooks_controller_spec.rb36
-rw-r--r--spec/controllers/admin/topics_controller_spec.rb22
-rw-r--r--spec/controllers/application_controller_spec.rb22
-rw-r--r--spec/controllers/concerns/harbor/artifact_spec.rb10
-rw-r--r--spec/controllers/concerns/harbor/repository_spec.rb10
-rw-r--r--spec/controllers/concerns/harbor/tag_spec.rb10
-rw-r--r--spec/controllers/graphql_controller_spec.rb12
-rw-r--r--spec/controllers/groups/group_links_controller_spec.rb20
-rw-r--r--spec/controllers/groups/variables_controller_spec.rb2
-rw-r--r--spec/controllers/groups_controller_spec.rb12
-rw-r--r--spec/controllers/import/available_namespaces_controller_spec.rb6
-rw-r--r--spec/controllers/import/bitbucket_controller_spec.rb36
-rw-r--r--spec/controllers/import/bitbucket_server_controller_spec.rb17
-rw-r--r--spec/controllers/import/bulk_imports_controller_spec.rb33
-rw-r--r--spec/controllers/import/fogbugz_controller_spec.rb50
-rw-r--r--spec/controllers/import/github_controller_spec.rb5
-rw-r--r--spec/controllers/import/gitlab_controller_spec.rb40
-rw-r--r--spec/controllers/profiles/emails_controller_spec.rb6
-rw-r--r--spec/controllers/profiles/personal_access_tokens_controller_spec.rb37
-rw-r--r--spec/controllers/projects/hooks_controller_spec.rb93
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb2
-rw-r--r--spec/controllers/projects/jobs_controller_spec.rb22
-rw-r--r--spec/controllers/projects/logs_controller_spec.rb214
-rw-r--r--spec/controllers/projects/mirrors_controller_spec.rb2
-rw-r--r--spec/controllers/projects/pipelines/tests_controller_spec.rb18
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb8
-rw-r--r--spec/controllers/projects/project_members_controller_spec.rb21
-rw-r--r--spec/controllers/projects/service_ping_controller_spec.rb12
-rw-r--r--spec/controllers/projects/settings/ci_cd_controller_spec.rb469
-rw-r--r--spec/controllers/projects/settings/integrations_controller_spec.rb28
-rw-r--r--spec/controllers/projects/settings/operations_controller_spec.rb102
-rw-r--r--spec/controllers/projects/tracings_controller_spec.rb72
-rw-r--r--spec/controllers/projects_controller_spec.rb18
-rw-r--r--spec/controllers/registrations_controller_spec.rb3
-rw-r--r--spec/controllers/search_controller_spec.rb39
-rw-r--r--spec/db/docs_spec.rb5
-rw-r--r--spec/db/schema_spec.rb10
-rw-r--r--spec/events/pages/page_deleted_event_spec.rb1
-rw-r--r--spec/events/pages/page_deployed_event_spec.rb34
-rw-r--r--spec/events/projects/project_created_event_spec.rb34
-rw-r--r--spec/events/projects/project_deleted_event_spec.rb1
-rw-r--r--spec/events/projects/project_path_changed_event_spec.rb46
-rw-r--r--spec/factories/ci/runner_versions.rb7
-rw-r--r--spec/factories/ci/stages.rb19
-rw-r--r--spec/factories/clusters/applications/helm.rb4
-rw-r--r--spec/factories/clusters/clusters.rb1
-rw-r--r--spec/factories/clusters/integrations/elastic_stack.rb12
-rw-r--r--spec/factories/error_tracking/open_api.rb41
-rw-r--r--spec/factories/gitlab/database/postgres_autovacuum_activity.rb10
-rw-r--r--spec/factories/import_states.rb4
-rw-r--r--spec/factories/integrations.rb2
-rw-r--r--spec/factories/keys.rb17
-rw-r--r--spec/factories/oauth_access_tokens.rb1
-rw-r--r--spec/factories/project_group_links.rb2
-rw-r--r--spec/factories/project_hooks.rb6
-rw-r--r--spec/factories/project_tracing_settings.rb8
-rw-r--r--spec/factories/projects.rb22
-rw-r--r--spec/factories/projects/import_export/export_relation.rb11
-rw-r--r--spec/factories/sequences.rb1
-rw-r--r--spec/factories/snippets.rb15
-rw-r--r--spec/factories/usage_data.rb4
-rw-r--r--spec/factories/users/namespace_user_callouts.rb10
-rw-r--r--spec/factories/work_items.rb5
-rw-r--r--spec/features/admin/admin_groups_spec.rb28
-rw-r--r--spec/features/admin/admin_projects_spec.rb28
-rw-r--r--spec/features/admin/admin_runners_spec.rb52
-rw-r--r--spec/features/admin/admin_sees_background_migrations_spec.rb4
-rw-r--r--spec/features/admin/admin_system_info_spec.rb6
-rw-r--r--spec/features/admin/users/user_spec.rb4
-rw-r--r--spec/features/dashboard/todos/todos_spec.rb19
-rw-r--r--spec/features/file_uploads/multipart_invalid_uploads_spec.rb2
-rw-r--r--spec/features/groups/group_runners_spec.rb44
-rw-r--r--spec/features/groups/group_settings_spec.rb4
-rw-r--r--spec/features/groups/import_export/import_file_spec.rb24
-rw-r--r--spec/features/groups/merge_requests_spec.rb2
-rw-r--r--spec/features/groups/settings/packages_and_registries_spec.rb22
-rw-r--r--spec/features/groups/settings/user_searches_in_settings_spec.rb2
-rw-r--r--spec/features/groups/show_spec.rb25
-rw-r--r--spec/features/groups_spec.rb18
-rw-r--r--spec/features/incidents/incident_details_spec.rb77
-rw-r--r--spec/features/incidents/incident_timeline_events_spec.rb70
-rw-r--r--spec/features/invites_spec.rb5
-rw-r--r--spec/features/issuables/issuable_list_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/visual_tokens_spec.rb4
-rw-r--r--spec/features/issues/user_creates_branch_and_merge_request_spec.rb16
-rw-r--r--spec/features/merge_request/batch_comments_spec.rb14
-rw-r--r--spec/features/merge_request/user_comments_on_diff_spec.rb2
-rw-r--r--spec/features/merge_request/user_creates_merge_request_spec.rb38
-rw-r--r--spec/features/merge_request/user_posts_diff_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_deployment_widget_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb28
-rw-r--r--spec/features/merge_request/user_sees_pipelines_spec.rb4
-rw-r--r--spec/features/merge_request/user_sees_versions_spec.rb36
-rw-r--r--spec/features/milestone_spec.rb2
-rw-r--r--spec/features/monitor_sidebar_link_spec.rb5
-rw-r--r--spec/features/nav/top_nav_responsive_spec.rb2
-rw-r--r--spec/features/nav/top_nav_tooltip_spec.rb4
-rw-r--r--spec/features/profiles/account_spec.rb2
-rw-r--r--spec/features/profiles/oauth_applications_spec.rb55
-rw-r--r--spec/features/profiles/password_spec.rb2
-rw-r--r--spec/features/projects/blobs/blame_spec.rb39
-rw-r--r--spec/features/projects/blobs/blob_show_spec.rb1551
-rw-r--r--spec/features/projects/branches_spec.rb2
-rw-r--r--spec/features/projects/ci/secure_files_spec.rb4
-rw-r--r--spec/features/projects/commits/multi_view_diff_spec.rb26
-rw-r--r--spec/features/projects/deploy_keys_spec.rb26
-rw-r--r--spec/features/projects/diffs/diff_show_spec.rb4
-rw-r--r--spec/features/projects/environments_pod_logs_spec.rb68
-rw-r--r--spec/features/projects/files/dockerfile_dropdown_spec.rb2
-rw-r--r--spec/features/projects/files/gitignore_dropdown_spec.rb4
-rw-r--r--spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb12
-rw-r--r--spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb8
-rw-r--r--spec/features/projects/jobs/permissions_spec.rb6
-rw-r--r--spec/features/projects/jobs/user_browses_job_spec.rb23
-rw-r--r--spec/features/projects/jobs/user_browses_jobs_spec.rb59
-rw-r--r--spec/features/projects/jobs_spec.rb76
-rw-r--r--spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb4
-rw-r--r--spec/features/projects/navbar_spec.rb18
-rw-r--r--spec/features/projects/new_project_spec.rb57
-rw-r--r--spec/features/projects/pipelines/legacy_pipelines_spec.rb4
-rw-r--r--spec/features/projects/releases/user_creates_release_spec.rb21
-rw-r--r--spec/features/projects/settings/registry_settings_spec.rb20
-rw-r--r--spec/features/projects/settings/repository_settings_spec.rb1
-rw-r--r--spec/features/projects/settings/secure_files_settings_spec.rb46
-rw-r--r--spec/features/projects/settings/secure_files_spec.rb101
-rw-r--r--spec/features/projects/settings/visibility_settings_spec.rb6
-rw-r--r--spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb2
-rw-r--r--spec/features/projects/show/user_sees_collaboration_links_spec.rb2
-rw-r--r--spec/features/projects/tags/user_edits_tags_spec.rb47
-rw-r--r--spec/features/projects/tracings_spec.rb60
-rw-r--r--spec/features/projects_spec.rb93
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_line_permalink_updater_spec.rb103
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_show_spec.rb1201
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/blobs/edit_spec.rb213
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/blobs/shortcuts_blob_spec.rb45
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/blobs/user_creates_new_blob_in_new_project_spec.rb63
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb80
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/blobs/user_views_pipeline_editor_button_spec.rb46
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/editing_a_file_spec.rb34
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/find_file_keyboard_spec.rb42
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/project_owner_creates_license_file_spec.rb72
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_files_spec.rb377
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_lfs_files_spec.rb86
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/user_deletes_files_spec.rb74
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/user_edits_files_spec.rb226
-rw-r--r--spec/features/search/user_uses_header_search_field_spec.rb4
-rw-r--r--spec/features/tags/developer_updates_tag_spec.rb2
-rw-r--r--spec/features/unsubscribe_links_spec.rb2
-rw-r--r--spec/features/users/email_verification_on_login_spec.rb357
-rw-r--r--spec/features/users/google_analytics_csp_spec.rb15
-rw-r--r--spec/features/users/login_spec.rb4
-rw-r--r--spec/features/users/show_spec.rb6
-rw-r--r--spec/finders/autocomplete/users_finder_spec.rb2
-rw-r--r--spec/finders/branches_finder_spec.rb8
-rw-r--r--spec/finders/ci/runners_finder_spec.rb61
-rw-r--r--spec/finders/contributed_projects_finder_spec.rb10
-rw-r--r--spec/finders/groups/user_groups_finder_spec.rb30
-rw-r--r--spec/finders/joined_groups_finder_spec.rb2
-rw-r--r--spec/finders/packages/conan/package_finder_spec.rb2
-rw-r--r--spec/finders/packages/group_packages_finder_spec.rb4
-rw-r--r--spec/finders/projects_finder_spec.rb29
-rw-r--r--spec/finders/user_recent_events_finder_spec.rb374
-rw-r--r--spec/fixtures/api/schemas/entities/commit.json2
-rw-r--r--spec/fixtures/api/schemas/entities/github/user.json2
-rw-r--r--spec/fixtures/api/schemas/entities/member.json2
-rw-r--r--spec/fixtures/api/schemas/entities/merge_request_basic.json1
-rw-r--r--spec/fixtures/api/schemas/entities/note_user_entity.json2
-rw-r--r--spec/fixtures/api/schemas/entities/user.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/commit/basic.json6
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/deploy_key.json3
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/environment.json5
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/metadata.json26
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/single_version.json55
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/project_hook.json62
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/project_hooks.json10
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/system_hook.json16
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/user/basic.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/user/public.json2
-rw-r--r--spec/fixtures/csv_complex.csv6
-rw-r--r--spec/fixtures/emails/service_desk.eml1
-rw-r--r--spec/fixtures/gitlab/git/gitattributes16
-rw-r--r--spec/fixtures/gitlab/git/gitattributes_invalidbin0 -> 16 bytes
-rw-r--r--spec/fixtures/gitlab/import_export/labels.tar.gzbin0 -> 768 bytes
-rw-r--r--spec/fixtures/glfm/example_snapshots/examples_index.yml2017
-rw-r--r--spec/fixtures/glfm/example_snapshots/html.yml7479
-rw-r--r--spec/fixtures/glfm/example_snapshots/markdown.yml2201
-rw-r--r--spec/fixtures/glfm/example_snapshots/prosemirror_json.yml17018
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/project.json82
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/tree/project/releases.ndjson4
-rw-r--r--spec/fixtures/product_intelligence/survey_response_schema.json58
-rw-r--r--spec/frontend/__helpers__/init_vue_mr_page_helper.js4
-rw-r--r--spec/frontend/__helpers__/matchers/index.js1
-rw-r--r--spec/frontend/__helpers__/matchers/to_match_expected_for_markdown.js60
-rw-r--r--spec/frontend/__helpers__/mock_user_callout_dismisser.js15
-rw-r--r--spec/frontend/__helpers__/performance.js8
-rw-r--r--spec/frontend/__helpers__/set_window_location_helper.js2
-rw-r--r--spec/frontend/__helpers__/shared_test_setup.js1
-rw-r--r--spec/frontend/__helpers__/web_worker_fake.js3
-rw-r--r--spec/frontend/access_tokens/components/access_token_table_app_spec.js15
-rw-r--r--spec/frontend/access_tokens/components/projects_field_spec.js8
-rw-r--r--spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap2
-rw-r--r--spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js2
-rw-r--r--spec/frontend/admin/deploy_keys/components/table_spec.js29
-rw-r--r--spec/frontend/admin/signup_restrictions/components/signup_form_spec.js8
-rw-r--r--spec/frontend/admin/signup_restrictions/mock_data.js8
-rw-r--r--spec/frontend/admin/signup_restrictions/utils_spec.js4
-rw-r--r--spec/frontend/admin/statistics_panel/components/app_spec.js2
-rw-r--r--spec/frontend/admin/users/components/user_actions_spec.js6
-rw-r--r--spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap6
-rw-r--r--spec/frontend/alerts_settings/components/alerts_settings_form_spec.js2
-rw-r--r--spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap1
-rw-r--r--spec/frontend/batch_comments/components/draft_note_spec.js11
-rw-r--r--spec/frontend/batch_comments/components/preview_dropdown_spec.js26
-rw-r--r--spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js12
-rw-r--r--spec/frontend/behaviors/shortcuts/keybindings_spec.js4
-rw-r--r--spec/frontend/blob/viewer/index_spec.js189
-rw-r--r--spec/frontend/boards/components/board_card_spec.js38
-rw-r--r--spec/frontend/boards/components/config_toggle_spec.js59
-rw-r--r--spec/frontend/boards/components/toggle_focus_spec.js47
-rw-r--r--spec/frontend/boards/mock_data.js2
-rw-r--r--spec/frontend/boards/project_select_spec.js8
-rw-r--r--spec/frontend/ci_lint/mock_data.js11
-rw-r--r--spec/frontend/ci_secure_files/components/secure_files_list_spec.js41
-rw-r--r--spec/frontend/clusters_list/components/agent_table_spec.js8
-rw-r--r--spec/frontend/clusters_list/components/clusters_spec.js4
-rw-r--r--spec/frontend/content_editor/components/bubble_menus/code_block_spec.js25
-rw-r--r--spec/frontend/content_editor/components/bubble_menus/formatting_spec.js24
-rw-r--r--spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js35
-rw-r--r--spec/frontend/content_editor/components/top_toolbar_spec.js10
-rw-r--r--spec/frontend/content_editor/extensions/html_nodes_spec.js42
-rw-r--r--spec/frontend/content_editor/extensions/paste_markdown_spec.js2
-rw-r--r--spec/frontend/content_editor/markdown_snapshot_spec.js10
-rw-r--r--spec/frontend/content_editor/markdown_snapshot_spec_helper.js102
-rw-r--r--spec/frontend/content_editor/remark_markdown_processing_spec.js619
-rw-r--r--spec/frontend/content_editor/render_html_and_json_for_all_examples.js115
-rw-r--r--spec/frontend/content_editor/services/markdown_serializer_spec.js28
-rw-r--r--spec/frontend/content_editor/services/markdown_sourcemap_spec.js2
-rw-r--r--spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js2
-rw-r--r--spec/frontend/cycle_analytics/path_navigation_spec.js2
-rw-r--r--spec/frontend/cycle_analytics/value_stream_metrics_spec.js3
-rw-r--r--spec/frontend/design_management/components/design_sidebar_spec.js61
-rw-r--r--spec/frontend/design_management/components/image_spec.js5
-rw-r--r--spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap4
-rw-r--r--spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap67
-rw-r--r--spec/frontend/design_management/pages/index_spec.js78
-rw-r--r--spec/frontend/diffs/components/app_spec.js2
-rw-r--r--spec/frontend/diffs/components/collapsed_files_warning_spec.js4
-rw-r--r--spec/frontend/diffs/components/diff_code_quality_spec.js66
-rw-r--r--spec/frontend/diffs/components/diff_content_spec.js6
-rw-r--r--spec/frontend/diffs/components/diff_expansion_cell_spec.js177
-rw-r--r--spec/frontend/diffs/components/diff_file_spec.js4
-rw-r--r--spec/frontend/diffs/components/diff_line_note_form_spec.js289
-rw-r--r--spec/frontend/diffs/components/diff_row_spec.js3
-rw-r--r--spec/frontend/diffs/components/diff_stats_spec.js3
-rw-r--r--spec/frontend/diffs/components/diff_view_spec.js32
-rw-r--r--spec/frontend/diffs/components/settings_dropdown_spec.js9
-rw-r--r--spec/frontend/diffs/components/tree_list_spec.js32
-rw-r--r--spec/frontend/diffs/mock_data/diff_code_quality.js62
-rw-r--r--spec/frontend/diffs/mock_data/diff_file.js4
-rw-r--r--spec/frontend/diffs/store/actions_spec.js4
-rw-r--r--spec/frontend/diffs/store/mutations_spec.js5
-rw-r--r--spec/frontend/diffs/store/utils_spec.js6
-rw-r--r--spec/frontend/diffs/utils/diff_file_spec.js6
-rw-r--r--spec/frontend/dirty_submit/dirty_submit_form_spec.js1
-rw-r--r--spec/frontend/emoji/awards_app/store/actions_spec.js12
-rw-r--r--spec/frontend/environment.js6
-rw-r--r--spec/frontend/environments/canary_update_modal_spec.js2
-rw-r--r--spec/frontend/environments/confirm_rollback_modal_spec.js15
-rw-r--r--spec/frontend/environments/deploy_board_component_spec.js8
-rw-r--r--spec/frontend/environments/environment_item_spec.js4
-rw-r--r--spec/frontend/environments/environment_table_spec.js3
-rw-r--r--spec/frontend/environments/environments_app_spec.js4
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js24
-rw-r--r--spec/frontend/fixtures/api_deploy_keys.rb1
-rw-r--r--spec/frontend/fixtures/blob.rb1
-rw-r--r--spec/frontend/fixtures/deploy_keys.rb6
-rw-r--r--spec/frontend/fixtures/jobs.rb98
-rw-r--r--spec/frontend/fixtures/runner.rb16
-rw-r--r--spec/frontend/gfm_auto_complete_spec.js11
-rw-r--r--spec/frontend/gitlab_pages/new/pages/pages_pipeline_wizard_spec.js102
-rw-r--r--spec/frontend/google_cloud/components/app_spec.js77
-rw-r--r--spec/frontend/google_cloud/components/errors/gcp_error_spec.js34
-rw-r--r--spec/frontend/google_cloud/components/errors/no_gcp_projects_spec.js33
-rw-r--r--spec/frontend/google_cloud/components/google_cloud_menu_spec.js40
-rw-r--r--spec/frontend/google_cloud/components/home_spec.js66
-rw-r--r--spec/frontend/google_cloud/components/incubation_banner_spec.js21
-rw-r--r--spec/frontend/google_cloud/components/revoke_oauth_spec.js2
-rw-r--r--spec/frontend/google_cloud/configuration/panel_spec.js65
-rw-r--r--spec/frontend/google_cloud/databases/cloudsql/create_instance_form_spec.js103
-rw-r--r--spec/frontend/google_cloud/databases/cloudsql/instance_table_spec.js65
-rw-r--r--spec/frontend/google_cloud/databases/panel_spec.js36
-rw-r--r--spec/frontend/google_cloud/databases/service_table_spec.js44
-rw-r--r--spec/frontend/google_cloud/deployments/panel_spec.js46
-rw-r--r--spec/frontend/google_cloud/deployments/service_table_spec.js (renamed from spec/frontend/google_cloud/components/deployments_service_table_spec.js)4
-rw-r--r--spec/frontend/google_cloud/gcp_regions/form_spec.js (renamed from spec/frontend/google_cloud/components/gcp_regions_form_spec.js)4
-rw-r--r--spec/frontend/google_cloud/gcp_regions/list_spec.js (renamed from spec/frontend/google_cloud/components/gcp_regions_list_spec.js)4
-rw-r--r--spec/frontend/google_cloud/service_accounts/form_spec.js (renamed from spec/frontend/google_cloud/components/service_accounts_form_spec.js)4
-rw-r--r--spec/frontend/google_cloud/service_accounts/list_spec.js (renamed from spec/frontend/google_cloud/components/service_accounts_list_spec.js)4
-rw-r--r--spec/frontend/google_tag_manager/index_spec.js11
-rw-r--r--spec/frontend/groups/components/group_folder_spec.js79
-rw-r--r--spec/frontend/groups/components/group_item_spec.js101
-rw-r--r--spec/frontend/groups/components/group_name_and_path_spec.js17
-rw-r--r--spec/frontend/groups/components/groups_spec.js72
-rw-r--r--spec/frontend/groups/mock_data.js21
-rw-r--r--spec/frontend/header_search/components/app_spec.js272
-rw-r--r--spec/frontend/header_search/components/header_search_scoped_items_spec.js47
-rw-r--r--spec/frontend/header_search/init_spec.js74
-rw-r--r--spec/frontend/header_search/mock_data.js75
-rw-r--r--spec/frontend/header_search/store/getters_spec.js8
-rw-r--r--spec/frontend/header_spec.js24
-rw-r--r--spec/frontend/ide/components/commit_sidebar/empty_state_spec.js26
-rw-r--r--spec/frontend/ide/components/commit_sidebar/list_spec.js56
-rw-r--r--spec/frontend/ide/components/commit_sidebar/success_message_spec.js30
-rw-r--r--spec/frontend/ide/components/ide_spec.js3
-rw-r--r--spec/frontend/ide/components/ide_tree_list_spec.js78
-rw-r--r--spec/frontend/ide/components/nav_dropdown_button_spec.js73
-rw-r--r--spec/frontend/ide/components/new_dropdown/modal_spec.js470
-rw-r--r--spec/frontend/ide/components/repo_editor_spec.js12
-rw-r--r--spec/frontend/ide/ide_router_spec.js3
-rw-r--r--spec/frontend/ide/stores/actions/file_spec.js3
-rw-r--r--spec/frontend/ide/stores/actions/merge_request_spec.js3
-rw-r--r--spec/frontend/ide/stores/actions/tree_spec.js3
-rw-r--r--spec/frontend/ide/stores/actions_spec.js3
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_table_spec.js32
-rw-r--r--spec/frontend/integrations/edit/components/active_checkbox_spec.js14
-rw-r--r--spec/frontend/integrations/edit/components/integration_form_spec.js57
-rw-r--r--spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js3
-rw-r--r--spec/frontend/invite_members/components/import_project_members_modal_spec.js (renamed from spec/frontend/invite_members/components/import_a_project_modal_spec.js)44
-rw-r--r--spec/frontend/invite_members/components/import_project_members_trigger_spec.js49
-rw-r--r--spec/frontend/invite_members/components/invite_members_modal_spec.js173
-rw-r--r--spec/frontend/invite_members/components/invite_modal_base_spec.js2
-rw-r--r--spec/frontend/invite_members/components/members_token_select_spec.js5
-rw-r--r--spec/frontend/invite_members/mock_data/member_modal.js10
-rw-r--r--spec/frontend/invite_members/utils/member_utils_spec.js12
-rw-r--r--spec/frontend/invite_members/utils/response_message_parser_spec.js33
-rw-r--r--spec/frontend/issuable/issuable_form_spec.js19
-rw-r--r--spec/frontend/issues/list/components/issues_list_app_spec.js61
-rw-r--r--spec/frontend/issues/list/mock_data.js1
-rw-r--r--spec/frontend/issues/list/utils_spec.js79
-rw-r--r--spec/frontend/issues/new/components/__snapshots__/type_popover_spec.js.snap3
-rw-r--r--spec/frontend/issues/show/components/description_spec.js45
-rw-r--r--spec/frontend/issues/show/components/edited_spec.js83
-rw-r--r--spec/frontend/issues/show/components/incidents/highlight_bar_spec.js2
-rw-r--r--spec/frontend/issues/show/components/incidents/mock_data.js33
-rw-r--r--spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js181
-rw-r--r--spec/frontend/issues/show/components/incidents/timeline_events_list_item_spec.js61
-rw-r--r--spec/frontend/issues/show/components/incidents/timeline_events_list_spec.js129
-rw-r--r--spec/frontend/issues/show/components/incidents/timeline_events_tab_spec.js78
-rw-r--r--spec/frontend/issues/show/components/incidents/utils_spec.js31
-rw-r--r--spec/frontend/jobs/bridge/app_spec.js146
-rw-r--r--spec/frontend/jobs/bridge/components/empty_state_spec.js58
-rw-r--r--spec/frontend/jobs/bridge/components/sidebar_spec.js99
-rw-r--r--spec/frontend/jobs/bridge/mock_data.js102
-rw-r--r--spec/frontend/jobs/components/job_app_spec.js7
-rw-r--r--spec/frontend/jobs/components/job_log_controllers_spec.js61
-rw-r--r--spec/frontend/jobs/components/job_sidebar_details_container_spec.js4
-rw-r--r--spec/frontend/jobs/components/jobs_container_spec.js2
-rw-r--r--spec/frontend/jobs/components/log/collapsible_section_spec.js9
-rw-r--r--spec/frontend/jobs/components/log/line_spec.js42
-rw-r--r--spec/frontend/jobs/components/log/log_spec.js91
-rw-r--r--spec/frontend/jobs/components/log/mock_data.js74
-rw-r--r--spec/frontend/jobs/components/table/cells/actions_cell_spec.js41
-rw-r--r--spec/frontend/jobs/components/table/cells/job_cell_spec.js32
-rw-r--r--spec/frontend/jobs/components/table/job_table_app_spec.js43
-rw-r--r--spec/frontend/jobs/components/table/jobs_table_spec.js10
-rw-r--r--spec/frontend/jobs/mock_data.js772
-rw-r--r--spec/frontend/jobs/store/mutations_spec.js94
-rw-r--r--spec/frontend/jobs/store/utils_spec.js111
-rw-r--r--spec/frontend/lib/dompurify_spec.js23
-rw-r--r--spec/frontend/lib/gfm/index_spec.js71
-rw-r--r--spec/frontend/lib/utils/common_utils_spec.js22
-rw-r--r--spec/frontend/lib/utils/navigation_utility_spec.js2
-rw-r--r--spec/frontend/lib/utils/rails_ujs_spec.js2
-rw-r--r--spec/frontend/lib/utils/text_utility_spec.js13
-rw-r--r--spec/frontend/logs/components/environment_logs_spec.js370
-rw-r--r--spec/frontend/logs/components/log_advanced_filters_spec.js175
-rw-r--r--spec/frontend/logs/components/log_control_buttons_spec.js88
-rw-r--r--spec/frontend/logs/components/log_simple_filters_spec.js134
-rw-r--r--spec/frontend/logs/components/tokens/token_with_loading_state_spec.js71
-rw-r--r--spec/frontend/logs/mock_data.js71
-rw-r--r--spec/frontend/logs/stores/actions_spec.js521
-rw-r--r--spec/frontend/logs/stores/getters_spec.js75
-rw-r--r--spec/frontend/logs/stores/mutations_spec.js257
-rw-r--r--spec/frontend/merge_request_tabs_spec.js19
-rw-r--r--spec/frontend/milestones/components/delete_milestone_modal_spec.js137
-rw-r--r--spec/frontend/milestones/components/milestone_combobox_spec.js8
-rw-r--r--spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap2
-rw-r--r--spec/frontend/monitoring/components/dashboard_panel_spec.js98
-rw-r--r--spec/frontend/monitoring/components/dashboard_spec.js37
-rw-r--r--spec/frontend/monitoring/store/mutations_spec.js12
-rw-r--r--spec/frontend/new_branch_spec.js23
-rw-r--r--spec/frontend/notebook/cells/code_spec.js56
-rw-r--r--spec/frontend/notebook/cells/markdown_spec.js2
-rw-r--r--spec/frontend/notebook/cells/output/index_spec.js56
-rw-r--r--spec/frontend/notebook/cells/prompt_spec.js42
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js122
-rw-r--r--spec/frontend/notes/components/note_signed_out_widget_spec.js37
-rw-r--r--spec/frontend/notes/components/noteable_discussion_spec.js6
-rw-r--r--spec/frontend/notes/components/noteable_note_spec.js23
-rw-r--r--spec/frontend/notes/components/notes_app_spec.js49
-rw-r--r--spec/frontend/notes/components/toggle_replies_widget_spec.js83
-rw-r--r--spec/frontend/notes/stores/actions_spec.js17
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js6
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status_spec.js12
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js44
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js8
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js5
-rw-r--r--spec/frontend/packages_and_registries/dependency_proxy/app_spec.js27
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js25
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/duplicates_settings_spec.js21
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js1
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/package_settings_spec.js21
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap3
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js7
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/expiration_dropdown_spec.js9
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/packages_cleanup_policy_form_spec.js267
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/packages_cleanup_policy_spec.js81
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js30
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/mock_data.js30
-rw-r--r--spec/frontend/packages_and_registries/shared/components/settings_block_spec.js43
-rw-r--r--spec/frontend/pages/dashboard/todos/index/todos_spec.js25
-rw-r--r--spec/frontend/pages/projects/forks/new/components/fork_form_spec.js15
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_spec.js.snap244
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_info_card_spec.js57
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js6
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_trial_card_spec.js12
-rw-r--r--spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js8
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js6
-rw-r--r--spec/frontend/pages/shared/wikis/components/wiki_content_spec.js8
-rw-r--r--spec/frontend/pages/shared/wikis/components/wiki_form_spec.js35
-rw-r--r--spec/frontend/pdf/index_spec.js39
-rw-r--r--spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js22
-rw-r--r--spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js128
-rw-r--r--spec/frontend/pipeline_editor/components/popovers/validate_pipeline_popover_spec.js43
-rw-r--r--spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js23
-rw-r--r--spec/frontend/pipeline_editor/components/validate/ci_validate_spec.js259
-rw-r--r--spec/frontend/pipeline_editor/mock_data.js2
-rw-r--r--spec/frontend/pipeline_wizard/components/widgets/checklist_spec.js110
-rw-r--r--spec/frontend/pipeline_wizard/pipeline_wizard_spec.js8
-rw-r--r--spec/frontend/pipeline_wizard/templates/pages_spec.js89
-rw-r--r--spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js6
-rw-r--r--spec/frontend/pipelines/components/jobs/jobs_app_spec.js9
-rw-r--r--spec/frontend/pipelines/graph/graph_component_wrapper_spec.js17
-rw-r--r--spec/frontend/pipelines/graph/graph_view_selector_spec.js60
-rw-r--r--spec/frontend/pipelines/graph/job_group_dropdown_spec.js2
-rw-r--r--spec/frontend/pipelines/graph/linked_pipeline_spec.js2
-rw-r--r--spec/frontend/pipelines/graph/mock_data.js242
-rw-r--r--spec/frontend/pipelines/header_component_spec.js4
-rw-r--r--spec/frontend/pipelines/performance_insights_modal_spec.js122
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js1
-rw-r--r--spec/frontend/pipelines/test_reports/stores/mutations_spec.js2
-rw-r--r--spec/frontend/pipelines/test_reports/test_case_details_spec.js6
-rw-r--r--spec/frontend/pipelines/test_reports/test_reports_spec.js26
-rw-r--r--spec/frontend/pipelines/test_reports/test_suite_table_spec.js30
-rw-r--r--spec/frontend/pipelines/test_reports/test_summary_table_spec.js16
-rw-r--r--spec/frontend/pipelines/utils_spec.js44
-rw-r--r--spec/frontend/projects/new/components/new_project_url_select_spec.js25
-rw-r--r--spec/frontend/projects/pipelines/charts/components/app_spec.js80
-rw-r--r--spec/frontend/projects/settings/components/new_access_dropdown_spec.js23
-rw-r--r--spec/frontend/protected_branches/protected_branch_edit_spec.js36
-rw-r--r--spec/frontend/ref/components/ref_selector_spec.js6
-rw-r--r--spec/frontend/releases/__snapshots__/util_spec.js.snap12
-rw-r--r--spec/frontend/releases/components/app_edit_new_spec.js62
-rw-r--r--spec/frontend/releases/components/app_index_spec.js31
-rw-r--r--spec/frontend/releases/components/confirm_delete_modal_spec.js89
-rw-r--r--spec/frontend/releases/components/release_block_footer_spec.js26
-rw-r--r--spec/frontend/releases/components/release_block_spec.js5
-rw-r--r--spec/frontend/releases/components/tag_field_spec.js8
-rw-r--r--spec/frontend/releases/stores/modules/detail/actions_spec.js158
-rw-r--r--spec/frontend/releases/stores/modules/detail/getters_spec.js36
-rw-r--r--spec/frontend/releases/stores/modules/detail/mutations_spec.js23
-rw-r--r--spec/frontend/reports/components/grouped_issues_list_spec.js7
-rw-r--r--spec/frontend/reports/mock_data/new_failures_report.json2
-rw-r--r--spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap2
-rw-r--r--spec/frontend/repository/components/blob_content_viewer_spec.js11
-rw-r--r--spec/frontend/repository/components/last_commit_spec.js290
-rw-r--r--spec/frontend/repository/components/table/row_spec.js9
-rw-r--r--spec/frontend/repository/log_tree_spec.js12
-rw-r--r--spec/frontend/repository/utils/commit_spec.js2
-rw-r--r--spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js29
-rw-r--r--spec/frontend/runner/admin_runners/admin_runners_app_spec.js227
-rw-r--r--spec/frontend/runner/components/cells/runner_actions_cell_spec.js4
-rw-r--r--spec/frontend/runner/components/runner_delete_button_spec.js4
-rw-r--r--spec/frontend/runner/components/runner_details_spec.js32
-rw-r--r--spec/frontend/runner/components/runner_filtered_search_bar_spec.js10
-rw-r--r--spec/frontend/runner/components/runner_list_spec.js4
-rw-r--r--spec/frontend/runner/components/runner_pause_button_spec.js24
-rw-r--r--spec/frontend/runner/components/runner_type_tabs_spec.js137
-rw-r--r--spec/frontend/runner/components/search_tokens/tag_token_spec.js3
-rw-r--r--spec/frontend/runner/components/stat/runner_count_spec.js148
-rw-r--r--spec/frontend/runner/components/stat/runner_stats_spec.js61
-rw-r--r--spec/frontend/runner/group_runner_show/group_runner_show_app_spec.js213
-rw-r--r--spec/frontend/runner/group_runners/group_runners_app_spec.js211
-rw-r--r--spec/frontend/runner/mock_data.js212
-rw-r--r--spec/frontend/runner/runner_search_utils_spec.js212
-rw-r--r--spec/frontend/security_configuration/components/app_spec.js173
-rw-r--r--spec/frontend/security_configuration/components/training_provider_list_spec.js77
-rw-r--r--spec/frontend/security_configuration/mock_data.js11
-rw-r--r--spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap2
-rw-r--r--spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js22
-rw-r--r--spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js14
-rw-r--r--spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js7
-rw-r--r--spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js41
-rw-r--r--spec/frontend/sidebar/lock/issuable_lock_form_spec.js45
-rw-r--r--spec/frontend/sidebar/mock_data.js13
-rw-r--r--spec/frontend/snippets/components/edit_spec.js3
-rw-r--r--spec/frontend/snippets/components/show_spec.js5
-rw-r--r--spec/frontend/surveys/merge_request_performance/app_spec.js143
-rw-r--r--spec/frontend/tabs/index_spec.js105
-rw-r--r--spec/frontend/test_setup.js2
-rw-r--r--spec/frontend/tracking/tracking_spec.js2
-rw-r--r--spec/frontend/user_lists/store/edit/mutations_spec.js4
-rw-r--r--spec/frontend/user_lists/store/new/mutations_spec.js2
-rw-r--r--spec/frontend/user_popovers_spec.js26
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js5
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_action_button_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/extensions/test_report/index_spec.js71
-rw-r--r--spec/frontend/vue_mr_widget/extensions/test_report/utils_spec.js242
-rw-r--r--spec/frontend/vue_mr_widget/extentions/accessibility/index_spec.js6
-rw-r--r--spec/frontend/vue_mr_widget/extentions/terraform/index_spec.js4
-rw-r--r--spec/frontend/vue_mr_widget/mr_widget_options_spec.js169
-rw-r--r--spec/frontend/vue_mr_widget/test_extensions.js33
-rw-r--r--spec/frontend/vue_shared/components/color_select_dropdown/color_select_root_spec.js18
-rw-r--r--spec/frontend/vue_shared/components/color_select_dropdown/dropdown_contents_spec.js52
-rw-r--r--spec/frontend/vue_shared/components/color_select_dropdown/dropdown_value_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/deployment_instance/deployment_instance_spec.js50
-rw-r--r--spec/frontend/vue_shared/components/deployment_instance/mock_data.js1
-rw-r--r--spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js143
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js146
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_contact_token_spec.js283
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_organization_token_spec.js282
-rw-r--r--spec/frontend/vue_shared/components/markdown/field_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap23
-rw-r--r--spec/frontend/vue_shared/components/notes/placeholder_note_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/page_size_selector_spec.js44
-rw-r--r--spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/runner_aws_deployments/__snapshots__/runner_aws_deployments_modal_spec.js.snap4
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js10
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/link_dependencies_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/mock_data.js2
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/utils/dependency_linker_util_spec.js33
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/utils/package_json_linker_spec.js15
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js41
-rw-r--r--spec/frontend/vue_shared/components/user_popover/user_popover_spec.js31
-rw-r--r--spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js73
-rw-r--r--spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js23
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js2
-rw-r--r--spec/frontend/work_items/components/item_title_spec.js2
-rw-r--r--spec/frontend/work_items/components/work_item_assignees_spec.js407
-rw-r--r--spec/frontend/work_items/components/work_item_detail_modal_spec.js10
-rw-r--r--spec/frontend/work_items/components/work_item_information_spec.js48
-rw-r--r--spec/frontend/work_items/components/work_item_labels_spec.js171
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js65
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js141
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_spec.js30
-rw-r--r--spec/frontend/work_items/components/work_item_weight_spec.js154
-rw-r--r--spec/frontend/work_items/mock_data.js292
-rw-r--r--spec/frontend/work_items/pages/create_work_item_spec.js52
-rw-r--r--spec/frontend/work_items/pages/work_item_detail_spec.js135
-rw-r--r--spec/frontend/work_items/pages/work_item_root_spec.js1
-rw-r--r--spec/frontend_integration/diffs/diffs_interopability_spec.js5
-rw-r--r--spec/frontend_integration/ide/ide_integration_spec.js4
-rw-r--r--spec/frontend_integration/ide/user_opens_file_spec.js2
-rw-r--r--spec/frontend_integration/ide/user_opens_ide_spec.js3
-rw-r--r--spec/frontend_integration/ide/user_opens_mr_spec.js3
-rw-r--r--spec/frontend_integration/snippets/snippets_notes_spec.js62
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/emojis.js9
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/index.js1
-rw-r--r--spec/graphql/mutations/design_management/delete_spec.rb31
-rw-r--r--spec/graphql/mutations/pages/mark_onboarding_complete_spec.rb57
-rw-r--r--spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/ci/runners_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/container_repositories_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/todos_resolver_spec.rb (renamed from spec/graphql/resolvers/todo_resolver_spec.rb)2
-rw-r--r--spec/graphql/resolvers/users/groups_resolver_spec.rb19
-rw-r--r--spec/graphql/types/ci/detailed_status_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/job_token_scope_type_spec.rb4
-rw-r--r--spec/graphql/types/ci/job_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/runner_upgrade_status_type_enum_spec.rb17
-rw-r--r--spec/graphql/types/ci/status_action_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/variable_type_enum_spec.rb9
-rw-r--r--spec/graphql/types/ci/variable_type_spec.rb11
-rw-r--r--spec/graphql/types/group_type_spec.rb2
-rw-r--r--spec/graphql/types/issue_type_enum_spec.rb4
-rw-r--r--spec/graphql/types/project_type_spec.rb3
-rw-r--r--spec/graphql/types/query_type_spec.rb1
-rw-r--r--spec/graphql/types/release_type_spec.rb3
-rw-r--r--spec/graphql/types/work_items/widget_interface_spec.rb1
-rw-r--r--spec/graphql/types/work_items/widgets/assignees_type_spec.rb11
-rw-r--r--spec/graphql/types/work_items/widgets/description_input_type_spec.rb9
-rw-r--r--spec/graphql/types/work_items/widgets/hierarchy_update_input_type_spec.rb9
-rw-r--r--spec/helpers/avatars_helper_spec.rb70
-rw-r--r--spec/helpers/ci/pipeline_editor_helper_spec.rb4
-rw-r--r--spec/helpers/commits_helper_spec.rb4
-rw-r--r--spec/helpers/diff_helper_spec.rb61
-rw-r--r--spec/helpers/emails_helper_spec.rb2
-rw-r--r--spec/helpers/environments_helper_spec.rb1
-rw-r--r--spec/helpers/groups/group_members_helper_spec.rb2
-rw-r--r--spec/helpers/groups_helper_spec.rb4
-rw-r--r--spec/helpers/learn_gitlab_helper_spec.rb85
-rw-r--r--spec/helpers/namespace_storage_limit_alert_helper_spec.rb11
-rw-r--r--spec/helpers/nav/new_dropdown_helper_spec.rb2
-rw-r--r--spec/helpers/projects/pipeline_helper_spec.rb6
-rw-r--r--spec/helpers/projects/project_members_helper_spec.rb70
-rw-r--r--spec/helpers/projects_helper_spec.rb6
-rw-r--r--spec/helpers/releases_helper_spec.rb6
-rw-r--r--spec/helpers/search_helper_spec.rb2
-rw-r--r--spec/helpers/sessions_helper_spec.rb47
-rw-r--r--spec/helpers/storage_helper_spec.rb14
-rw-r--r--spec/helpers/todos_helper_spec.rb65
-rw-r--r--spec/helpers/tree_helper_spec.rb54
-rw-r--r--spec/helpers/users/callouts_helper_spec.rb56
-rw-r--r--spec/helpers/web_hooks/web_hooks_helper_spec.rb120
-rw-r--r--spec/initializers/00_connection_logger_spec.rb39
-rw-r--r--spec/initializers/0_log_deprecations_spec.rb18
-rw-r--r--spec/initializers/100_patch_omniauth_oauth2_spec.rb2
-rw-r--r--spec/initializers/1_acts_as_taggable_spec.rb64
-rw-r--r--spec/initializers/enumerator_next_patch_spec.rb167
-rw-r--r--spec/initializers/set_active_support_hash_digest_class_spec.rb9
-rw-r--r--spec/lib/api/ci/helpers/runner_spec.rb26
-rw-r--r--spec/lib/api/entities/ci/job_request/service_spec.rb51
-rw-r--r--spec/lib/api/entities/deploy_key_spec.rb9
-rw-r--r--spec/lib/api/helpers/pagination_strategies_spec.rb15
-rw-r--r--spec/lib/atlassian/jira_connect/jwt/asymmetric_spec.rb3
-rw-r--r--spec/lib/backup/gitaly_backup_spec.rb4
-rw-r--r--spec/lib/banzai/filter/footnote_filter_spec.rb13
-rw-r--r--spec/lib/banzai/reference_parser/snippet_parser_spec.rb2
-rw-r--r--spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb65
-rw-r--r--spec/lib/bulk_imports/pipeline/runner_spec.rb155
-rw-r--r--spec/lib/bulk_imports/retry_pipeline_error_spec.rb13
-rw-r--r--spec/lib/container_registry/gitlab_api_client_spec.rb11
-rw-r--r--spec/lib/error_tracking/collector/dsn_spec.rb34
-rw-r--r--spec/lib/error_tracking/stacktrace_builder_spec.rb29
-rw-r--r--spec/lib/feature_spec.rb10
-rw-r--r--spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb23
-rw-r--r--spec/lib/generators/model/mocks/migration_file.txt26
-rw-r--r--spec/lib/generators/model/mocks/model_file.txt2
-rw-r--r--spec/lib/generators/model/mocks/spec_file.txt5
-rw-r--r--spec/lib/generators/model/model_generator_spec.rb47
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb6
-rw-r--r--spec/lib/gitlab/application_rate_limiter/base_strategy_spec.rb17
-rw-r--r--spec/lib/gitlab/application_rate_limiter/increment_per_action_spec.rb51
-rw-r--r--spec/lib/gitlab/application_rate_limiter/increment_per_actioned_resource_spec.rb52
-rw-r--r--spec/lib/gitlab/application_rate_limiter_spec.rb60
-rw-r--r--spec/lib/gitlab/auth/ldap/user_spec.rb18
-rw-r--r--spec/lib/gitlab/auth_spec.rb11
-rw-r--r--spec/lib/gitlab/background_migration/backfill_ci_runner_semver_spec.rb54
-rw-r--r--spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb106
-rw-r--r--spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb5
-rw-r--r--spec/lib/gitlab/background_migration/batched_migration_job_spec.rb64
-rw-r--r--spec/lib/gitlab/background_migration/batching_strategies/backfill_project_statistics_with_container_registry_size_batching_strategy_spec.rb138
-rw-r--r--spec/lib/gitlab/background_migration/batching_strategies/dismissed_vulnerabilities_strategy_spec.rb119
-rw-r--r--spec/lib/gitlab/background_migration/batching_strategies/loose_index_scan_batching_strategy_spec.rb67
-rw-r--r--spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb22
-rw-r--r--spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_inactive_public_projects_spec.rb81
-rw-r--r--spec/lib/gitlab/background_migration/populate_operation_visibility_permissions_from_operations_spec.rb80
-rw-r--r--spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb30
-rw-r--r--spec/lib/gitlab/background_migration/set_correct_vulnerability_state_spec.rb81
-rw-r--r--spec/lib/gitlab/background_migration/update_delayed_project_removal_to_null_for_user_namespaces_spec.rb49
-rw-r--r--spec/lib/gitlab/bare_repository_import/importer_spec.rb9
-rw-r--r--spec/lib/gitlab/bare_repository_import/repository_spec.rb11
-rw-r--r--spec/lib/gitlab/bitbucket_import/importer_spec.rb9
-rw-r--r--spec/lib/gitlab/changelog/config_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/build/duration_parser_spec.rb (renamed from spec/lib/gitlab/ci/build/artifacts/expire_in_parser_spec.rb)2
-rw-r--r--spec/lib/gitlab/ci/build/image_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb34
-rw-r--r--spec/lib/gitlab/ci/build/rules/rule_spec.rb20
-rw-r--r--spec/lib/gitlab/ci/config/entry/image_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb64
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules_spec.rb16
-rw-r--r--spec/lib/gitlab/ci/config/entry/service_spec.rb58
-rw-r--r--spec/lib/gitlab/ci/config/external/context_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/config/external/file/project_spec.rb16
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/config/external/processor_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/jwt_spec.rb20
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/create_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/ensure_environments_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/ensure_resource_groups_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb48
-rw-r--r--spec/lib/gitlab/ci/reports/coverage_report_generator_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/reports/test_report_spec.rb (renamed from spec/lib/gitlab/ci/reports/test_reports_spec.rb)2
-rw-r--r--spec/lib/gitlab/ci/reports/test_reports_comparer_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/runner/metrics_spec.rb41
-rw-r--r--spec/lib/gitlab/ci/runner_releases_spec.rb196
-rw-r--r--spec/lib/gitlab/ci/runner_upgrade_check_spec.rb170
-rw-r--r--spec/lib/gitlab/ci/status/stage/factory_spec.rb20
-rw-r--r--spec/lib/gitlab/ci/status/stage/play_manual_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/tags/bulk_insert_spec.rb70
-rw-r--r--spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/variables/builder_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/yaml_processor/feature_flags_spec.rb80
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb94
-rw-r--r--spec/lib/gitlab/content_security_policy/config_loader_spec.rb13
-rw-r--r--spec/lib/gitlab/data_builder/deployment_spec.rb10
-rw-r--r--spec/lib/gitlab/data_builder/pipeline_spec.rb11
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_job_spec.rb6
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb63
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_spec.rb49
-rw-r--r--spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb61
-rw-r--r--spec/lib/gitlab/database/background_migration/health_status_spec.rb77
-rw-r--r--spec/lib/gitlab/database/each_database_spec.rb27
-rw-r--r--spec/lib/gitlab/database/gitlab_schema_spec.rb19
-rw-r--r--spec/lib/gitlab/database/loose_foreign_keys_spec.rb13
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb29
-rw-r--r--spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb24
-rw-r--r--spec/lib/gitlab/database/migrations/reestablished_connection_stack_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb4
-rw-r--r--spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb32
-rw-r--r--spec/lib/gitlab/database/reindexing_spec.rb2
-rw-r--r--spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb8
-rw-r--r--spec/lib/gitlab/database_spec.rb2
-rw-r--r--spec/lib/gitlab/dependency_linker/base_linker_spec.rb2
-rw-r--r--spec/lib/gitlab/diff/file_spec.rb8
-rw-r--r--spec/lib/gitlab/diff/formatters/image_formatter_spec.rb1
-rw-r--r--spec/lib/gitlab/diff/formatters/text_formatter_spec.rb1
-rw-r--r--spec/lib/gitlab/diff/highlight_cache_spec.rb19
-rw-r--r--spec/lib/gitlab/diff/position_spec.rb80
-rw-r--r--spec/lib/gitlab/diff/position_tracer/image_strategy_spec.rb113
-rw-r--r--spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb138
-rw-r--r--spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb6
-rw-r--r--spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb6
-rw-r--r--spec/lib/gitlab/elasticsearch/logs/lines_spec.rb97
-rw-r--r--spec/lib/gitlab/elasticsearch/logs/pods_spec.rb35
-rw-r--r--spec/lib/gitlab/email/handler/service_desk_handler_spec.rb18
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb1
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/experience_spec.rb115
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing_spec.rb1
-rw-r--r--spec/lib/gitlab/encoding_helper_spec.rb48
-rw-r--r--spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb436
-rw-r--r--spec/lib/gitlab/error_tracking/processor/sanitizer_processor_spec.rb114
-rw-r--r--spec/lib/gitlab/error_tracking_spec.rb20
-rw-r--r--spec/lib/gitlab/git/attributes_at_ref_parser_spec.rb2
-rw-r--r--spec/lib/gitlab/git/attributes_parser_spec.rb14
-rw-r--r--spec/lib/gitlab/git/blame_spec.rb16
-rw-r--r--spec/lib/gitlab/git/branch_spec.rb25
-rw-r--r--spec/lib/gitlab/git/commit_spec.rb75
-rw-r--r--spec/lib/gitlab/git/conflict/parser_spec.rb79
-rw-r--r--spec/lib/gitlab/git/object_pool_spec.rb12
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb266
-rw-r--r--spec/lib/gitlab/git/tree_spec.rb56
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb3
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb17
-rw-r--r--spec/lib/gitlab/gitaly_client/ref_service_spec.rb60
-rw-r--r--spec/lib/gitlab/gitaly_client_spec.rb58
-rw-r--r--spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb61
-rw-r--r--spec/lib/gitlab/github_import/importer/events/closed_spec.rb72
-rw-r--r--spec/lib/gitlab/github_import/importer/events/cross_referenced_spec.rb96
-rw-r--r--spec/lib/gitlab/github_import/importer/events/renamed_spec.rb68
-rw-r--r--spec/lib/gitlab/github_import/importer/events/reopened_spec.rb56
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb112
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb128
-rw-r--r--spec/lib/gitlab/github_import/markdown_text_spec.rb48
-rw-r--r--spec/lib/gitlab/github_import/representation/issue_event_spec.rb156
-rw-r--r--spec/lib/gitlab/github_import/single_endpoint_notes_importing_spec.rb29
-rw-r--r--spec/lib/gitlab/gitlab_import/importer_spec.rb4
-rw-r--r--spec/lib/gitlab/gpg/commit_spec.rb261
-rw-r--r--spec/lib/gitlab/grape_logging/loggers/response_logger_spec.rb31
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb2
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb6
-rw-r--r--spec/lib/gitlab/harbor/client_spec.rb269
-rw-r--r--spec/lib/gitlab/harbor/query_spec.rb375
-rw-r--r--spec/lib/gitlab/hash_digest/facade_spec.rb36
-rw-r--r--spec/lib/gitlab/hook_data/merge_request_builder_spec.rb52
-rw-r--r--spec/lib/gitlab/http_connection_adapter_spec.rb12
-rw-r--r--spec/lib/gitlab/http_spec.rb56
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml6
-rw-r--r--spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb23
-rw-r--r--spec/lib/gitlab/import_export/members_mapper_spec.rb6
-rw-r--r--spec/lib/gitlab/import_export/project/tree_restorer_spec.rb81
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml2
-rw-r--r--spec/lib/gitlab/issuable/clone/attributes_rewriter_spec.rb93
-rw-r--r--spec/lib/gitlab/issuable/clone/copy_resource_events_service_spec.rb91
-rw-r--r--spec/lib/gitlab/jira_import/issue_serializer_spec.rb7
-rw-r--r--spec/lib/gitlab/jira_import/issues_importer_spec.rb19
-rw-r--r--spec/lib/gitlab/lograge/custom_options_spec.rb17
-rw-r--r--spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb2
-rw-r--r--spec/lib/gitlab/memory/watchdog_spec.rb308
-rw-r--r--spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb47
-rw-r--r--spec/lib/gitlab/metrics/memory_spec.rb40
-rw-r--r--spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb6
-rw-r--r--spec/lib/gitlab/metrics/sli_spec.rb8
-rw-r--r--spec/lib/gitlab/pages/cache_control_spec.rb42
-rw-r--r--spec/lib/gitlab/pages/deployment_update_spec.rb140
-rw-r--r--spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb16
-rw-r--r--spec/lib/gitlab/pagination/keyset/order_spec.rb24
-rw-r--r--spec/lib/gitlab/quick_actions/users_extractor_spec.rb93
-rw-r--r--spec/lib/gitlab/redis/multi_store_spec.rb22
-rw-r--r--spec/lib/gitlab/regex_spec.rb14
-rw-r--r--spec/lib/gitlab/security/scan_configuration_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb77
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb10
-rw-r--r--spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb79
-rw-r--r--spec/lib/gitlab/tracking/destinations/snowplow_spec.rb2
-rw-r--r--spec/lib/gitlab/tracking/incident_management_spec.rb2
-rw-r--r--spec/lib/gitlab/tracking/standard_context_spec.rb27
-rw-r--r--spec/lib/gitlab/tracking_spec.rb59
-rw-r--r--spec/lib/gitlab/tree_summary_spec.rb146
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb4
-rw-r--r--spec/lib/gitlab/usage/metric_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/unique_active_users_metric_spec.rb31
-rw-r--r--spec/lib/gitlab/usage/service_ping/instrumented_payload_spec.rb50
-rw-r--r--spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb30
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb107
-rw-r--r--spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb34
-rw-r--r--spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb40
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb71
-rw-r--r--spec/lib/gitlab/user_access_spec.rb34
-rw-r--r--spec/lib/gitlab/version_info_spec.rb159
-rw-r--r--spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb29
-rw-r--r--spec/lib/gitlab/x509/certificate_spec.rb62
-rw-r--r--spec/lib/gitlab/x509/commit_spec.rb41
-rw-r--r--spec/lib/gitlab/x509/signature_spec.rb2
-rw-r--r--spec/lib/google_api/cloud_platform/client_spec.rb38
-rw-r--r--spec/lib/initializer_connections_spec.rb63
-rw-r--r--spec/lib/json_web_token/rsa_token_spec.rb2
-rw-r--r--spec/lib/learn_gitlab/onboarding_spec.rb35
-rw-r--r--spec/lib/sidebars/projects/menus/monitor_menu_spec.rb28
-rw-r--r--spec/lib/sidebars/projects/menus/settings_menu_spec.rb19
-rw-r--r--spec/lib/unnested_in_filters/dsl_spec.rb31
-rw-r--r--spec/lib/unnested_in_filters/rewriter_spec.rb157
-rw-r--r--spec/mailers/devise_mailer_spec.rb30
-rw-r--r--spec/mailers/emails/admin_notification_spec.rb18
-rw-r--r--spec/mailers/emails/in_product_marketing_spec.rb8
-rw-r--r--spec/mailers/emails/profile_spec.rb8
-rw-r--r--spec/mailers/notify_spec.rb17
-rw-r--r--spec/metrics_server/metrics_server_spec.rb23
-rw-r--r--spec/migrations/20220601152916_add_user_id_and_ip_address_success_index_to_authentication_events_spec.rb24
-rw-r--r--spec/migrations/20220606082910_add_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb22
-rw-r--r--spec/migrations/20220620132300_update_last_run_date_for_iterations_cadences_spec.rb29
-rw-r--r--spec/migrations/20220622080547_backfill_project_statistics_with_container_registry_size_spec.rb41
-rw-r--r--spec/migrations/20220627090231_schedule_disable_legacy_open_source_license_for_inactive_public_projects_spec.rb63
-rw-r--r--spec/migrations/20220627152642_queue_update_delayed_project_removal_to_null_for_user_namespace_spec.rb32
-rw-r--r--spec/migrations/20220628012902_finalise_project_namespace_members_spec.rb72
-rw-r--r--spec/migrations/20220629184402_unset_escalation_policies_for_alert_incidents_spec.rb72
-rw-r--r--spec/migrations/20220715163254_update_notes_in_past_spec.rb23
-rw-r--r--spec/migrations/change_public_projects_cost_factor_spec.rb68
-rw-r--r--spec/migrations/finalize_orphaned_routes_cleanup_spec.rb72
-rw-r--r--spec/migrations/populate_operation_visibility_permissions_spec.rb32
-rw-r--r--spec/migrations/reschedule_backfill_imported_issue_search_data_spec.rb55
-rw-r--r--spec/migrations/schedule_set_correct_vulnerability_state_spec.rb33
-rw-r--r--spec/models/ability_spec.rb32
-rw-r--r--spec/models/application_setting_spec.rb54
-rw-r--r--spec/models/authentication_event_spec.rb27
-rw-r--r--spec/models/awareness_session_spec.rb163
-rw-r--r--spec/models/ci/build_report_result_spec.rb6
-rw-r--r--spec/models/ci/build_spec.rb172
-rw-r--r--spec/models/ci/group_spec.rb2
-rw-r--r--spec/models/ci/group_variable_spec.rb6
-rw-r--r--spec/models/ci/legacy_stage_spec.rb268
-rw-r--r--spec/models/ci/pending_build_spec.rb58
-rw-r--r--spec/models/ci/pipeline_artifact_spec.rb74
-rw-r--r--spec/models/ci/pipeline_spec.rb287
-rw-r--r--spec/models/ci/processable_spec.rb4
-rw-r--r--spec/models/ci/runner_spec.rb147
-rw-r--r--spec/models/ci/runner_version_spec.rb53
-rw-r--r--spec/models/ci/stage_spec.rb28
-rw-r--r--spec/models/ci/variable_spec.rb6
-rw-r--r--spec/models/clusters/applications/elastic_stack_spec.rb177
-rw-r--r--spec/models/clusters/cluster_spec.rb17
-rw-r--r--spec/models/clusters/integrations/elastic_stack_spec.rb19
-rw-r--r--spec/models/clusters/integrations/prometheus_spec.rb26
-rw-r--r--spec/models/commit_status_spec.rb20
-rw-r--r--spec/models/concerns/awareness_spec.rb39
-rw-r--r--spec/models/concerns/cache_markdown_field_spec.rb2
-rw-r--r--spec/models/concerns/cacheable_attributes_spec.rb7
-rw-r--r--spec/models/concerns/ci/artifactable_spec.rb9
-rw-r--r--spec/models/concerns/ci/bulk_insertable_tags_spec.rb66
-rw-r--r--spec/models/concerns/each_batch_spec.rb99
-rw-r--r--spec/models/concerns/loose_index_scan_spec.rb57
-rw-r--r--spec/models/concerns/participable_spec.rb10
-rw-r--r--spec/models/concerns/pg_full_text_searchable_spec.rb4
-rw-r--r--spec/models/concerns/require_email_verification_spec.rb104
-rw-r--r--spec/models/concerns/token_authenticatable_spec.rb1
-rw-r--r--spec/models/container_registry/event_spec.rb38
-rw-r--r--spec/models/container_repository_spec.rb23
-rw-r--r--spec/models/deployment_spec.rb148
-rw-r--r--spec/models/diff_note_spec.rb30
-rw-r--r--spec/models/environment_spec.rb28
-rw-r--r--spec/models/factories_spec.rb1
-rw-r--r--spec/models/group_spec.rb95
-rw-r--r--spec/models/hooks/project_hook_spec.rb71
-rw-r--r--spec/models/hooks/web_hook_spec.rb91
-rw-r--r--spec/models/incident_management/issuable_escalation_status_spec.rb4
-rw-r--r--spec/models/integration_spec.rb109
-rw-r--r--spec/models/integrations/base_chat_notification_spec.rb18
-rw-r--r--spec/models/integrations/chat_message/deployment_message_spec.rb2
-rw-r--r--spec/models/integrations/datadog_spec.rb16
-rw-r--r--spec/models/integrations/field_spec.rb73
-rw-r--r--spec/models/integrations/harbor_spec.rb8
-rw-r--r--spec/models/integrations/irker_spec.rb14
-rw-r--r--spec/models/integrations/jira_spec.rb2
-rw-r--r--spec/models/integrations/prometheus_spec.rb43
-rw-r--r--spec/models/integrations/slack_spec.rb2
-rw-r--r--spec/models/integrations/youtrack_spec.rb6
-rw-r--r--spec/models/issue_spec.rb76
-rw-r--r--spec/models/key_spec.rb105
-rw-r--r--spec/models/members/group_member_spec.rb6
-rw-r--r--spec/models/members/project_member_spec.rb30
-rw-r--r--spec/models/merge_request_diff_file_spec.rb153
-rw-r--r--spec/models/merge_request_diff_spec.rb97
-rw-r--r--spec/models/merge_request_spec.rb16
-rw-r--r--spec/models/namespace_spec.rb42
-rw-r--r--spec/models/note_spec.rb16
-rw-r--r--spec/models/oauth_access_token_spec.rb48
-rw-r--r--spec/models/operations/feature_flags_client_spec.rb70
-rw-r--r--spec/models/packages/cleanup/policy_spec.rb36
-rw-r--r--spec/models/packages/debian/file_entry_spec.rb7
-rw-r--r--spec/models/pages/virtual_domain_spec.rb21
-rw-r--r--spec/models/pages_domain_spec.rb29
-rw-r--r--spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb3
-rw-r--r--spec/models/project_export_job_spec.rb13
-rw-r--r--spec/models/project_import_state_spec.rb41
-rw-r--r--spec/models/project_setting_spec.rb11
-rw-r--r--spec/models/project_spec.rb60
-rw-r--r--spec/models/project_team_spec.rb10
-rw-r--r--spec/models/project_tracing_setting_spec.rb40
-rw-r--r--spec/models/projects/import_export/relation_export_spec.rb23
-rw-r--r--spec/models/projects/import_export/relation_export_upload_spec.rb25
-rw-r--r--spec/models/protected_branch_spec.rb8
-rw-r--r--spec/models/remote_mirror_spec.rb18
-rw-r--r--spec/models/repository_spec.rb64
-rw-r--r--spec/models/ssh_host_key_spec.rb41
-rw-r--r--spec/models/todo_spec.rb20
-rw-r--r--spec/models/tree_spec.rb3
-rw-r--r--spec/models/user_spec.rb593
-rw-r--r--spec/models/users/namespace_callout_spec.rb39
-rw-r--r--spec/models/wiki_page_spec.rb57
-rw-r--r--spec/models/work_item_spec.rb57
-rw-r--r--spec/models/work_items/parent_link_spec.rb18
-rw-r--r--spec/models/work_items/type_spec.rb4
-rw-r--r--spec/models/work_items/widgets/assignees_spec.rb31
-rw-r--r--spec/models/work_items/widgets/hierarchy_spec.rb43
-rw-r--r--spec/models/x509_certificate_spec.rb5
-rw-r--r--spec/models/x509_issuer_spec.rb5
-rw-r--r--spec/policies/environment_policy_spec.rb8
-rw-r--r--spec/policies/global_policy_spec.rb40
-rw-r--r--spec/policies/group_policy_spec.rb27
-rw-r--r--spec/policies/incident_management/timeline_event_policy_spec.rb60
-rw-r--r--spec/policies/issue_policy_spec.rb12
-rw-r--r--spec/policies/merge_request_policy_spec.rb62
-rw-r--r--spec/policies/namespace/root_storage_statistics_policy_spec.rb2
-rw-r--r--spec/policies/namespaces/user_namespace_policy_spec.rb2
-rw-r--r--spec/policies/project_policy_spec.rb157
-rw-r--r--spec/policies/project_statistics_policy_spec.rb2
-rw-r--r--spec/policies/work_item_policy_spec.rb29
-rw-r--r--spec/presenters/blob_presenter_spec.rb2
-rw-r--r--spec/presenters/ci/build_presenter_spec.rb82
-rw-r--r--spec/presenters/ci/legacy_stage_presenter_spec.rb47
-rw-r--r--spec/presenters/clusters/cluster_presenter_spec.rb80
-rw-r--r--spec/presenters/gitlab/blame_presenter_spec.rb3
-rw-r--r--spec/requests/admin/background_migrations_controller_spec.rb4
-rw-r--r--spec/requests/api/api_spec.rb4
-rw-r--r--spec/requests/api/award_emoji_spec.rb112
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb73
-rw-r--r--spec/requests/api/commits_spec.rb19
-rw-r--r--spec/requests/api/conan_instance_packages_spec.rb6
-rw-r--r--spec/requests/api/conan_project_packages_spec.rb6
-rw-r--r--spec/requests/api/environments_spec.rb83
-rw-r--r--spec/requests/api/events_spec.rb2
-rw-r--r--spec/requests/api/feature_flags_user_lists_spec.rb7
-rw-r--r--spec/requests/api/geo_spec.rb14
-rw-r--r--spec/requests/api/graphql/boards/board_lists_query_spec.rb3
-rw-r--r--spec/requests/api/graphql/ci/group_variables_spec.rb67
-rw-r--r--spec/requests/api/graphql/ci/instance_variables_spec.rb60
-rw-r--r--spec/requests/api/graphql/ci/job_spec.rb4
-rw-r--r--spec/requests/api/graphql/ci/jobs_spec.rb6
-rw-r--r--spec/requests/api/graphql/ci/manual_variables_spec.rb95
-rw-r--r--spec/requests/api/graphql/ci/pipelines_spec.rb4
-rw-r--r--spec/requests/api/graphql/ci/project_variables_spec.rb67
-rw-r--r--spec/requests/api/graphql/ci/runner_spec.rb2
-rw-r--r--spec/requests/api/graphql/ci/stages_spec.rb2
-rw-r--r--spec/requests/api/graphql/container_repository/container_repository_details_spec.rb2
-rw-r--r--spec/requests/api/graphql/crm/contacts_spec.rb69
-rw-r--r--spec/requests/api/graphql/current_user/groups_query_spec.rb34
-rw-r--r--spec/requests/api/graphql/group/container_repositories_spec.rb2
-rw-r--r--spec/requests/api/graphql/group/dependency_proxy_blobs_spec.rb2
-rw-r--r--spec/requests/api/graphql/group/dependency_proxy_group_setting_spec.rb2
-rw-r--r--spec/requests/api/graphql/group/dependency_proxy_image_ttl_policy_spec.rb2
-rw-r--r--spec/requests/api/graphql/group/dependency_proxy_manifests_spec.rb7
-rw-r--r--spec/requests/api/graphql/group/group_members_spec.rb48
-rw-r--r--spec/requests/api/graphql/mutations/issues/create_spec.rb36
-rw-r--r--spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb14
-rw-r--r--spec/requests/api/graphql/mutations/snippets/update_spec.rb17
-rw-r--r--spec/requests/api/graphql/mutations/work_items/create_from_task_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/work_items/create_spec.rb89
-rw-r--r--spec/requests/api/graphql/mutations/work_items/delete_task_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/work_items/update_spec.rb268
-rw-r--r--spec/requests/api/graphql/mutations/work_items/update_widgets_spec.rb51
-rw-r--r--spec/requests/api/graphql/project/container_repositories_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/issues_spec.rb37
-rw-r--r--spec/requests/api/graphql/project/jobs_spec.rb4
-rw-r--r--spec/requests/api/graphql/project/packages_cleanup_policy_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/pipeline_spec.rb60
-rw-r--r--spec/requests/api/graphql/project/project_members_spec.rb48
-rw-r--r--spec/requests/api/graphql/todo_query_spec.rb50
-rw-r--r--spec/requests/api/graphql/work_item_spec.rb98
-rw-r--r--spec/requests/api/group_export_spec.rb14
-rw-r--r--spec/requests/api/group_variables_spec.rb2
-rw-r--r--spec/requests/api/groups_spec.rb14
-rw-r--r--spec/requests/api/integrations_spec.rb40
-rw-r--r--spec/requests/api/internal/base_spec.rb58
-rw-r--r--spec/requests/api/internal/error_tracking_spec.rb108
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb4
-rw-r--r--spec/requests/api/invitations_spec.rb40
-rw-r--r--spec/requests/api/issues/issues_spec.rb14
-rw-r--r--spec/requests/api/markdown_snapshot_spec.rb4
-rw-r--r--spec/requests/api/maven_packages_spec.rb40
-rw-r--r--spec/requests/api/metadata_spec.rb94
-rw-r--r--spec/requests/api/npm_project_packages_spec.rb14
-rw-r--r--spec/requests/api/project_attributes.yml6
-rw-r--r--spec/requests/api/project_export_spec.rb14
-rw-r--r--spec/requests/api/project_hooks_spec.rb247
-rw-r--r--spec/requests/api/project_import_spec.rb10
-rw-r--r--spec/requests/api/projects_spec.rb43
-rw-r--r--spec/requests/api/protected_tags_spec.rb7
-rw-r--r--spec/requests/api/pypi_packages_spec.rb15
-rw-r--r--spec/requests/api/repositories_spec.rb74
-rw-r--r--spec/requests/api/settings_spec.rb2
-rw-r--r--spec/requests/api/snippets_spec.rb38
-rw-r--r--spec/requests/api/system_hooks_spec.rb229
-rw-r--r--spec/requests/api/tags_spec.rb7
-rw-r--r--spec/requests/api/terraform/modules/v1/packages_spec.rb210
-rw-r--r--spec/requests/api/unleash_spec.rb42
-rw-r--r--spec/requests/api/users_spec.rb109
-rw-r--r--spec/requests/groups/harbor/artifacts_controller_spec.rb10
-rw-r--r--spec/requests/groups/harbor/repositories_controller_spec.rb65
-rw-r--r--spec/requests/groups/harbor/tags_controller_spec.rb10
-rw-r--r--spec/requests/jira_authorizations_spec.rb6
-rw-r--r--spec/requests/jira_connect/oauth_application_ids_controller_spec.rb53
-rw-r--r--spec/requests/jira_connect/subscriptions_controller_spec.rb29
-rw-r--r--spec/requests/jwks_controller_spec.rb6
-rw-r--r--spec/requests/oauth_tokens_spec.rb78
-rw-r--r--spec/requests/openid_connect_spec.rb14
-rw-r--r--spec/requests/projects/environments_controller_spec.rb2
-rw-r--r--spec/requests/projects/google_cloud/configuration_controller_spec.rb141
-rw-r--r--spec/requests/projects/google_cloud/databases_controller_spec.rb135
-rw-r--r--spec/requests/projects/google_cloud/deployments_controller_spec.rb27
-rw-r--r--spec/requests/projects/google_cloud/gcp_regions_controller_spec.rb16
-rw-r--r--spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb12
-rw-r--r--spec/requests/projects/google_cloud/service_accounts_controller_spec.rb67
-rw-r--r--spec/requests/projects/google_cloud_controller_spec.rb178
-rw-r--r--spec/requests/projects/harbor/artifacts_controller_spec.rb10
-rw-r--r--spec/requests/projects/harbor/repositories_controller_spec.rb65
-rw-r--r--spec/requests/projects/harbor/tags_controller_spec.rb10
-rw-r--r--spec/requests/projects/issues_controller_spec.rb16
-rw-r--r--spec/requests/projects/pipelines_controller_spec.rb67
-rw-r--r--spec/requests/users_controller_spec.rb8
-rw-r--r--spec/requests/verifies_with_email_spec.rb234
-rw-r--r--spec/routing/group_routing_spec.rb12
-rw-r--r--spec/routing/project_routing_spec.rb24
-rw-r--r--spec/routing/routing_spec.rb20
-rw-r--r--spec/rubocop/cop/database/rescue_query_canceled_spec.rb29
-rw-r--r--spec/rubocop/cop/database/rescue_statement_timeout_spec.rb29
-rw-r--r--spec/rubocop/cop/gitlab/namespaced_class_spec.rb6
-rw-r--r--spec/scripts/determine-qa-tests_spec.rb109
-rw-r--r--spec/scripts/lib/glfm/update_example_snapshots_spec.rb47
-rw-r--r--spec/scripts/pipeline_test_report_builder_spec.rb48
-rw-r--r--spec/serializers/ci/dag_job_group_entity_spec.rb6
-rw-r--r--spec/serializers/ci/dag_pipeline_entity_spec.rb6
-rw-r--r--spec/serializers/ci/dag_stage_entity_spec.rb4
-rw-r--r--spec/serializers/ci/job_entity_spec.rb8
-rw-r--r--spec/serializers/ci/job_serializer_spec.rb32
-rw-r--r--spec/serializers/cluster_entity_spec.rb60
-rw-r--r--spec/serializers/cluster_serializer_spec.rb2
-rw-r--r--spec/serializers/diffs_entity_spec.rb1
-rw-r--r--spec/serializers/diffs_metadata_entity_spec.rb1
-rw-r--r--spec/serializers/environment_entity_spec.rb48
-rw-r--r--spec/serializers/integrations/field_entity_spec.rb2
-rw-r--r--spec/serializers/integrations/harbor_serializers/artifact_entity_spec.rb51
-rw-r--r--spec/serializers/integrations/harbor_serializers/artifact_serializer_spec.rb9
-rw-r--r--spec/serializers/integrations/harbor_serializers/repository_entity_spec.rb55
-rw-r--r--spec/serializers/integrations/harbor_serializers/repository_serializer_spec.rb9
-rw-r--r--spec/serializers/integrations/harbor_serializers/tag_entity_spec.rb38
-rw-r--r--spec/serializers/integrations/harbor_serializers/tag_serializer_spec.rb9
-rw-r--r--spec/serializers/issue_entity_spec.rb2
-rw-r--r--spec/serializers/member_user_entity_spec.rb4
-rw-r--r--spec/serializers/paginated_diff_entity_spec.rb1
-rw-r--r--spec/serializers/stage_entity_spec.rb10
-rw-r--r--spec/serializers/stage_serializer_spec.rb4
-rw-r--r--spec/serializers/test_reports_comparer_entity_spec.rb4
-rw-r--r--spec/serializers/test_reports_comparer_serializer_spec.rb4
-rw-r--r--spec/services/alert_management/alerts/update_service_spec.rb51
-rw-r--r--spec/services/ci/abort_pipelines_service_spec.rb8
-rw-r--r--spec/services/ci/create_pipeline_service/include_spec.rb46
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb11
-rw-r--r--spec/services/ci/ensure_stage_service_spec.rb2
-rw-r--r--spec/services/ci/generate_coverage_reports_service_spec.rb40
-rw-r--r--spec/services/ci/job_artifacts/create_service_spec.rb28
-rw-r--r--spec/services/ci/job_artifacts/destroy_batch_service_spec.rb20
-rw-r--r--spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb42
-rw-r--r--spec/services/ci/play_manual_stage_service_spec.rb2
-rw-r--r--spec/services/ci/register_job_service_spec.rb60
-rw-r--r--spec/services/ci/retry_job_service_spec.rb4
-rw-r--r--spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb149
-rw-r--r--spec/services/ci/runners/register_runner_service_spec.rb41
-rw-r--r--spec/services/ci/unlock_artifacts_service_spec.rb2
-rw-r--r--spec/services/ci/update_pending_build_service_spec.rb26
-rw-r--r--spec/services/clusters/applications/create_service_spec.rb23
-rw-r--r--spec/services/clusters/integrations/create_service_spec.rb1
-rw-r--r--spec/services/deployments/create_for_build_service_spec.rb1
-rw-r--r--spec/services/deployments/create_service_spec.rb37
-rw-r--r--spec/services/deployments/update_environment_service_spec.rb2
-rw-r--r--spec/services/draft_notes/publish_service_spec.rb2
-rw-r--r--spec/services/event_create_service_spec.rb69
-rw-r--r--spec/services/feature_flags/create_service_spec.rb4
-rw-r--r--spec/services/feature_flags/destroy_service_spec.rb4
-rw-r--r--spec/services/feature_flags/update_service_spec.rb4
-rw-r--r--spec/services/git/branch_hooks_service_spec.rb21
-rw-r--r--spec/services/git/branch_push_service_spec.rb191
-rw-r--r--spec/services/git/process_ref_changes_service_spec.rb39
-rw-r--r--spec/services/git/tag_hooks_service_spec.rb2
-rw-r--r--spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb2
-rw-r--r--spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb158
-rw-r--r--spec/services/groups/destroy_service_spec.rb10
-rw-r--r--spec/services/groups/group_links/destroy_service_spec.rb56
-rw-r--r--spec/services/groups/transfer_service_spec.rb12
-rw-r--r--spec/services/groups/update_service_spec.rb12
-rw-r--r--spec/services/incident_management/issuable_escalation_statuses/after_update_service_spec.rb54
-rw-r--r--spec/services/incident_management/issuable_escalation_statuses/build_service_spec.rb6
-rw-r--r--spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb15
-rw-r--r--spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb6
-rw-r--r--spec/services/incident_management/timeline_events/create_service_spec.rb101
-rw-r--r--spec/services/incident_management/timeline_events/update_service_spec.rb6
-rw-r--r--spec/services/issuable/clone/attributes_rewriter_spec.rb140
-rw-r--r--spec/services/issues/clone_service_spec.rb51
-rw-r--r--spec/services/issues/close_service_spec.rb17
-rw-r--r--spec/services/issues/create_service_spec.rb25
-rw-r--r--spec/services/issues/import_csv_service_spec.rb24
-rw-r--r--spec/services/issues/move_service_spec.rb1
-rw-r--r--spec/services/issues/related_branches_service_spec.rb93
-rw-r--r--spec/services/issues/reopen_service_spec.rb33
-rw-r--r--spec/services/issues/update_service_spec.rb21
-rw-r--r--spec/services/members/create_service_spec.rb12
-rw-r--r--spec/services/members/creator_service_spec.rb4
-rw-r--r--spec/services/members/groups/creator_service_spec.rb6
-rw-r--r--spec/services/members/invite_member_builder_spec.rb44
-rw-r--r--spec/services/members/invite_service_spec.rb13
-rw-r--r--spec/services/members/projects/creator_service_spec.rb6
-rw-r--r--spec/services/members/standard_member_builder_spec.rb25
-rw-r--r--spec/services/merge_requests/approval_service_spec.rb2
-rw-r--r--spec/services/merge_requests/create_pipeline_service_spec.rb11
-rw-r--r--spec/services/merge_requests/create_service_spec.rb1
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb23
-rw-r--r--spec/services/merge_requests/update_service_spec.rb26
-rw-r--r--spec/services/namespaces/in_product_marketing_emails_service_spec.rb1
-rw-r--r--spec/services/notification_service_spec.rb83
-rw-r--r--spec/services/packages/cleanup/execute_policy_service_spec.rb163
-rw-r--r--spec/services/packages/debian/create_package_file_service_spec.rb8
-rw-r--r--spec/services/packages/debian/extract_changes_metadata_service_spec.rb6
-rw-r--r--spec/services/packages/debian/generate_distribution_service_spec.rb6
-rw-r--r--spec/services/packages/mark_package_files_for_destruction_service_spec.rb47
-rw-r--r--spec/services/packages/pypi/create_package_service_spec.rb15
-rw-r--r--spec/services/pages/delete_service_spec.rb6
-rw-r--r--spec/services/pod_logs/base_service_spec.rb147
-rw-r--r--spec/services/pod_logs/elasticsearch_service_spec.rb309
-rw-r--r--spec/services/pod_logs/kubernetes_service_spec.rb310
-rw-r--r--spec/services/preview_markdown_service_spec.rb20
-rw-r--r--spec/services/projects/after_rename_service_spec.rb32
-rw-r--r--spec/services/projects/blame_service_spec.rb36
-rw-r--r--spec/services/projects/create_from_template_service_spec.rb2
-rw-r--r--spec/services/projects/create_service_spec.rb26
-rw-r--r--spec/services/projects/destroy_service_spec.rb29
-rw-r--r--spec/services/projects/fork_service_spec.rb21
-rw-r--r--spec/services/projects/group_links/update_service_spec.rb2
-rw-r--r--spec/services/projects/move_deploy_keys_projects_service_spec.rb17
-rw-r--r--spec/services/projects/operations/update_service_spec.rb88
-rw-r--r--spec/services/projects/prometheus/alerts/notify_service_spec.rb2
-rw-r--r--spec/services/projects/transfer_service_spec.rb20
-rw-r--r--spec/services/projects/update_pages_service_spec.rb54
-rw-r--r--spec/services/projects/update_service_spec.rb44
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb57
-rw-r--r--spec/services/repositories/changelog_service_spec.rb19
-rw-r--r--spec/services/search_service_spec.rb17
-rw-r--r--spec/services/service_ping/submit_service_ping_service_spec.rb53
-rw-r--r--spec/services/suggestions/apply_service_spec.rb3
-rw-r--r--spec/services/system_notes/incidents_service_spec.rb14
-rw-r--r--spec/services/terraform/states/trigger_destroy_service_spec.rb13
-rw-r--r--spec/services/todo_service_spec.rb4
-rw-r--r--spec/services/users/activity_service_spec.rb7
-rw-r--r--spec/services/web_hook_service_spec.rb68
-rw-r--r--spec/services/web_hooks/log_execution_service_spec.rb6
-rw-r--r--spec/services/work_items/create_and_link_service_spec.rb19
-rw-r--r--spec/services/work_items/create_from_task_service_spec.rb10
-rw-r--r--spec/services/work_items/create_service_spec.rb123
-rw-r--r--spec/services/work_items/delete_task_service_spec.rb2
-rw-r--r--spec/services/work_items/parent_links/create_service_spec.rb173
-rw-r--r--spec/services/work_items/parent_links/destroy_service_spec.rb47
-rw-r--r--spec/services/work_items/task_list_reference_removal_service_spec.rb7
-rw-r--r--spec/services/work_items/task_list_reference_replacement_service_spec.rb10
-rw-r--r--spec/services/work_items/update_service_spec.rb109
-rw-r--r--spec/services/work_items/widgets/description_service/update_service_spec.rb35
-rw-r--r--spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb160
-rw-r--r--spec/services/work_items/widgets/weight_service/update_service_spec.rb36
-rw-r--r--spec/simplecov_env.rb2
-rw-r--r--spec/spec_helper.rb25
-rw-r--r--spec/support/finder_collection.rb48
-rw-r--r--spec/support/finder_collection_allowlist.yml66
-rw-r--r--spec/support/gitlab_experiment.rb1
-rw-r--r--spec/support/gitlab_stubs/gitlab_ci_dast_includes.yml10
-rw-r--r--spec/support/graphql/arguments.rb4
-rw-r--r--spec/support/helpers/database/database_helpers.rb6
-rw-r--r--spec/support/helpers/detailed_error_helpers.rb19
-rw-r--r--spec/support/helpers/features/invite_members_modal_helper.rb44
-rw-r--r--spec/support/helpers/features/source_editor_spec_helpers.rb7
-rw-r--r--spec/support/helpers/features/web_ide_spec_helpers.rb14
-rw-r--r--spec/support/helpers/harbor_helper.rb27
-rw-r--r--spec/support/helpers/kubernetes_helpers.rb18
-rw-r--r--spec/support/helpers/project_forks_helper.rb9
-rw-r--r--spec/support/helpers/project_helpers.rb2
-rw-r--r--spec/support/helpers/prometheus_helpers.rb7
-rw-r--r--spec/support/helpers/repo_helpers.rb76
-rw-r--r--spec/support/helpers/seed_helper.rb47
-rw-r--r--spec/support/helpers/stub_snowplow.rb (renamed from spec/support/stub_snowplow.rb)2
-rw-r--r--spec/support/helpers/test_env.rb58
-rw-r--r--spec/support/helpers/usage_data_helpers.rb2
-rw-r--r--spec/support/matchers/background_migrations_matchers.rb7
-rw-r--r--spec/support/matchers/event_store.rb11
-rw-r--r--spec/support/matchers/match_file.rb2
-rw-r--r--spec/support/services/issuable_import_csv_service_shared_examples.rb4
-rw-r--r--spec/support/shared_contexts/controllers/ldap_omniauth_callbacks_controller_shared_context.rb3
-rw-r--r--spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb7
-rw-r--r--spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/markdown_snapshot_shared_examples.rb4
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb3
-rw-r--r--spec/support/shared_contexts/policies/group_policy_shared_context.rb1
-rw-r--r--spec/support/shared_contexts/policies/project_policy_shared_context.rb5
-rw-r--r--spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb37
-rw-r--r--spec/support/shared_examples/csp.rb76
-rw-r--r--spec/support/shared_examples/features/content_editor_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/discussion_comments_shared_example.rb2
-rw-r--r--spec/support/shared_examples/features/inviting_members_shared_examples.rb70
-rw-r--r--spec/support/shared_examples/features/multiple_assignees_mr_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/wiki/autocomplete_shared_examples.rb43
-rw-r--r--spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/finders/issues_finder_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/graphql/mutations/work_items/update_description_widget_shared_examples.rb34
-rw-r--r--spec/support/shared_examples/graphql/mutations/work_items/update_weight_widget_shared_examples.rb34
-rw-r--r--spec/support/shared_examples/graphql/sorted_paginated_query_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/harbor/artifacts_controller_shared_examples.rb162
-rw-r--r--spec/support/shared_examples/harbor/container_shared_examples.rb9
-rw-r--r--spec/support/shared_examples/harbor/repositories_controller_shared_examples.rb172
-rw-r--r--spec/support/shared_examples/harbor/tags_controller_shared_examples.rb155
-rw-r--r--spec/support/shared_examples/integrations/integration_settings_form.rb5
-rw-r--r--spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/lib/gitlab/position_formatters_shared_examples.rb16
-rw-r--r--spec/support/shared_examples/lib/gitlab/search_language_filter_shared_examples.rb54
-rw-r--r--spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb42
-rw-r--r--spec/support/shared_examples/merge_request_author_auto_assign_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/models/chat_integration_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/models/clusters/elastic_stack_client_shared.rb82
-rw-r--r--spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/concerns/timebox_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/issuable_participants_shared_examples.rb53
-rw-r--r--spec/support/shared_examples/models/member_shared_examples.rb124
-rw-r--r--spec/support/shared_examples/models/mentionable_shared_examples.rb39
-rw-r--r--spec/support/shared_examples/quick_actions/issue/clone_quick_action_shared_examples.rb7
-rw-r--r--spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb35
-rw-r--r--spec/support/shared_examples/requests/api/debian_common_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/requests/api/debian_distributions_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/requests/api/graphql/mutations/snippets_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/hooks_shared_examples.rb415
-rw-r--r--spec/support/shared_examples/requests/api/notes_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/requests/api/terraform/modules/v1/packages_shared_examples.rb34
-rw-r--r--spec/support/shared_examples/services/alert_management/alert_processing/alert_recovery_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/alert_management/alert_processing/incident_resolution_shared_examples.rb40
-rw-r--r--spec/support/shared_examples/services/alert_management_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/services/feature_flags/client_shared_examples.rb19
-rw-r--r--spec/support/shared_examples/usage_data_counters/work_item_activity_unique_counter_shared_examples.rb41
-rw-r--r--spec/support/shared_examples/views/themed_layout_examples.rb35
-rw-r--r--spec/support/shared_examples/work_items/widgetable_service_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb16
-rw-r--r--spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb37
-rw-r--r--spec/support/snowplow.rb16
-rw-r--r--spec/support_specs/graphql/arguments_spec.rb3
-rw-r--r--spec/support_specs/helpers/graphql_helpers_spec.rb1
-rw-r--r--spec/tasks/dev_rake_spec.rb11
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb39
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb13
-rw-r--r--spec/tooling/danger/project_helper_spec.rb3
-rw-r--r--spec/views/admin/application_settings/general.html.haml_spec.rb31
-rw-r--r--spec/views/dashboard/projects/_blank_state_welcome.html.haml_spec.rb17
-rw-r--r--spec/views/devise/sessions/new.html.haml_spec.rb38
-rw-r--r--spec/views/errors/omniauth_error.html.haml_spec.rb22
-rw-r--r--spec/views/groups/edit.html.haml_spec.rb2
-rw-r--r--spec/views/groups/group_members/index.html.haml_spec.rb12
-rw-r--r--spec/views/layouts/_flash.html.haml_spec.rb14
-rw-r--r--spec/views/layouts/application.html.haml_spec.rb2
-rw-r--r--spec/views/layouts/devise.html.haml_spec.rb7
-rw-r--r--spec/views/layouts/devise_empty.html.haml_spec.rb7
-rw-r--r--spec/views/layouts/fullscreen.html.haml_spec.rb13
-rw-r--r--spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb60
-rw-r--r--spec/views/layouts/signup_onboarding.html.haml_spec.rb7
-rw-r--r--spec/views/layouts/simple_registration.html.haml_spec.rb7
-rw-r--r--spec/views/layouts/terms.html.haml_spec.rb13
-rw-r--r--spec/views/projects/commits/_commit.html.haml_spec.rb2
-rw-r--r--spec/views/projects/issues/_issue.html.haml_spec.rb44
-rw-r--r--spec/views/projects/jobs/show.html.haml_spec.rb15
-rw-r--r--spec/views/projects/project_members/index.html.haml_spec.rb6
-rw-r--r--spec/views/projects/settings/operations/show.html.haml_spec.rb16
-rw-r--r--spec/views/projects/tracing/show.html.haml_spec.rb59
-rw-r--r--spec/views/shared/deploy_tokens/_form.html.haml_spec.rb2
-rw-r--r--spec/workers/build_finished_worker_spec.rb13
-rw-r--r--spec/workers/build_hooks_worker_spec.rb19
-rw-r--r--spec/workers/bulk_imports/pipeline_worker_spec.rb66
-rw-r--r--spec/workers/ci/archive_trace_worker_spec.rb17
-rw-r--r--spec/workers/ci/build_finished_worker_spec.rb13
-rw-r--r--spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb43
-rw-r--r--spec/workers/clusters/applications/activate_integration_worker_spec.rb9
-rw-r--r--spec/workers/clusters/applications/deactivate_integration_worker_spec.rb9
-rw-r--r--spec/workers/concerns/gitlab/github_import/object_importer_spec.rb22
-rw-r--r--spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb32
-rw-r--r--spec/workers/concerns/waitable_worker_spec.rb53
-rw-r--r--spec/workers/container_registry/migration/enqueuer_worker_spec.rb2
-rw-r--r--spec/workers/disallow_two_factor_for_group_worker_spec.rb2
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb8
-rw-r--r--spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb3
-rw-r--r--spec/workers/gitlab/github_import/import_issue_event_worker_spec.rb51
-rw-r--r--spec/workers/gitlab/github_import/import_issue_worker_spec.rb3
-rw-r--r--spec/workers/gitlab/github_import/import_note_worker_spec.rb3
-rw-r--r--spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb3
-rw-r--r--spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb48
-rw-r--r--spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb2
-rw-r--r--spec/workers/gitlab_service_ping_worker_spec.rb25
-rw-r--r--spec/workers/google_cloud/create_cloudsql_instance_worker_spec.rb50
-rw-r--r--spec/workers/incident_management/close_incident_worker_spec.rb65
-rw-r--r--spec/workers/integrations/execute_worker_spec.rb22
-rw-r--r--spec/workers/integrations/irker_worker_spec.rb6
-rw-r--r--spec/workers/loose_foreign_keys/cleanup_worker_spec.rb2
-rw-r--r--spec/workers/namespaces/onboarding_issue_created_worker_spec.rb2
-rw-r--r--spec/workers/packages/cleanup/execute_policy_worker_spec.rb160
-rw-r--r--spec/workers/packages/cleanup_package_file_worker_spec.rb5
-rw-r--r--spec/workers/packages/cleanup_package_registry_worker_spec.rb25
-rw-r--r--spec/workers/packages/debian/generate_distribution_worker_spec.rb6
-rw-r--r--spec/workers/packages/debian/process_changes_worker_spec.rb6
-rw-r--r--spec/workers/pages/invalidate_domain_cache_worker_spec.rb45
-rw-r--r--spec/workers/post_receive_spec.rb26
-rw-r--r--spec/workers/projects/after_import_worker_spec.rb13
-rw-r--r--spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb27
-rw-r--r--spec/workers/remove_expired_group_links_worker_spec.rb41
-rw-r--r--spec/workers/remove_expired_members_worker_spec.rb4
-rw-r--r--spec/workers/stage_update_worker_spec.rb2
-rw-r--r--spec/workers/web_hooks/destroy_worker_spec.rb64
1381 files changed, 44373 insertions, 50073 deletions
diff --git a/spec/channels/application_cable/connection_spec.rb b/spec/channels/application_cable/connection_spec.rb
index affde0095cf..f5b2cdd2fca 100644
--- a/spec/channels/application_cable/connection_spec.rb
+++ b/spec/channels/application_cable/connection_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe ApplicationCable::Connection, :clean_gitlab_redis_sessions do
context 'when user is logged in' do
let(:user) { create(:user) }
- let(:session_hash) { { 'warden.user.user.key' => [[user.id], user.encrypted_password[0, 29]] } }
+ let(:session_hash) { { 'warden.user.user.key' => [[user.id], user.authenticatable_salt] } }
it 'sets current_user' do
connect
@@ -21,7 +21,7 @@ RSpec.describe ApplicationCable::Connection, :clean_gitlab_redis_sessions do
end
context 'with a stale password' do
- let(:partial_password_hash) { build(:user, password: 'some_old_password').encrypted_password[0, 29] }
+ let(:partial_password_hash) { build(:user, password: 'some_old_password').authenticatable_salt }
let(:session_hash) { { 'warden.user.user.key' => [[user.id], partial_password_hash] } }
it 'sets current_user to nil' do
diff --git a/spec/channels/awareness_channel_spec.rb b/spec/channels/awareness_channel_spec.rb
new file mode 100644
index 00000000000..8d6dc36f6bd
--- /dev/null
+++ b/spec/channels/awareness_channel_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AwarenessChannel, :clean_gitlab_redis_shared_state, type: :channel do
+ before do
+ stub_action_cable_connection(current_user: user)
+ end
+
+ context "with user" do
+ let(:user) { create(:user) }
+
+ describe "when no path parameter given" do
+ it "rejects subscription" do
+ subscribe path: nil
+
+ expect(subscription).to be_rejected
+ end
+ end
+
+ describe "with valid path parameter" do
+ it "successfully subscribes" do
+ subscribe path: "/test"
+
+ session = AwarenessSession.for("/test")
+
+ expect(subscription).to be_confirmed
+ # check if we can use the session object instead
+ expect(subscription).to have_stream_from("awareness:#{session.to_param}")
+ end
+
+ it "broadcasts set of collaborators when subscribing" do
+ session = AwarenessSession.for("/test")
+
+ freeze_time do
+ collaborator = {
+ id: user.id,
+ name: user.name,
+ avatar_url: user.avatar_url(size: 36),
+ last_activity: Time.zone.now,
+ last_activity_humanized: ActionController::Base.helpers.distance_of_time_in_words(
+ Time.zone.now, Time.zone.now
+ )
+ }
+
+ expect do
+ subscribe path: "/test"
+ end.to have_broadcasted_to("awareness:#{session.to_param}")
+ .with(collaborators: [collaborator])
+ end
+ end
+
+ it "transmits payload when user is touched" do
+ subscribe path: "/test"
+
+ perform :touch
+
+ expect(transmissions.size).to be 1
+ end
+
+ it "unsubscribes from channel" do
+ subscribe path: "/test"
+ session = AwarenessSession.for("/test")
+
+ expect { subscription.unsubscribe_from_channel }
+ .to change { session.size }.by(-1)
+ end
+ end
+ end
+
+ context "with guest" do
+ let(:user) { nil }
+
+ it "rejects subscription" do
+ subscribe path: "/test"
+
+ expect(subscription).to be_rejected
+ end
+ end
+end
diff --git a/spec/components/pajamas/alert_component_spec.rb b/spec/components/pajamas/alert_component_spec.rb
index 1e2845c44a8..c60724c7b78 100644
--- a/spec/components/pajamas/alert_component_spec.rb
+++ b/spec/components/pajamas/alert_component_spec.rb
@@ -14,11 +14,11 @@ RSpec.describe Pajamas::AlertComponent, :aggregate_failures, type: :component do
end
it 'renders alert body' do
- expect(rendered_component).to have_content(body)
+ expect(page).to have_content(body)
end
it 'renders actions' do
- expect(rendered_component).to have_content(actions)
+ expect(page).to have_content(actions)
end
end
@@ -28,20 +28,20 @@ RSpec.describe Pajamas::AlertComponent, :aggregate_failures, type: :component do
end
it 'does not set a title' do
- expect(rendered_component).not_to have_selector('.gl-alert-title')
- expect(rendered_component).to have_selector('.gl-alert-icon-no-title')
+ expect(page).not_to have_selector('.gl-alert-title')
+ expect(page).to have_selector('.gl-alert-icon-no-title')
end
it 'renders the default variant' do
- expect(rendered_component).to have_selector('.gl-alert-info')
- expect(rendered_component).to have_selector("[data-testid='information-o-icon']")
- expect(rendered_component).not_to have_selector('.gl-alert-no-icon')
+ expect(page).to have_selector('.gl-alert-info')
+ expect(page).to have_selector("[data-testid='information-o-icon']")
+ expect(page).not_to have_selector('.gl-alert-no-icon')
end
it 'renders a dismiss button' do
- expect(rendered_component).to have_selector('.gl-dismiss-btn.js-close')
- expect(rendered_component).to have_selector("[data-testid='close-icon']")
- expect(rendered_component).not_to have_selector('.gl-alert-not-dismissible')
+ expect(page).to have_selector('.gl-dismiss-btn.js-close')
+ expect(page).to have_selector("[data-testid='close-icon']")
+ expect(page).not_to have_selector('.gl-alert-not-dismissible')
end
end
@@ -61,17 +61,17 @@ RSpec.describe Pajamas::AlertComponent, :aggregate_failures, type: :component do
end
it 'sets the title' do
- expect(rendered_component).to have_selector('.gl-alert-title')
- expect(rendered_component).to have_content('_title_')
- expect(rendered_component).not_to have_selector('.gl-alert-icon-no-title')
+ expect(page).to have_selector('.gl-alert-title')
+ expect(page).to have_content('_title_')
+ expect(page).not_to have_selector('.gl-alert-icon-no-title')
end
it 'sets the alert_class' do
- expect(rendered_component).to have_selector('._alert_class_')
+ expect(page).to have_selector('._alert_class_')
end
it 'sets the alert_data' do
- expect(rendered_component).to have_selector('[data-feature-id="_feature_id_"][data-dismiss-endpoint="_dismiss_endpoint_"]')
+ expect(page).to have_selector('[data-feature-id="_feature_id_"][data-dismiss-endpoint="_dismiss_endpoint_"]')
end
end
@@ -81,12 +81,12 @@ RSpec.describe Pajamas::AlertComponent, :aggregate_failures, type: :component do
end
it 'has the "not dismissible" class' do
- expect(rendered_component).to have_selector('.gl-alert-not-dismissible')
+ expect(page).to have_selector('.gl-alert-not-dismissible')
end
it 'does not render the dismiss button' do
- expect(rendered_component).not_to have_selector('.gl-dismiss-btn.js-close')
- expect(rendered_component).not_to have_selector("[data-testid='close-icon']")
+ expect(page).not_to have_selector('.gl-dismiss-btn.js-close')
+ expect(page).not_to have_selector("[data-testid='close-icon']")
end
end
@@ -96,12 +96,12 @@ RSpec.describe Pajamas::AlertComponent, :aggregate_failures, type: :component do
end
it 'has the hidden icon class' do
- expect(rendered_component).to have_selector('.gl-alert-no-icon')
+ expect(page).to have_selector('.gl-alert-no-icon')
end
it 'does not render the icon' do
- expect(rendered_component).not_to have_selector('.gl-alert-icon')
- expect(rendered_component).not_to have_selector("[data-testid='information-o-icon']")
+ expect(page).not_to have_selector('.gl-alert-icon')
+ expect(page).not_to have_selector("[data-testid='information-o-icon']")
end
end
@@ -118,13 +118,13 @@ RSpec.describe Pajamas::AlertComponent, :aggregate_failures, type: :component do
end
it 'does not have "not dismissible" class' do
- expect(rendered_component).not_to have_selector('.gl-alert-not-dismissible')
+ expect(page).not_to have_selector('.gl-alert-not-dismissible')
end
it 'renders a dismiss button and data' do
- expect(rendered_component).to have_selector('.gl-dismiss-btn.js-close._close_button_class_')
- expect(rendered_component).to have_selector("[data-testid='close-icon']")
- expect(rendered_component).to have_selector('[data-testid="_close_button_testid_"]')
+ expect(page).to have_selector('.gl-dismiss-btn.js-close._close_button_class_')
+ expect(page).to have_selector("[data-testid='close-icon']")
+ expect(page).to have_selector('[data-testid="_close_button_testid_"]')
end
end
@@ -137,8 +137,8 @@ RSpec.describe Pajamas::AlertComponent, :aggregate_failures, type: :component do
with_them do
it 'renders the variant' do
- expect(rendered_component).to have_selector(".gl-alert-#{variant}")
- expect(rendered_component).to have_selector("[data-testid='#{described_class::ICONS[variant]}-icon']")
+ expect(page).to have_selector(".gl-alert-#{variant}")
+ expect(page).to have_selector("[data-testid='#{described_class::ICONS[variant]}-icon']")
end
end
end
diff --git a/spec/components/pajamas/banner_component_spec.rb b/spec/components/pajamas/banner_component_spec.rb
index 5969f06dbad..26468d80c77 100644
--- a/spec/components/pajamas/banner_component_spec.rb
+++ b/spec/components/pajamas/banner_component_spec.rb
@@ -19,22 +19,22 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
end
it 'renders its content' do
- expect(rendered_component).to have_text content
+ expect(page).to have_text content
end
it 'renders its title' do
- expect(rendered_component).to have_css "h1[class='gl-banner-title']", text: title
+ expect(page).to have_css "h1[class='gl-banner-title']", text: title
end
it 'renders a close button' do
- expect(rendered_component).to have_css "button.gl-banner-close"
+ expect(page).to have_css "button.gl-banner-close"
end
describe 'button_text and button_link' do
let(:options) { { button_text: 'Learn more', button_link: '/learn-more' } }
it 'define the primary action' do
- expect(rendered_component).to have_css "a.btn-confirm.gl-button[href='/learn-more']", text: 'Learn more'
+ expect(page).to have_css "a.btn-confirm.gl-button[href='/learn-more']", text: 'Learn more'
end
end
@@ -42,14 +42,14 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
let(:options) { { banner_options: { class: "baz", data: { foo: "bar" } } } }
it 'are on the banner' do
- expect(rendered_component).to have_css ".gl-banner.baz[data-foo='bar']"
+ expect(page).to have_css ".gl-banner.baz[data-foo='bar']"
end
context 'with custom classes' do
let(:options) { { variant: :introduction, banner_options: { class: 'extra special' } } }
it 'don\'t conflict with internal banner_classes' do
- expect(rendered_component).to have_css '.extra.special.gl-banner-introduction.gl-banner'
+ expect(page).to have_css '.extra.special.gl-banner-introduction.gl-banner'
end
end
end
@@ -58,14 +58,14 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
let(:options) { { close_options: { class: "js-foo", data: { uid: "123" } } } }
it 'are on the close button' do
- expect(rendered_component).to have_css "button.gl-banner-close.js-foo[data-uid='123']"
+ expect(page).to have_css "button.gl-banner-close.js-foo[data-uid='123']"
end
end
describe 'embedded' do
context 'by default (false)' do
it 'keeps the banner\'s borders' do
- expect(rendered_component).not_to have_css ".gl-banner.gl-border-none"
+ expect(page).not_to have_css ".gl-banner.gl-border-none"
end
end
@@ -73,7 +73,7 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
let(:options) { { embedded: true } }
it 'removes the banner\'s borders' do
- expect(rendered_component).to have_css ".gl-banner.gl-border-none"
+ expect(page).to have_css ".gl-banner.gl-border-none"
end
end
end
@@ -81,7 +81,7 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
describe 'variant' do
context 'by default (promotion)' do
it 'applies no variant class' do
- expect(rendered_component).to have_css "[class='gl-banner']"
+ expect(page).to have_css "[class='gl-banner']"
end
end
@@ -89,11 +89,11 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
let(:options) { { variant: :introduction } }
it "applies the introduction class to the banner" do
- expect(rendered_component).to have_css ".gl-banner.gl-banner-introduction"
+ expect(page).to have_css ".gl-banner.gl-banner-introduction"
end
it "applies the confirm class to the close button" do
- expect(rendered_component).to have_css ".gl-banner-close.btn-confirm.btn-confirm-tertiary"
+ expect(page).to have_css ".gl-banner-close.btn-confirm.btn-confirm-tertiary"
end
end
@@ -101,21 +101,21 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
let(:options) { { variant: :foobar } }
it 'ignores the unknown variant' do
- expect(rendered_component).to have_css "[class='gl-banner']"
+ expect(page).to have_css "[class='gl-banner']"
end
end
end
describe 'illustration' do
it 'has none by default' do
- expect(rendered_component).not_to have_css ".gl-banner-illustration"
+ expect(page).not_to have_css ".gl-banner-illustration"
end
context 'with svg_path' do
let(:options) { { svg_path: 'logo.svg' } }
it 'renders an image as illustration' do
- expect(rendered_component).to have_css ".gl-banner-illustration img"
+ expect(page).to have_css ".gl-banner-illustration img"
end
end
end
@@ -131,15 +131,15 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
end
it 'renders the slot content as illustration' do
- expect(rendered_component).to have_css ".gl-banner-illustration svg"
+ expect(page).to have_css ".gl-banner-illustration svg"
end
context 'and conflicting svg_path' do
let(:options) { { svg_path: 'logo.svg' } }
it 'uses the slot content' do
- expect(rendered_component).to have_css ".gl-banner-illustration svg"
- expect(rendered_component).not_to have_css ".gl-banner-illustration img"
+ expect(page).to have_css ".gl-banner-illustration svg"
+ expect(page).not_to have_css ".gl-banner-illustration img"
end
end
end
@@ -154,15 +154,15 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
end
it 'renders the slot content as the primary action' do
- expect(rendered_component).to have_css "a.special", text: 'Special'
+ expect(page).to have_css "a.special", text: 'Special'
end
context 'and conflicting button_text and button_link' do
let(:options) { { button_text: 'Not special', button_link: '/' } }
it 'uses the slot content' do
- expect(rendered_component).to have_css "a.special[href='#']", text: 'Special'
- expect(rendered_component).not_to have_css "a.btn[href='/']"
+ expect(page).to have_css "a.special[href='#']", text: 'Special'
+ expect(page).not_to have_css "a.btn[href='/']"
end
end
end
diff --git a/spec/components/pajamas/button_component_spec.rb b/spec/components/pajamas/button_component_spec.rb
index 60c2a2e5a06..a8c96042580 100644
--- a/spec/components/pajamas/button_component_spec.rb
+++ b/spec/components/pajamas/button_component_spec.rb
@@ -17,25 +17,25 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
end
it 'renders its content' do
- expect(rendered_component).to have_text content
+ expect(page).to have_text content
end
it 'adds default styling' do
- expect(rendered_component).to have_css ".btn.btn-default.btn-md.gl-button"
+ expect(page).to have_css ".btn.btn-default.btn-md.gl-button"
end
describe 'button_options' do
let(:options) { { button_options: { id: 'baz', data: { foo: 'bar' } } } }
it 'are added to the button' do
- expect(rendered_component).to have_css ".gl-button#baz[data-foo='bar']"
+ expect(page).to have_css ".gl-button#baz[data-foo='bar']"
end
context 'with custom classes' do
let(:options) { { variant: :danger, category: :tertiary, button_options: { class: 'custom-class' } } }
it 'don\'t conflict with internal button_classes' do
- expect(rendered_component).to have_css '.gl-button.btn-danger.btn-danger-tertiary.custom-class'
+ expect(page).to have_css '.gl-button.btn-danger.btn-danger-tertiary.custom-class'
end
end
@@ -43,7 +43,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
let(:options) { { button_options: { type: 'submit' } } }
it 'overrides type' do
- expect(rendered_component).to have_css '[type="submit"]'
+ expect(page).to have_css '[type="submit"]'
end
end
end
@@ -52,14 +52,14 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
let(:options) { { button_text_classes: 'custom-text-class' } }
it 'is added to the button text' do
- expect(rendered_component).to have_css ".gl-button-text.custom-text-class"
+ expect(page).to have_css ".gl-button-text.custom-text-class"
end
end
describe 'disabled' do
context 'by default (false)' do
it 'does not have disabled styling and behavior' do
- expect(rendered_component).not_to have_css ".disabled[disabled='disabled'][aria-disabled='true']"
+ expect(page).not_to have_css ".disabled[disabled='disabled'][aria-disabled='true']"
end
end
@@ -67,7 +67,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
let(:options) { { disabled: true } }
it 'has disabled styling and behavior' do
- expect(rendered_component).to have_css ".disabled[disabled='disabled'][aria-disabled='true']"
+ expect(page).to have_css ".disabled[disabled='disabled'][aria-disabled='true']"
end
end
end
@@ -75,11 +75,11 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
describe 'loading' do
context 'by default (false)' do
it 'is not disabled' do
- expect(rendered_component).not_to have_css ".disabled[disabled='disabled']"
+ expect(page).not_to have_css ".disabled[disabled='disabled']"
end
it 'does not render a spinner' do
- expect(rendered_component).not_to have_css ".gl-spinner[aria-label='Loading']"
+ expect(page).not_to have_css ".gl-spinner[aria-label='Loading']"
end
end
@@ -87,11 +87,11 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
let(:options) { { loading: true } }
it 'is disabled' do
- expect(rendered_component).to have_css ".disabled[disabled='disabled']"
+ expect(page).to have_css ".disabled[disabled='disabled']"
end
it 'renders a spinner' do
- expect(rendered_component).to have_css ".gl-spinner[aria-label='Loading']"
+ expect(page).to have_css ".gl-spinner[aria-label='Loading']"
end
end
end
@@ -99,7 +99,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
describe 'block' do
context 'by default (false)' do
it 'is inline' do
- expect(rendered_component).not_to have_css ".btn-block"
+ expect(page).not_to have_css ".btn-block"
end
end
@@ -107,7 +107,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
let(:options) { { block: true } }
it 'is block element' do
- expect(rendered_component).to have_css ".btn-block"
+ expect(page).to have_css ".btn-block"
end
end
end
@@ -115,7 +115,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
describe 'selected' do
context 'by default (false)' do
it 'does not have selected styling and behavior' do
- expect(rendered_component).not_to have_css ".selected"
+ expect(page).not_to have_css ".selected"
end
end
@@ -123,7 +123,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
let(:options) { { selected: true } }
it 'has selected styling and behavior' do
- expect(rendered_component).to have_css ".selected"
+ expect(page).to have_css ".selected"
end
end
end
@@ -136,8 +136,8 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
with_them do
it 'renders the button in correct variant && category' do
- expect(rendered_component).to have_css(".#{described_class::VARIANT_CLASSES[variant]}")
- expect(rendered_component).to have_css(".#{described_class::VARIANT_CLASSES[variant]}-tertiary")
+ expect(page).to have_css(".#{described_class::VARIANT_CLASSES[variant]}")
+ expect(page).to have_css(".#{described_class::VARIANT_CLASSES[variant]}-tertiary")
end
end
end
@@ -149,8 +149,8 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
with_them do
it 'renders the button in correct variant && category' do
- expect(rendered_component).to have_css(".#{described_class::VARIANT_CLASSES[variant]}")
- expect(rendered_component).not_to have_css(".#{described_class::VARIANT_CLASSES[variant]}-tertiary")
+ expect(page).to have_css(".#{described_class::VARIANT_CLASSES[variant]}")
+ expect(page).not_to have_css(".#{described_class::VARIANT_CLASSES[variant]}-tertiary")
end
end
end
@@ -162,8 +162,8 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
with_them do
it 'renders the button in correct variant && category' do
- expect(rendered_component).to have_css(".#{described_class::VARIANT_CLASSES[variant]}")
- expect(rendered_component).not_to have_css(".#{described_class::VARIANT_CLASSES[variant]}-primary")
+ expect(page).to have_css(".#{described_class::VARIANT_CLASSES[variant]}")
+ expect(page).not_to have_css(".#{described_class::VARIANT_CLASSES[variant]}-primary")
end
end
end
@@ -172,7 +172,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
describe 'size' do
context 'by default (medium)' do
it 'applies medium class' do
- expect(rendered_component).to have_css ".btn-md"
+ expect(page).to have_css ".btn-md"
end
end
@@ -180,22 +180,22 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
let(:options) { { size: :small } }
it "applies the small class to the button" do
- expect(rendered_component).to have_css ".btn-sm"
+ expect(page).to have_css ".btn-sm"
end
end
end
describe 'icon' do
it 'has none by default' do
- expect(rendered_component).not_to have_css ".gl-icon"
+ expect(page).not_to have_css ".gl-icon"
end
context 'with icon' do
let(:options) { { icon: 'star-o', icon_classes: 'custom-icon' } }
it 'renders an icon with custom CSS class' do
- expect(rendered_component).to have_css "svg.gl-icon.gl-button-icon.custom-icon[data-testid='star-o-icon']"
- expect(rendered_component).not_to have_css ".btn-icon"
+ expect(page).to have_css "svg.gl-icon.gl-button-icon.custom-icon[data-testid='star-o-icon']"
+ expect(page).not_to have_css ".btn-icon"
end
end
@@ -204,7 +204,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
let(:options) { { icon: 'star-o' } }
it 'adds a "btn-icon" CSS class' do
- expect(rendered_component).to have_css ".btn.btn-icon"
+ expect(page).to have_css ".btn.btn-icon"
end
end
@@ -213,8 +213,8 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
let(:options) { { icon: 'star-o', loading: true } }
it 'renders only a loading icon' do
- expect(rendered_component).not_to have_css "svg.gl-icon.gl-button-icon.custom-icon[data-testid='star-o-icon']"
- expect(rendered_component).to have_css ".gl-spinner[aria-label='Loading']"
+ expect(page).not_to have_css "svg.gl-icon.gl-button-icon.custom-icon[data-testid='star-o-icon']"
+ expect(page).to have_css ".gl-spinner[aria-label='Loading']"
end
end
end
@@ -222,7 +222,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
describe 'type' do
context 'by default (without href)' do
it 'has type "button"' do
- expect(rendered_component).to have_css "button[type='button']"
+ expect(page).to have_css "button[type='button']"
end
end
@@ -233,7 +233,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
with_them do
it 'has the correct type' do
- expect(rendered_component).to have_css "button[type='#{type}']"
+ expect(page).to have_css "button[type='#{type}']"
end
end
end
@@ -242,7 +242,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
let(:options) { { type: :madeup } }
it 'has type "button"' do
- expect(rendered_component).to have_css "button[type='button']"
+ expect(page).to have_css "button[type='button']"
end
end
@@ -250,22 +250,22 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
let(:options) { { href: 'https://example.com', type: :reset } }
it 'ignores type' do
- expect(rendered_component).not_to have_css "[type]"
+ expect(page).not_to have_css "[type]"
end
end
end
describe 'link button' do
it 'renders a button tag with type="button" when "href" is not set' do
- expect(rendered_component).to have_css "button[type='button']"
+ expect(page).to have_css "button[type='button']"
end
context 'when "href" is provided' do
let(:options) { { href: 'https://gitlab.com', target: '_blank' } }
it "renders a link instead of the button" do
- expect(rendered_component).not_to have_css "button[type='button']"
- expect(rendered_component).to have_css "a[href='https://gitlab.com'][target='_blank']"
+ expect(page).not_to have_css "button[type='button']"
+ expect(page).to have_css "a[href='https://gitlab.com'][target='_blank']"
end
end
end
diff --git a/spec/components/pajamas/card_component_spec.rb b/spec/components/pajamas/card_component_spec.rb
index 65522a9023f..38d23cfca9c 100644
--- a/spec/components/pajamas/card_component_spec.rb
+++ b/spec/components/pajamas/card_component_spec.rb
@@ -16,15 +16,15 @@ RSpec.describe Pajamas::CardComponent, :aggregate_failures, type: :component do
end
it 'renders card header' do
- expect(rendered_component).to have_content(header)
+ expect(page).to have_content(header)
end
it 'renders card body' do
- expect(rendered_component).to have_content(body)
+ expect(page).to have_content(body)
end
it 'renders footer' do
- expect(rendered_component).to have_content(footer)
+ expect(page).to have_content(footer)
end
end
@@ -34,13 +34,13 @@ RSpec.describe Pajamas::CardComponent, :aggregate_failures, type: :component do
end
it 'does not have a header or footer' do
- expect(rendered_component).not_to have_selector('.gl-card-header')
- expect(rendered_component).not_to have_selector('.gl-card-footer')
+ expect(page).not_to have_selector('.gl-card-header')
+ expect(page).not_to have_selector('.gl-card-footer')
end
it 'renders the card and body' do
- expect(rendered_component).to have_selector('.gl-card')
- expect(rendered_component).to have_selector('.gl-card-body')
+ expect(page).to have_selector('.gl-card')
+ expect(page).to have_selector('.gl-card-body')
end
end
@@ -58,23 +58,23 @@ RSpec.describe Pajamas::CardComponent, :aggregate_failures, type: :component do
end
it 'renders card options' do
- expect(rendered_component).to have_selector('._card_class_')
- expect(rendered_component).to have_selector('[data-testid="_card_testid_"]')
+ expect(page).to have_selector('._card_class_')
+ expect(page).to have_selector('[data-testid="_card_testid_"]')
end
it 'renders header options' do
- expect(rendered_component).to have_selector('._header_class_')
- expect(rendered_component).to have_selector('[data-testid="_header_testid_"]')
+ expect(page).to have_selector('._header_class_')
+ expect(page).to have_selector('[data-testid="_header_testid_"]')
end
it 'renders body options' do
- expect(rendered_component).to have_selector('._body_class_')
- expect(rendered_component).to have_selector('[data-testid="_body_testid_"]')
+ expect(page).to have_selector('._body_class_')
+ expect(page).to have_selector('[data-testid="_body_testid_"]')
end
it 'renders footer options' do
- expect(rendered_component).to have_selector('._footer_class_')
- expect(rendered_component).to have_selector('[data-testid="_footer_testid_"]')
+ expect(page).to have_selector('._footer_class_')
+ expect(page).to have_selector('[data-testid="_footer_testid_"]')
end
end
end
diff --git a/spec/components/pajamas/checkbox_component_spec.rb b/spec/components/pajamas/checkbox_component_spec.rb
index b2f3a84fbfe..d79c537a30e 100644
--- a/spec/components/pajamas/checkbox_component_spec.rb
+++ b/spec/components/pajamas/checkbox_component_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Pajamas::CheckboxComponent, :aggregate_failures, type: :component
RSpec.shared_examples 'it renders unchecked checkbox with value of `1`' do
it 'renders unchecked checkbox with value of `1`' do
- expect(rendered_component).to have_unchecked_field(label, with: '1')
+ expect(page).to have_unchecked_field(label, with: '1')
end
end
@@ -31,7 +31,7 @@ RSpec.describe Pajamas::CheckboxComponent, :aggregate_failures, type: :component
include_examples 'it does not render help text'
it 'renders hidden input with value of `0`' do
- expect(rendered_component).to have_field('user[view_diffs_file_by_file]', type: 'hidden', with: '0')
+ expect(page).to have_field('user[view_diffs_file_by_file]', type: 'hidden', with: '0')
end
end
@@ -61,15 +61,15 @@ RSpec.describe Pajamas::CheckboxComponent, :aggregate_failures, type: :component
include_examples 'it renders help text'
it 'renders checked checkbox with value of `yes`' do
- expect(rendered_component).to have_checked_field(label, with: checked_value, class: checkbox_options[:class])
+ expect(page).to have_checked_field(label, with: checked_value, class: checkbox_options[:class])
end
it 'adds CSS class to label' do
- expect(rendered_component).to have_selector('label.label-foo-bar')
+ expect(page).to have_selector('label.label-foo-bar')
end
it 'renders hidden input with value of `no`' do
- expect(rendered_component).to have_field('user[view_diffs_file_by_file]', type: 'hidden', with: unchecked_value)
+ expect(page).to have_field('user[view_diffs_file_by_file]', type: 'hidden', with: unchecked_value)
end
end
diff --git a/spec/components/pajamas/radio_component_spec.rb b/spec/components/pajamas/radio_component_spec.rb
index 3885d101c7a..8df432746d0 100644
--- a/spec/components/pajamas/radio_component_spec.rb
+++ b/spec/components/pajamas/radio_component_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Pajamas::RadioComponent, :aggregate_failures, type: :component do
RSpec.shared_examples 'it renders unchecked radio' do
it 'renders unchecked radio' do
- expect(rendered_component).to have_unchecked_field(label)
+ expect(page).to have_unchecked_field(label)
end
end
@@ -58,11 +58,11 @@ RSpec.describe Pajamas::RadioComponent, :aggregate_failures, type: :component do
include_examples 'it renders help text'
it 'renders checked radio' do
- expect(rendered_component).to have_checked_field(label, class: radio_options[:class])
+ expect(page).to have_checked_field(label, class: radio_options[:class])
end
it 'adds CSS class to label' do
- expect(rendered_component).to have_selector('label.label-foo-bar')
+ expect(page).to have_selector('label.label-foo-bar')
end
end
diff --git a/spec/components/pajamas/spinner_component_spec.rb b/spec/components/pajamas/spinner_component_spec.rb
new file mode 100644
index 00000000000..9aac9a0085c
--- /dev/null
+++ b/spec/components/pajamas/spinner_component_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+require "spec_helper"
+
+RSpec.describe Pajamas::SpinnerComponent, type: :component do
+ let(:options) { {} }
+
+ before do
+ render_inline(described_class.new(**options))
+ end
+
+ describe 'class' do
+ let(:options) { { class: 'gl-my-6' } }
+
+ it 'has the correct custom class' do
+ expect(page).to have_css '.gl-spinner-container.gl-my-6'
+ end
+ end
+
+ describe 'color' do
+ context 'by default' do
+ it 'is dark' do
+ expect(page).to have_css '.gl-spinner.gl-spinner-dark'
+ end
+ end
+
+ context 'set to light' do
+ let(:options) { { color: :light } }
+
+ it 'is light' do
+ expect(page).to have_css '.gl-spinner.gl-spinner-light'
+ end
+ end
+ end
+
+ describe 'inline' do
+ context 'by default' do
+ it 'renders a div' do
+ expect(page).to have_css 'div.gl-spinner'
+ end
+ end
+
+ context 'set to true' do
+ let(:options) { { inline: true } }
+
+ it 'renders a span' do
+ expect(page).to have_css 'span.gl-spinner'
+ end
+ end
+ end
+
+ describe 'label' do
+ context 'by default' do
+ it 'has "Loading" as aria-label' do
+ expect(page).to have_css '.gl-spinner[aria-label="Loading"]'
+ end
+ end
+
+ context 'when set to something else' do
+ let(:options) { { label: "Sending" } }
+
+ it 'has a custom aria-label' do
+ expect(page).to have_css '.gl-spinner[aria-label="Sending"]'
+ end
+ end
+ end
+
+ describe 'size' do
+ let(:options) { { size: :lg } }
+
+ it 'has the correct size class' do
+ expect(page).to have_css '.gl-spinner.gl-spinner-lg'
+ end
+ end
+end
diff --git a/spec/components/pajamas/toggle_component_spec.rb b/spec/components/pajamas/toggle_component_spec.rb
index b2727dec318..6b0a2d26979 100644
--- a/spec/components/pajamas/toggle_component_spec.rb
+++ b/spec/components/pajamas/toggle_component_spec.rb
@@ -8,31 +8,31 @@ RSpec.describe Pajamas::ToggleComponent, type: :component do
end
it 'renders a toggle container with provided class' do
- expect(rendered_component).to have_selector "[class='js-feature-toggle']"
+ expect(page).to have_selector "[class='js-feature-toggle']"
end
it 'does not set a name' do
- expect(rendered_component).not_to have_selector('[data-name]')
+ expect(page).not_to have_selector('[data-name]')
end
it 'sets default is-checked attributes' do
- expect(rendered_component).to have_selector('[data-is-checked="false"]')
+ expect(page).to have_selector('[data-is-checked="false"]')
end
it 'sets default disabled attributes' do
- expect(rendered_component).to have_selector('[data-disabled="false"]')
+ expect(page).to have_selector('[data-disabled="false"]')
end
it 'sets default is-loading attributes' do
- expect(rendered_component).to have_selector('[data-is-loading="false"]')
+ expect(page).to have_selector('[data-is-loading="false"]')
end
it 'does not set a label' do
- expect(rendered_component).not_to have_selector('[data-label]')
+ expect(page).not_to have_selector('[data-label]')
end
it 'does not set a label position' do
- expect(rendered_component).not_to have_selector('[data-label-position]')
+ expect(page).not_to have_selector('[data-label-position]')
end
end
@@ -52,35 +52,35 @@ RSpec.describe Pajamas::ToggleComponent, type: :component do
end
it 'sets the custom class' do
- expect(rendered_component).to have_selector('.js-custom-gl-toggle')
+ expect(page).to have_selector('.js-custom-gl-toggle')
end
it 'sets the custom name' do
- expect(rendered_component).to have_selector('[data-name="toggle-name"]')
+ expect(page).to have_selector('[data-name="toggle-name"]')
end
it 'sets the custom is-checked attributes' do
- expect(rendered_component).to have_selector('[data-is-checked="true"]')
+ expect(page).to have_selector('[data-is-checked="true"]')
end
it 'sets the custom disabled attributes' do
- expect(rendered_component).to have_selector('[data-disabled="true"]')
+ expect(page).to have_selector('[data-disabled="true"]')
end
it 'sets the custom is-loading attributes' do
- expect(rendered_component).to have_selector('[data-is-loading="true"]')
+ expect(page).to have_selector('[data-is-loading="true"]')
end
it 'sets the custom label' do
- expect(rendered_component).to have_selector('[data-label="Custom label"]')
+ expect(page).to have_selector('[data-label="Custom label"]')
end
it 'sets the custom label position' do
- expect(rendered_component).to have_selector('[data-label-position="top"]')
+ expect(page).to have_selector('[data-label-position="top"]')
end
it 'sets custom data attributes' do
- expect(rendered_component).to have_selector('[data-foo="bar"]')
+ expect(page).to have_selector('[data-foo="bar"]')
end
end
@@ -101,7 +101,7 @@ RSpec.describe Pajamas::ToggleComponent, type: :component do
end
with_them do
- it { expect(rendered_component).to have_selector("[data-label-position='#{position}']", count: count) }
+ it { expect(page).to have_selector("[data-label-position='#{position}']", count: count) }
end
end
end
diff --git a/spec/contracts/consumer/endpoints/project/pipelines.js b/spec/contracts/consumer/endpoints/project/pipelines.js
new file mode 100644
index 00000000000..33758dee75b
--- /dev/null
+++ b/spec/contracts/consumer/endpoints/project/pipelines.js
@@ -0,0 +1,16 @@
+import { request } from 'axios';
+
+export function getProjectPipelines(endpoint) {
+ const { url } = endpoint;
+
+ return request({
+ method: 'GET',
+ baseURL: url,
+ url: '/gitlab-org/gitlab-qa/-/pipelines.json',
+ headers: { Accept: '*/*' },
+ params: {
+ scope: 'all',
+ page: 1,
+ },
+ }).then((response) => response.data);
+}
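A minimal usage sketch for the endpoint helper above, assuming a sibling import path and an illustrative Pact mock-server URL (both are placeholders, not part of the change):

  import { getProjectPipelines } from './pipelines';

  getProjectPipelines({ url: 'http://localhost:1234' }).then((pipelines) => {
    // Resolves with the parsed JSON body returned by the mock server,
    // e.g. { pipelines: [...], count: { all: '1,000+' } }.
    console.log(pipelines.count.all);
  });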
diff --git a/spec/contracts/consumer/fixtures/project/pipeline/get_list_project_pipelines.fixture.js b/spec/contracts/consumer/fixtures/project/pipeline/get_list_project_pipelines.fixture.js
new file mode 100644
index 00000000000..8a7663325b9
--- /dev/null
+++ b/spec/contracts/consumer/fixtures/project/pipeline/get_list_project_pipelines.fixture.js
@@ -0,0 +1,243 @@
+/* eslint-disable @gitlab/require-i18n-strings */
+
+import { Matchers } from '@pact-foundation/pact';
+import {
+ URL,
+ URL_PATH,
+ PIPELINE_GROUPS,
+ PIPELINE_LABELS,
+ PIPELINE_SOURCES,
+ PIPELINE_STATUSES,
+ PIPELINE_TEXTS,
+ USER_STATES,
+} from '../../../helpers/common_regex_patterns';
+
+const body = {
+ pipelines: Matchers.eachLike({
+ id: Matchers.integer(564173401),
+ iid: Matchers.integer(8197225),
+ user: {
+ id: Matchers.integer(1781152),
+ username: Matchers.string('gitlab-bot'),
+ name: Matchers.string('🤖 GitLab Bot 🤖'),
+ state: Matchers.term({
+ matcher: USER_STATES,
+ generate: 'active',
+ }),
+ avatar_url: Matchers.term({
+ matcher: URL,
+ generate: 'https://gitlab.com/uploads/-/system/user/avatar/1516152/avatar.png',
+ }),
+ web_url: Matchers.term({
+ matcher: URL,
+ generate: 'https://gitlab.com/gitlab-bot',
+ }),
+ show_status: Matchers.boolean(false),
+ path: Matchers.term({
+ matcher: URL_PATH,
+ generate: '/gitlab-bot',
+ }),
+ },
+ active: Matchers.boolean(true),
+ source: Matchers.term({
+ matcher: PIPELINE_SOURCES,
+ generate: 'schedule',
+ }),
+ created_at: Matchers.iso8601DateTimeWithMillis('2022-06-11T00:05:21.558Z'),
+ updated_at: Matchers.iso8601DateTimeWithMillis('2022-06-11T00:05:34.258Z'),
+ path: Matchers.term({
+ matcher: URL_PATH,
+ generate: '/gitlab-org/gitlab/-/pipelines/561224401',
+ }),
+ flags: {
+ stuck: Matchers.boolean(false),
+ auto_devops: Matchers.boolean(false),
+ merge_request: Matchers.boolean(false),
+ yaml_errors: Matchers.boolean(false),
+ retryable: Matchers.boolean(false),
+ cancelable: Matchers.boolean(false),
+ failure_reason: Matchers.boolean(false),
+ detached_merge_request_pipeline: Matchers.boolean(false),
+ merge_request_pipeline: Matchers.boolean(false),
+ merge_train_pipeline: Matchers.boolean(false),
+ latest: Matchers.boolean(true),
+ },
+ details: {
+ status: {
+ icon: Matchers.term({
+ matcher: PIPELINE_STATUSES,
+ generate: 'status_running',
+ }),
+ text: Matchers.term({
+ matcher: PIPELINE_TEXTS,
+ generate: 'running',
+ }),
+ label: Matchers.term({
+ matcher: PIPELINE_LABELS,
+ generate: 'running',
+ }),
+ group: Matchers.term({
+ matcher: PIPELINE_GROUPS,
+ generate: 'running',
+ }),
+ tooltip: Matchers.term({
+ matcher: PIPELINE_LABELS,
+ generate: 'passed',
+ }),
+ has_details: Matchers.boolean(true),
+ details_path: Matchers.term({
+ matcher: URL_PATH,
+ generate: '/gitlab-org/gitlab/-/pipelines/566374401',
+ }),
+ illustration: null,
+ favicon: Matchers.term({
+ matcher: URL_PATH,
+ generate: '/assets/ci_favicons/favicon_status_running.png',
+ }),
+ },
+ stages: Matchers.eachLike({
+ name: Matchers.string('sync'),
+ title: Matchers.string('sync: passed'),
+ status: {
+ icon: Matchers.term({
+ matcher: PIPELINE_STATUSES,
+ generate: 'status_success',
+ }),
+ text: Matchers.term({
+ matcher: PIPELINE_TEXTS,
+ generate: 'passed',
+ }),
+ label: Matchers.term({
+ matcher: PIPELINE_LABELS,
+ generate: 'passed',
+ }),
+ group: Matchers.term({
+ matcher: PIPELINE_GROUPS,
+ generate: 'success',
+ }),
+ tooltip: Matchers.term({
+ matcher: PIPELINE_LABELS,
+ generate: 'passed',
+ }),
+ has_details: Matchers.boolean(true),
+ details_path: Matchers.term({
+ matcher: URL_PATH,
+ generate: '/gitlab-org/gitlab/-/pipelines/561174401#sync',
+ }),
+ illustration: null,
+ favicon: Matchers.term({
+ matcher: URL_PATH,
+ generate: '/assets/ci_favicons/favicon_status_success.png',
+ }),
+ },
+ path: Matchers.term({
+ matcher: URL_PATH,
+ generate: '/gitlab-org/gitlab/-/pipelines/561124401#sync',
+ }),
+ dropdown_path: Matchers.term({
+ matcher: URL_PATH,
+ generate: '/gitlab-org/gitlab/-/pipelines/561174401/stage.json?stage=sync',
+ }),
+ }),
+ duration: Matchers.integer(25),
+ finished_at: Matchers.iso8601DateTimeWithMillis('2022-06-11T00:55:21.558Z'),
+ name: Matchers.string('Pipeline'),
+ manual_actions: Matchers.eachLike({
+ name: Matchers.string('review-docs-deploy'),
+ playable: Matchers.boolean(true),
+ scheduled: Matchers.boolean(false),
+ }),
+ scheduled_actions: Matchers.eachLike({
+ name: Matchers.string('review-docs-schedule'),
+ playable: Matchers.boolean(true),
+ scheduled: Matchers.boolean(false),
+ }),
+ },
+ ref: {
+ name: Matchers.string('master'),
+ path: Matchers.term({
+ matcher: URL_PATH,
+ generate: '/gitlab-org/gitlab/-/commits/master',
+ }),
+ tag: Matchers.boolean(false),
+ branch: Matchers.boolean(true),
+ merge_request: Matchers.boolean(false),
+ },
+ commit: {
+ id: Matchers.string('e6d797385144b955c6d4ecfa00e9656dc33efd2b'),
+ short_id: Matchers.string('e6d79738'),
+ created_at: Matchers.iso8601DateTimeWithMillis('2022-06-10T22:02:10.000+00:00'),
+ parent_ids: Matchers.eachLike(Matchers.string('3b0e053a24958174eaa7e3b183c7263432890d1c')),
+ title: Matchers.string("Merge branch 'ee-test' into 'master'"),
+ message: Matchers.string("Merge branch 'ee-test' into 'master'\nThis is a test."),
+ author_name: Matchers.string('John Doe'),
+ author_email: Matchers.email('jdoe@gitlab.com'),
+ authored_date: Matchers.iso8601DateTimeWithMillis('2022-06-10T22:02:10.000+00:00'),
+ committer_name: Matchers.string('John Doe'),
+ committer_email: Matchers.email('jdoe@gitlab.com'),
+ committed_date: Matchers.iso8601DateTimeWithMillis('2022-06-10T22:02:10.000+00:00'),
+ trailers: {},
+ web_url: Matchers.term({
+ matcher: URL,
+ generate: 'https://gitlab.com/gitlab-org/gitlab/-/commit/f559253c514d9ab707c66e',
+ }),
+ author: null,
+ author_gravatar_url: Matchers.term({
+ matcher: URL,
+ generate:
+ 'https://secure.gravatar.com/avatar/d85e45af29611ac2c1395e3c3d6ec5d6?s=80\u0026d=identicon',
+ }),
+ commit_url: Matchers.term({
+ matcher: URL,
+ generate:
+ 'https://gitlab.com/gitlab-org/gitlab/-/commit/dc7522f559253c514d9ab707c66e7a1026abca5a',
+ }),
+ commit_path: Matchers.term({
+ matcher: URL_PATH,
+ generate: '/gitlab-org/gitlab/-/commit/dc7522f559253c514d9ab707c66e7a1026abca5a',
+ }),
+ },
+ project: {
+ id: Matchers.integer(253964),
+ name: Matchers.string('GitLab'),
+ full_path: Matchers.term({
+ matcher: URL_PATH,
+ generate: '/gitlab-org/gitlab',
+ }),
+ full_name: Matchers.string('GitLab.org / GitLab'),
+ },
+ triggered_by: null,
+ triggered: [],
+ }),
+ count: {
+ all: Matchers.string('1,000+'),
+ },
+};
+
+const ProjectPipelines = {
+ body: Matchers.extractPayload(body),
+
+ success: {
+ status: 200,
+ headers: {
+ 'Content-Type': 'application/json; charset=utf-8',
+ },
+ body,
+ },
+
+ request: {
+ uponReceiving: 'a request for a list of project pipelines',
+ withRequest: {
+ method: 'GET',
+ path: '/gitlab-org/gitlab-qa/-/pipelines.json',
+ headers: {
+ Accept: '*/*',
+ },
+ query: 'scope=all&page=1',
+ },
+ },
+};
+
+export { ProjectPipelines };
+
+/* eslint-enable @gitlab/require-i18n-strings */
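For readers new to the matcher DSL used in this fixture: Matchers.term pairs a Ruby-compatible regular expression (matcher) with a concrete example value (generate), and Matchers.extractPayload reduces the matcher tree back to those concrete values so the consumer spec can compare plain data. A small sketch, reusing the USER_STATES pattern from the shared helpers:

  import { Matchers } from '@pact-foundation/pact';
  import { USER_STATES } from '../../../helpers/common_regex_patterns';

  const state = Matchers.term({ matcher: USER_STATES, generate: 'active' });
  // The mock server responds with the `generate` value; provider verification
  // only requires the real value to satisfy the `matcher` regex.
  const plain = Matchers.extractPayload({ state });
  // => { state: 'active' }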
diff --git a/spec/contracts/consumer/fixtures/project/pipeline/get_pipeline_header_data.fixture.js b/spec/contracts/consumer/fixtures/project/pipeline/get_pipeline_header_data.fixture.js
new file mode 100644
index 00000000000..f51ed9c2c74
--- /dev/null
+++ b/spec/contracts/consumer/fixtures/project/pipeline/get_pipeline_header_data.fixture.js
@@ -0,0 +1,99 @@
+/* eslint-disable @gitlab/require-i18n-strings */
+
+import { Matchers } from '@pact-foundation/pact';
+import {
+ JOB_STATUSES,
+ PIPELINE_GROUPS,
+ PIPELINE_STATUSES,
+ PIPELINE_TEXTS,
+ URL,
+ URL_PATH,
+} from '../../../helpers/common_regex_patterns';
+
+const body = {
+ data: {
+ project: {
+ id: Matchers.string('gid://gitlab/Project/278964'),
+ pipeline: {
+ id: Matchers.string('gid://gitlab/Ci::Pipeline/577266584'),
+ iid: Matchers.string('1175084'),
+ status: Matchers.term({
+ matcher: JOB_STATUSES,
+ generate: 'RUNNING',
+ }),
+ retryable: Matchers.boolean(false),
+ cancelable: Matchers.boolean(true),
+ userPermissions: {
+ destroyPipeline: Matchers.boolean(false),
+ updatePipeline: Matchers.boolean(true),
+ },
+ detailedStatus: {
+ id: Matchers.string('running-577266584-577266584'),
+ detailsPath: Matchers.term({
+ matcher: URL_PATH,
+ generate: '/gitlab-org/gitlab/-/pipelines/577266584',
+ }),
+ icon: Matchers.term({
+ matcher: PIPELINE_STATUSES,
+ generate: 'status_running',
+ }),
+ group: Matchers.term({
+ matcher: PIPELINE_GROUPS,
+ generate: 'running',
+ }),
+ text: Matchers.term({
+ matcher: PIPELINE_TEXTS,
+ generate: 'running',
+ }),
+ },
+ createdAt: Matchers.iso8601DateTime('2022-06-30T16:58:59Z'),
+ user: {
+ id: Matchers.string('gid://gitlab/User/194645'),
+ name: Matchers.string('John Doe'),
+ username: Matchers.string('jdoe'),
+ webPath: Matchers.term({
+ matcher: URL_PATH,
+ generate: '/gitlab-bot',
+ }),
+ webUrl: Matchers.term({
+ matcher: URL,
+ generate: 'https://gitlab.com/gitlab-bot',
+ }),
+ email: null,
+ avatarUrl: Matchers.term({
+ matcher: URL,
+ generate:
+ 'https://www.gravatar.com/avatar/10fc7f102be8de7657fb4d80898bbfe3?s=80&d=identicon',
+ }),
+ status: null,
+ },
+ },
+ },
+ },
+};
+
+const PipelineHeaderData = {
+ body: Matchers.extractPayload(body),
+
+ success: {
+ status: 200,
+ headers: {
+ 'Content-Type': 'application/json; charset=utf-8',
+ },
+ body,
+ },
+
+ request: {
+ method: 'POST',
+ path: '/api/graphql',
+ },
+
+ variables: {
+ fullPath: 'gitlab-org/gitlab-qa',
+ iid: 1,
+ },
+};
+
+export { PipelineHeaderData };
+
+/* eslint-enable @gitlab/require-i18n-strings */
diff --git a/spec/contracts/consumer/helpers/common_regex_patterns.js b/spec/contracts/consumer/helpers/common_regex_patterns.js
new file mode 100644
index 00000000000..664a71ab8a9
--- /dev/null
+++ b/spec/contracts/consumer/helpers/common_regex_patterns.js
@@ -0,0 +1,24 @@
+/**
+ * Important note: These regex patterns need to use Ruby format because the underlying Pact mock service is written in Ruby.
+ */
+export const URL = '^(http|https)://[a-z0-9]+([-.]{1}[a-z0-9]+)*.[a-z]{2,5}(:[0-9]{1,5})?(/.*)?$';
+export const URL_PATH = '^/[a-zA-Z0-9#-=?_]+$';
+
+// Pipelines
+export const PIPELINE_GROUPS =
+ '^(canceled|created|failed|manual|pending|preparing|running|scheduled|skipped|success|success_warning|waiting-for-resource)$';
+export const PIPELINE_LABELS =
+ '^(canceled|created|delayed|failed|manual action|passed|pending|preparing|running|skipped|passed with warnings|waiting for resource)$';
+export const PIPELINE_SOURCES =
+ '^(push|web|trigger|schedule|api|external|pipeline|chat|webide|merge_request_event|external_pull_request_event|parent_pipeline|ondemand_dast_scan|ondemand_dast_validation)$';
+export const PIPELINE_STATUSES =
+ '^status_(canceled|created|failed|manual|pending|preparing|running|scheduled|skipped|success|warning)$';
+export const PIPELINE_TEXTS =
+ '^(canceled|created|delayed|failed|manual|passed|pending|preparing|running|skipped|waiting)$';
+
+// Jobs
+export const JOB_STATUSES =
+ '^(CANCELED|CREATED|FAILED|MANUAL|PENDING|PREPARING|RUNNING|SCHEDULED|SKIPPED|SUCCESS|WAITING_FOR_RESOURCE)$';
+
+// Users
+export const USER_STATES = '^(active|blocked)$';
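Since these constants are plain regex source strings, and these particular patterns are valid in JavaScript as well as in the Ruby mock service, they can be sanity-checked directly; a small sketch:

  import { PIPELINE_SOURCES, USER_STATES } from './common_regex_patterns';

  new RegExp(PIPELINE_SOURCES).test('schedule'); // => true
  new RegExp(USER_STATES).test('deactivated');   // => false ('active' and 'blocked' only)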
diff --git a/spec/contracts/consumer/helpers/graphql_query_extractor.js b/spec/contracts/consumer/helpers/graphql_query_extractor.js
new file mode 100644
index 00000000000..884fa2cf528
--- /dev/null
+++ b/spec/contracts/consumer/helpers/graphql_query_extractor.js
@@ -0,0 +1,8 @@
+import { readFile } from 'fs/promises';
+import path from 'path';
+
+export async function extractGraphQLQuery(fileLocation) {
+ const file = path.resolve(__dirname, '..', '..', '..', '..', fileLocation);
+
+ return readFile(file, { encoding: 'UTF-8' });
+}
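A usage sketch for the extractor: it resolves the given location against the repository root (four directories up from this helper) and resolves to the file contents as a string. The query path below is the same one the pipeline specs use:

  import { extractGraphQLQuery } from './graphql_query_extractor';

  extractGraphQLQuery(
    'app/assets/javascripts/pipelines/graphql/queries/get_pipeline_header_data.query.graphql',
  ).then((query) => {
    // `query` holds the raw GraphQL document text handed to the Pact interaction.
  });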
diff --git a/spec/contracts/consumer/resources/graphql/pipelines.js b/spec/contracts/consumer/resources/graphql/pipelines.js
new file mode 100644
index 00000000000..4f7ce58891c
--- /dev/null
+++ b/spec/contracts/consumer/resources/graphql/pipelines.js
@@ -0,0 +1,25 @@
+import axios from 'axios';
+
+import { extractGraphQLQuery } from '../../helpers/graphql_query_extractor';
+
+export async function getPipelineHeaderDataRequest(endpoint) {
+ const { url } = endpoint;
+ const query = await extractGraphQLQuery(
+ 'app/assets/javascripts/pipelines/graphql/queries/get_pipeline_header_data.query.graphql',
+ );
+ const graphqlQuery = {
+ query,
+ variables: {
+ fullPath: 'gitlab-org/gitlab-qa',
+ iid: 1,
+ },
+ };
+
+ return axios({
+ baseURL: url,
+ url: '/api/graphql',
+ method: 'POST',
+ headers: { Accept: '*/*' },
+ data: graphqlQuery,
+ });
+}
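Note that, unlike getProjectPipelines, this helper resolves with the whole axios response rather than response.data, which is why the show spec later in this diff compares pipelineHeaderData.data against the fixture body. A sketch with an illustrative mock-server URL:

  import { getPipelineHeaderDataRequest } from './pipelines';

  getPipelineHeaderDataRequest({ url: 'http://localhost:1234' }).then((response) => {
    // response.data => { data: { project: { pipeline: { ... } } } }
  });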
diff --git a/spec/contracts/consumer/specs/project/pipeline/index.spec.js b/spec/contracts/consumer/specs/project/pipeline/index.spec.js
new file mode 100644
index 00000000000..1c0358a3e28
--- /dev/null
+++ b/spec/contracts/consumer/specs/project/pipeline/index.spec.js
@@ -0,0 +1,44 @@
+/* eslint-disable @gitlab/require-i18n-strings */
+
+import { pactWith } from 'jest-pact';
+
+import { ProjectPipelines } from '../../../fixtures/project/pipeline/get_list_project_pipelines.fixture';
+import { getProjectPipelines } from '../../../endpoints/project/pipelines';
+
+const CONSUMER_NAME = 'Pipelines#index';
+const CONSUMER_LOG = '../logs/consumer.log';
+const CONTRACT_DIR = '../contracts/project/pipeline/index';
+const PROVIDER_NAME = 'GET List project pipelines';
+
+// API endpoint: /pipelines.json
+pactWith(
+ {
+ consumer: CONSUMER_NAME,
+ provider: PROVIDER_NAME,
+ log: CONSUMER_LOG,
+ dir: CONTRACT_DIR,
+ },
+
+ (provider) => {
+ describe(PROVIDER_NAME, () => {
+ beforeEach(() => {
+ const interaction = {
+ state: 'a few pipelines for a project exists',
+ ...ProjectPipelines.request,
+ willRespondWith: ProjectPipelines.success,
+ };
+ provider.addInteraction(interaction);
+ });
+
+ it('returns a successful body', () => {
+ return getProjectPipelines({
+ url: provider.mockService.baseUrl,
+ }).then((pipelines) => {
+ expect(pipelines).toEqual(ProjectPipelines.body);
+ });
+ });
+ });
+ },
+);
+
+/* eslint-enable @gitlab/require-i18n-strings */
diff --git a/spec/contracts/consumer/specs/project/pipeline/show.spec.js b/spec/contracts/consumer/specs/project/pipeline/show.spec.js
new file mode 100644
index 00000000000..0f1cc1c3108
--- /dev/null
+++ b/spec/contracts/consumer/specs/project/pipeline/show.spec.js
@@ -0,0 +1,53 @@
+/* eslint-disable @gitlab/require-i18n-strings */
+
+import { pactWith } from 'jest-pact';
+import { GraphQLInteraction } from '@pact-foundation/pact';
+
+import { extractGraphQLQuery } from '../../../helpers/graphql_query_extractor';
+
+import { PipelineHeaderData } from '../../../fixtures/project/pipeline/get_pipeline_header_data.fixture';
+import { getPipelineHeaderDataRequest } from '../../../resources/graphql/pipelines';
+
+const CONSUMER_NAME = 'Pipelines#show';
+const CONSUMER_LOG = '../logs/consumer.log';
+const CONTRACT_DIR = '../contracts/project/pipeline/show';
+const PROVIDER_NAME = 'GET pipeline header data';
+
+// GraphQL query: getPipelineHeaderData
+pactWith(
+ {
+ consumer: CONSUMER_NAME,
+ provider: PROVIDER_NAME,
+ log: CONSUMER_LOG,
+ dir: CONTRACT_DIR,
+ },
+
+ (provider) => {
+ describe(PROVIDER_NAME, () => {
+ beforeEach(async () => {
+ const query = await extractGraphQLQuery(
+ 'app/assets/javascripts/pipelines/graphql/queries/get_pipeline_header_data.query.graphql',
+ );
+ const graphqlQuery = new GraphQLInteraction()
+ .given('a pipeline for a project exists')
+ .uponReceiving('a request for the pipeline header data')
+ .withQuery(query)
+ .withRequest(PipelineHeaderData.request)
+ .withVariables(PipelineHeaderData.variables)
+ .willRespondWith(PipelineHeaderData.success);
+
+ provider.addInteraction(graphqlQuery);
+ });
+
+ it('returns a successful body', async () => {
+ const pipelineHeaderData = await getPipelineHeaderDataRequest({
+ url: provider.mockService.baseUrl,
+ });
+
+ expect(pipelineHeaderData.data).toEqual(PipelineHeaderData.body);
+ });
+ });
+ },
+);
+
+/* eslint-enable @gitlab/require-i18n-strings */
diff --git a/spec/contracts/consumer/yarn.lock b/spec/contracts/consumer/yarn.lock
new file mode 100644
index 00000000000..754541b56ae
--- /dev/null
+++ b/spec/contracts/consumer/yarn.lock
@@ -0,0 +1,4775 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+"@ampproject/remapping@^2.1.0":
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d"
+ integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==
+ dependencies:
+ "@jridgewell/gen-mapping" "^0.1.0"
+ "@jridgewell/trace-mapping" "^0.3.9"
+
+"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.16.7.tgz#44416b6bd7624b998f5b1af5d470856c40138789"
+ integrity sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==
+ dependencies:
+ "@babel/highlight" "^7.16.7"
+
+"@babel/compat-data@^7.13.11", "@babel/compat-data@^7.17.10":
+ version "7.18.5"
+ resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.18.5.tgz#acac0c839e317038c73137fbb6ef71a1d6238471"
+ integrity sha512-BxhE40PVCBxVEJsSBhB6UWyAuqJRxGsAw8BdHMJ3AKGydcwuWW4kOO3HmqBQAdcq/OP+/DlTVxLvsCzRTnZuGg==
+
+"@babel/core@^7.1.0", "@babel/core@^7.11.6", "@babel/core@^7.12.3", "@babel/core@^7.7.2", "@babel/core@^7.8.0":
+ version "7.18.5"
+ resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.18.5.tgz#c597fa680e58d571c28dda9827669c78cdd7f000"
+ integrity sha512-MGY8vg3DxMnctw0LdvSEojOsumc70g0t18gNyUdAZqB1Rpd1Bqo/svHGvt+UJ6JcGX+DIekGFDxxIWofBxLCnQ==
+ dependencies:
+ "@ampproject/remapping" "^2.1.0"
+ "@babel/code-frame" "^7.16.7"
+ "@babel/generator" "^7.18.2"
+ "@babel/helper-compilation-targets" "^7.18.2"
+ "@babel/helper-module-transforms" "^7.18.0"
+ "@babel/helpers" "^7.18.2"
+ "@babel/parser" "^7.18.5"
+ "@babel/template" "^7.16.7"
+ "@babel/traverse" "^7.18.5"
+ "@babel/types" "^7.18.4"
+ convert-source-map "^1.7.0"
+ debug "^4.1.0"
+ gensync "^1.0.0-beta.2"
+ json5 "^2.2.1"
+ semver "^6.3.0"
+
+"@babel/generator@^7.18.2", "@babel/generator@^7.7.2":
+ version "7.18.2"
+ resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.18.2.tgz#33873d6f89b21efe2da63fe554460f3df1c5880d"
+ integrity sha512-W1lG5vUwFvfMd8HVXqdfbuG7RuaSrTCCD8cl8fP8wOivdbtbIg2Db3IWUcgvfxKbbn6ZBGYRW/Zk1MIwK49mgw==
+ dependencies:
+ "@babel/types" "^7.18.2"
+ "@jridgewell/gen-mapping" "^0.3.0"
+ jsesc "^2.5.1"
+
+"@babel/helper-annotate-as-pure@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.16.7.tgz#bb2339a7534a9c128e3102024c60760a3a7f3862"
+ integrity sha512-s6t2w/IPQVTAET1HitoowRGXooX8mCgtuP5195wD/QJPV6wYjpujCGF7JuMODVX2ZAJOf1GT6DT9MHEZvLOFSw==
+ dependencies:
+ "@babel/types" "^7.16.7"
+
+"@babel/helper-builder-binary-assignment-operator-visitor@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.16.7.tgz#38d138561ea207f0f69eb1626a418e4f7e6a580b"
+ integrity sha512-C6FdbRaxYjwVu/geKW4ZeQ0Q31AftgRcdSnZ5/jsH6BzCJbtvXvhpfkbkThYSuutZA7nCXpPR6AD9zd1dprMkA==
+ dependencies:
+ "@babel/helper-explode-assignable-expression" "^7.16.7"
+ "@babel/types" "^7.16.7"
+
+"@babel/helper-compilation-targets@^7.13.0", "@babel/helper-compilation-targets@^7.16.7", "@babel/helper-compilation-targets@^7.17.10", "@babel/helper-compilation-targets@^7.18.2":
+ version "7.18.2"
+ resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.18.2.tgz#67a85a10cbd5fc7f1457fec2e7f45441dc6c754b"
+ integrity sha512-s1jnPotJS9uQnzFtiZVBUxe67CuBa679oWFHpxYYnTpRL/1ffhyX44R9uYiXoa/pLXcY9H2moJta0iaanlk/rQ==
+ dependencies:
+ "@babel/compat-data" "^7.17.10"
+ "@babel/helper-validator-option" "^7.16.7"
+ browserslist "^4.20.2"
+ semver "^6.3.0"
+
+"@babel/helper-create-class-features-plugin@^7.17.12", "@babel/helper-create-class-features-plugin@^7.18.0":
+ version "7.18.0"
+ resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.18.0.tgz#fac430912606331cb075ea8d82f9a4c145a4da19"
+ integrity sha512-Kh8zTGR9de3J63e5nS0rQUdRs/kbtwoeQQ0sriS0lItjC96u8XXZN6lKpuyWd2coKSU13py/y+LTmThLuVX0Pg==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.16.7"
+ "@babel/helper-environment-visitor" "^7.16.7"
+ "@babel/helper-function-name" "^7.17.9"
+ "@babel/helper-member-expression-to-functions" "^7.17.7"
+ "@babel/helper-optimise-call-expression" "^7.16.7"
+ "@babel/helper-replace-supers" "^7.16.7"
+ "@babel/helper-split-export-declaration" "^7.16.7"
+
+"@babel/helper-create-regexp-features-plugin@^7.16.7", "@babel/helper-create-regexp-features-plugin@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.17.12.tgz#bb37ca467f9694bbe55b884ae7a5cc1e0084e4fd"
+ integrity sha512-b2aZrV4zvutr9AIa6/gA3wsZKRwTKYoDxYiFKcESS3Ug2GTXzwBEvMuuFLhCQpEnRXs1zng4ISAXSUxxKBIcxw==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.16.7"
+ regexpu-core "^5.0.1"
+
+"@babel/helper-define-polyfill-provider@^0.3.1":
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.1.tgz#52411b445bdb2e676869e5a74960d2d3826d2665"
+ integrity sha512-J9hGMpJQmtWmj46B3kBHmL38UhJGhYX7eqkcq+2gsstyYt341HmPeWspihX43yVRA0mS+8GGk2Gckc7bY/HCmA==
+ dependencies:
+ "@babel/helper-compilation-targets" "^7.13.0"
+ "@babel/helper-module-imports" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.13.0"
+ "@babel/traverse" "^7.13.0"
+ debug "^4.1.1"
+ lodash.debounce "^4.0.8"
+ resolve "^1.14.2"
+ semver "^6.1.2"
+
+"@babel/helper-environment-visitor@^7.16.7", "@babel/helper-environment-visitor@^7.18.2":
+ version "7.18.2"
+ resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.2.tgz#8a6d2dedb53f6bf248e31b4baf38739ee4a637bd"
+ integrity sha512-14GQKWkX9oJzPiQQ7/J36FTXcD4kSp8egKjO9nINlSKiHITRA9q/R74qu8S9xlc/b/yjsJItQUeeh3xnGN0voQ==
+
+"@babel/helper-explode-assignable-expression@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.16.7.tgz#12a6d8522fdd834f194e868af6354e8650242b7a"
+ integrity sha512-KyUenhWMC8VrxzkGP0Jizjo4/Zx+1nNZhgocs+gLzyZyB8SHidhoq9KK/8Ato4anhwsivfkBLftky7gvzbZMtQ==
+ dependencies:
+ "@babel/types" "^7.16.7"
+
+"@babel/helper-function-name@^7.16.7", "@babel/helper-function-name@^7.17.9":
+ version "7.17.9"
+ resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.17.9.tgz#136fcd54bc1da82fcb47565cf16fd8e444b1ff12"
+ integrity sha512-7cRisGlVtiVqZ0MW0/yFB4atgpGLWEHUVYnb448hZK4x+vih0YO5UoS11XIYtZYqHd0dIPMdUSv8q5K4LdMnIg==
+ dependencies:
+ "@babel/template" "^7.16.7"
+ "@babel/types" "^7.17.0"
+
+"@babel/helper-hoist-variables@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.7.tgz#86bcb19a77a509c7b77d0e22323ef588fa58c246"
+ integrity sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg==
+ dependencies:
+ "@babel/types" "^7.16.7"
+
+"@babel/helper-member-expression-to-functions@^7.17.7":
+ version "7.17.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.17.7.tgz#a34013b57d8542a8c4ff8ba3f747c02452a4d8c4"
+ integrity sha512-thxXgnQ8qQ11W2wVUObIqDL4p148VMxkt5T/qpN5k2fboRyzFGFmKsTGViquyM5QHKUy48OZoca8kw4ajaDPyw==
+ dependencies:
+ "@babel/types" "^7.17.0"
+
+"@babel/helper-module-imports@^7.12.13", "@babel/helper-module-imports@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz#25612a8091a999704461c8a222d0efec5d091437"
+ integrity sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg==
+ dependencies:
+ "@babel/types" "^7.16.7"
+
+"@babel/helper-module-transforms@^7.18.0":
+ version "7.18.0"
+ resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.18.0.tgz#baf05dec7a5875fb9235bd34ca18bad4e21221cd"
+ integrity sha512-kclUYSUBIjlvnzN2++K9f2qzYKFgjmnmjwL4zlmU5f8ZtzgWe8s0rUPSTGy2HmK4P8T52MQsS+HTQAgZd3dMEA==
+ dependencies:
+ "@babel/helper-environment-visitor" "^7.16.7"
+ "@babel/helper-module-imports" "^7.16.7"
+ "@babel/helper-simple-access" "^7.17.7"
+ "@babel/helper-split-export-declaration" "^7.16.7"
+ "@babel/helper-validator-identifier" "^7.16.7"
+ "@babel/template" "^7.16.7"
+ "@babel/traverse" "^7.18.0"
+ "@babel/types" "^7.18.0"
+
+"@babel/helper-optimise-call-expression@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.16.7.tgz#a34e3560605abbd31a18546bd2aad3e6d9a174f2"
+ integrity sha512-EtgBhg7rd/JcnpZFXpBy0ze1YRfdm7BnBX4uKMBd3ixa3RGAE002JZB66FJyNH7g0F38U05pXmA5P8cBh7z+1w==
+ dependencies:
+ "@babel/types" "^7.16.7"
+
+"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.13.0", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.17.12", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.17.12.tgz#86c2347da5acbf5583ba0a10aed4c9bf9da9cf96"
+ integrity sha512-JDkf04mqtN3y4iAbO1hv9U2ARpPyPL1zqyWs/2WG1pgSq9llHFjStX5jdxb84himgJm+8Ng+x0oiWF/nw/XQKA==
+
+"@babel/helper-remap-async-to-generator@^7.16.8":
+ version "7.16.8"
+ resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.16.8.tgz#29ffaade68a367e2ed09c90901986918d25e57e3"
+ integrity sha512-fm0gH7Flb8H51LqJHy3HJ3wnE1+qtYR2A99K06ahwrawLdOFsCEWjZOrYricXJHoPSudNKxrMBUPEIPxiIIvBw==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.16.7"
+ "@babel/helper-wrap-function" "^7.16.8"
+ "@babel/types" "^7.16.8"
+
+"@babel/helper-replace-supers@^7.16.7", "@babel/helper-replace-supers@^7.18.2":
+ version "7.18.2"
+ resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.18.2.tgz#41fdfcc9abaf900e18ba6e5931816d9062a7b2e0"
+ integrity sha512-XzAIyxx+vFnrOxiQrToSUOzUOn0e1J2Li40ntddek1Y69AXUTXoDJ40/D5RdjFu7s7qHiaeoTiempZcbuVXh2Q==
+ dependencies:
+ "@babel/helper-environment-visitor" "^7.18.2"
+ "@babel/helper-member-expression-to-functions" "^7.17.7"
+ "@babel/helper-optimise-call-expression" "^7.16.7"
+ "@babel/traverse" "^7.18.2"
+ "@babel/types" "^7.18.2"
+
+"@babel/helper-simple-access@^7.17.7", "@babel/helper-simple-access@^7.18.2":
+ version "7.18.2"
+ resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.18.2.tgz#4dc473c2169ac3a1c9f4a51cfcd091d1c36fcff9"
+ integrity sha512-7LIrjYzndorDY88MycupkpQLKS1AFfsVRm2k/9PtKScSy5tZq0McZTj+DiMRynboZfIqOKvo03pmhTaUgiD6fQ==
+ dependencies:
+ "@babel/types" "^7.18.2"
+
+"@babel/helper-skip-transparent-expression-wrappers@^7.16.0":
+ version "7.16.0"
+ resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.16.0.tgz#0ee3388070147c3ae051e487eca3ebb0e2e8bb09"
+ integrity sha512-+il1gTy0oHwUsBQZyJvukbB4vPMdcYBrFHa0Uc4AizLxbq6BOYC51Rv4tWocX9BLBDLZ4kc6qUFpQ6HRgL+3zw==
+ dependencies:
+ "@babel/types" "^7.16.0"
+
+"@babel/helper-split-export-declaration@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.7.tgz#0b648c0c42da9d3920d85ad585f2778620b8726b"
+ integrity sha512-xbWoy/PFoxSWazIToT9Sif+jJTlrMcndIsaOKvTA6u7QEo7ilkRZpjew18/W3c7nm8fXdUDXh02VXTbZ0pGDNw==
+ dependencies:
+ "@babel/types" "^7.16.7"
+
+"@babel/helper-validator-identifier@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz#e8c602438c4a8195751243da9031d1607d247cad"
+ integrity sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==
+
+"@babel/helper-validator-option@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz#b203ce62ce5fe153899b617c08957de860de4d23"
+ integrity sha512-TRtenOuRUVo9oIQGPC5G9DgK4743cdxvtOw0weQNpZXaS16SCBi5MNjZF8vba3ETURjZpTbVn7Vvcf2eAwFozQ==
+
+"@babel/helper-wrap-function@^7.16.8":
+ version "7.16.8"
+ resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.16.8.tgz#58afda087c4cd235de92f7ceedebca2c41274200"
+ integrity sha512-8RpyRVIAW1RcDDGTA+GpPAwV22wXCfKOoM9bet6TLkGIFTkRQSkH1nMQ5Yet4MpoXe1ZwHPVtNasc2w0uZMqnw==
+ dependencies:
+ "@babel/helper-function-name" "^7.16.7"
+ "@babel/template" "^7.16.7"
+ "@babel/traverse" "^7.16.8"
+ "@babel/types" "^7.16.8"
+
+"@babel/helpers@^7.18.2":
+ version "7.18.2"
+ resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.18.2.tgz#970d74f0deadc3f5a938bfa250738eb4ac889384"
+ integrity sha512-j+d+u5xT5utcQSzrh9p+PaJX94h++KN+ng9b9WEJq7pkUPAd61FGqhjuUEdfknb3E/uDBb7ruwEeKkIxNJPIrg==
+ dependencies:
+ "@babel/template" "^7.16.7"
+ "@babel/traverse" "^7.18.2"
+ "@babel/types" "^7.18.2"
+
+"@babel/highlight@^7.16.7":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.17.12.tgz#257de56ee5afbd20451ac0a75686b6b404257351"
+ integrity sha512-7yykMVF3hfZY2jsHZEEgLc+3x4o1O+fYyULu11GynEUQNwB6lua+IIQn1FiJxNucd5UlyJryrwsOh8PL9Sn8Qg==
+ dependencies:
+ "@babel/helper-validator-identifier" "^7.16.7"
+ chalk "^2.0.0"
+ js-tokens "^4.0.0"
+
+"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.16.7", "@babel/parser@^7.18.5":
+ version "7.18.5"
+ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.18.5.tgz#337062363436a893a2d22faa60be5bb37091c83c"
+ integrity sha512-YZWVaglMiplo7v8f1oMQ5ZPQr0vn7HPeZXxXWsxXJRjGVrzUFn9OxFQl1sb5wzfootjA/yChhW84BV+383FSOw==
+
+"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.17.12.tgz#1dca338caaefca368639c9ffb095afbd4d420b1e"
+ integrity sha512-xCJQXl4EeQ3J9C4yOmpTrtVGmzpm2iSzyxbkZHw7UCnZBftHpF/hpII80uWVyVrc40ytIClHjgWGTG1g/yB+aw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.17.12.tgz#0d498ec8f0374b1e2eb54b9cb2c4c78714c77753"
+ integrity sha512-/vt0hpIw0x4b6BLKUkwlvEoiGZYYLNZ96CzyHYPbtG2jZGz6LBe7/V+drYrc/d+ovrF9NBi0pmtvmNb/FsWtRQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.16.0"
+ "@babel/plugin-proposal-optional-chaining" "^7.17.12"
+
+"@babel/plugin-proposal-async-generator-functions@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.17.12.tgz#094a417e31ce7e692d84bab06c8e2a607cbeef03"
+ integrity sha512-RWVvqD1ooLKP6IqWTA5GyFVX2isGEgC5iFxKzfYOIy/QEFdxYyCybBDtIGjipHpb9bDWHzcqGqFakf+mVmBTdQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/helper-remap-async-to-generator" "^7.16.8"
+ "@babel/plugin-syntax-async-generators" "^7.8.4"
+
+"@babel/plugin-proposal-class-properties@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.17.12.tgz#84f65c0cc247d46f40a6da99aadd6438315d80a4"
+ integrity sha512-U0mI9q8pW5Q9EaTHFPwSVusPMV/DV9Mm8p7csqROFLtIE9rBF5piLqyrBGigftALrBcsBGu4m38JneAe7ZDLXw==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.17.12"
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-proposal-class-static-block@^7.18.0":
+ version "7.18.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.18.0.tgz#7d02253156e3c3793bdb9f2faac3a1c05f0ba710"
+ integrity sha512-t+8LsRMMDE74c6sV7KShIw13sqbqd58tlqNrsWoWBTIMw7SVQ0cZ905wLNS/FBCy/3PyooRHLFFlfrUNyyz5lA==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.18.0"
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/plugin-syntax-class-static-block" "^7.14.5"
+
+"@babel/plugin-proposal-dynamic-import@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.16.7.tgz#c19c897eaa46b27634a00fee9fb7d829158704b2"
+ integrity sha512-I8SW9Ho3/8DRSdmDdH3gORdyUuYnk1m4cMxUAdu5oy4n3OfN8flDEH+d60iG7dUfi0KkYwSvoalHzzdRzpWHTg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.16.7"
+ "@babel/plugin-syntax-dynamic-import" "^7.8.3"
+
+"@babel/plugin-proposal-export-namespace-from@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.17.12.tgz#b22864ccd662db9606edb2287ea5fd1709f05378"
+ integrity sha512-j7Ye5EWdwoXOpRmo5QmRyHPsDIe6+u70ZYZrd7uz+ebPYFKfRcLcNu3Ro0vOlJ5zuv8rU7xa+GttNiRzX56snQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/plugin-syntax-export-namespace-from" "^7.8.3"
+
+"@babel/plugin-proposal-json-strings@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.17.12.tgz#f4642951792437233216d8c1af370bb0fbff4664"
+ integrity sha512-rKJ+rKBoXwLnIn7n6o6fulViHMrOThz99ybH+hKHcOZbnN14VuMnH9fo2eHE69C8pO4uX1Q7t2HYYIDmv8VYkg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/plugin-syntax-json-strings" "^7.8.3"
+
+"@babel/plugin-proposal-logical-assignment-operators@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.17.12.tgz#c64a1bcb2b0a6d0ed2ff674fd120f90ee4b88a23"
+ integrity sha512-EqFo2s1Z5yy+JeJu7SFfbIUtToJTVlC61/C7WLKDntSw4Sz6JNAIfL7zQ74VvirxpjB5kz/kIx0gCcb+5OEo2Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4"
+
+"@babel/plugin-proposal-nullish-coalescing-operator@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.17.12.tgz#1e93079bbc2cbc756f6db6a1925157c4a92b94be"
+ integrity sha512-ws/g3FSGVzv+VH86+QvgtuJL/kR67xaEIF2x0iPqdDfYW6ra6JF3lKVBkWynRLcNtIC1oCTfDRVxmm2mKzy+ag==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3"
+
+"@babel/plugin-proposal-numeric-separator@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.16.7.tgz#d6b69f4af63fb38b6ca2558442a7fb191236eba9"
+ integrity sha512-vQgPMknOIgiuVqbokToyXbkY/OmmjAzr/0lhSIbG/KmnzXPGwW/AdhdKpi+O4X/VkWiWjnkKOBiqJrTaC98VKw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.16.7"
+ "@babel/plugin-syntax-numeric-separator" "^7.10.4"
+
+"@babel/plugin-proposal-object-rest-spread@^7.18.0":
+ version "7.18.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.18.0.tgz#79f2390c892ba2a68ec112eb0d895cfbd11155e8"
+ integrity sha512-nbTv371eTrFabDfHLElkn9oyf9VG+VKK6WMzhY2o4eHKaG19BToD9947zzGMO6I/Irstx9d8CwX6njPNIAR/yw==
+ dependencies:
+ "@babel/compat-data" "^7.17.10"
+ "@babel/helper-compilation-targets" "^7.17.10"
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/plugin-syntax-object-rest-spread" "^7.8.3"
+ "@babel/plugin-transform-parameters" "^7.17.12"
+
+"@babel/plugin-proposal-optional-catch-binding@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.16.7.tgz#c623a430674ffc4ab732fd0a0ae7722b67cb74cf"
+ integrity sha512-eMOH/L4OvWSZAE1VkHbr1vckLG1WUcHGJSLqqQwl2GaUqG6QjddvrOaTUMNYiv77H5IKPMZ9U9P7EaHwvAShfA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.16.7"
+ "@babel/plugin-syntax-optional-catch-binding" "^7.8.3"
+
+"@babel/plugin-proposal-optional-chaining@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.17.12.tgz#f96949e9bacace3a9066323a5cf90cfb9de67174"
+ integrity sha512-7wigcOs/Z4YWlK7xxjkvaIw84vGhDv/P1dFGQap0nHkc8gFKY/r+hXc8Qzf5k1gY7CvGIcHqAnOagVKJJ1wVOQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.16.0"
+ "@babel/plugin-syntax-optional-chaining" "^7.8.3"
+
+"@babel/plugin-proposal-private-methods@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.17.12.tgz#c2ca3a80beb7539289938da005ad525a038a819c"
+ integrity sha512-SllXoxo19HmxhDWm3luPz+cPhtoTSKLJE9PXshsfrOzBqs60QP0r8OaJItrPhAj0d7mZMnNF0Y1UUggCDgMz1A==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.17.12"
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-proposal-private-property-in-object@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.17.12.tgz#b02efb7f106d544667d91ae97405a9fd8c93952d"
+ integrity sha512-/6BtVi57CJfrtDNKfK5b66ydK2J5pXUKBKSPD2G1whamMuEnZWgoOIfO8Vf9F/DoD4izBLD/Au4NMQfruzzykg==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.16.7"
+ "@babel/helper-create-class-features-plugin" "^7.17.12"
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/plugin-syntax-private-property-in-object" "^7.14.5"
+
+"@babel/plugin-proposal-unicode-property-regex@^7.17.12", "@babel/plugin-proposal-unicode-property-regex@^7.4.4":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.17.12.tgz#3dbd7a67bd7f94c8238b394da112d86aaf32ad4d"
+ integrity sha512-Wb9qLjXf3ZazqXA7IvI7ozqRIXIGPtSo+L5coFmEkhTQK18ao4UDDD0zdTGAarmbLj2urpRwrc6893cu5Bfh0A==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.17.12"
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-syntax-async-generators@^7.8.4":
+ version "7.8.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d"
+ integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-bigint@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea"
+ integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-class-properties@^7.12.13", "@babel/plugin-syntax-class-properties@^7.8.3":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10"
+ integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-syntax-class-static-block@^7.14.5":
+ version "7.14.5"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406"
+ integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.14.5"
+
+"@babel/plugin-syntax-dynamic-import@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3"
+ integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-export-namespace-from@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a"
+ integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.3"
+
+"@babel/plugin-syntax-import-assertions@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.17.12.tgz#58096a92b11b2e4e54b24c6a0cc0e5e607abcedd"
+ integrity sha512-n/loy2zkq9ZEM8tEOwON9wTQSTNDTDEz6NujPtJGLU7qObzT1N4c4YZZf8E6ATB2AjNQg/Ib2AIpO03EZaCehw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-syntax-import-meta@^7.8.3":
+ version "7.10.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51"
+ integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+
+"@babel/plugin-syntax-json-strings@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a"
+ integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-logical-assignment-operators@^7.10.4", "@babel/plugin-syntax-logical-assignment-operators@^7.8.3":
+ version "7.10.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699"
+ integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+
+"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9"
+ integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-numeric-separator@^7.10.4", "@babel/plugin-syntax-numeric-separator@^7.8.3":
+ version "7.10.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97"
+ integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+
+"@babel/plugin-syntax-object-rest-spread@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871"
+ integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-optional-catch-binding@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1"
+ integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-optional-chaining@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a"
+ integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-private-property-in-object@^7.14.5":
+ version "7.14.5"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad"
+ integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.14.5"
+
+"@babel/plugin-syntax-top-level-await@^7.14.5", "@babel/plugin-syntax-top-level-await@^7.8.3":
+ version "7.14.5"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c"
+ integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.14.5"
+
+"@babel/plugin-syntax-typescript@^7.7.2":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.17.12.tgz#b54fc3be6de734a56b87508f99d6428b5b605a7b"
+ integrity sha512-TYY0SXFiO31YXtNg3HtFwNJHjLsAyIIhAhNWkQ5whPPS7HWUFlg9z0Ta4qAQNjQbP1wsSt/oKkmZ/4/WWdMUpw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-transform-arrow-functions@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.17.12.tgz#dddd783b473b1b1537ef46423e3944ff24898c45"
+ integrity sha512-PHln3CNi/49V+mza4xMwrg+WGYevSF1oaiXaC2EQfdp4HWlSjRsrDXWJiQBKpP7749u6vQ9mcry2uuFOv5CXvA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-transform-async-to-generator@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.17.12.tgz#dbe5511e6b01eee1496c944e35cdfe3f58050832"
+ integrity sha512-J8dbrWIOO3orDzir57NRsjg4uxucvhby0L/KZuGsWDj0g7twWK3g7JhJhOrXtuXiw8MeiSdJ3E0OW9H8LYEzLQ==
+ dependencies:
+ "@babel/helper-module-imports" "^7.16.7"
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/helper-remap-async-to-generator" "^7.16.8"
+
+"@babel/plugin-transform-block-scoped-functions@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.16.7.tgz#4d0d57d9632ef6062cdf354bb717102ee042a620"
+ integrity sha512-JUuzlzmF40Z9cXyytcbZEZKckgrQzChbQJw/5PuEHYeqzCsvebDx0K0jWnIIVcmmDOAVctCgnYs0pMcrYj2zJg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.16.7"
+
+"@babel/plugin-transform-block-scoping@^7.17.12":
+ version "7.18.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.18.4.tgz#7988627b3e9186a13e4d7735dc9c34a056613fb9"
+ integrity sha512-+Hq10ye+jlvLEogSOtq4mKvtk7qwcUQ1f0Mrueai866C82f844Yom2cttfJdMdqRLTxWpsbfbkIkOIfovyUQXw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-transform-classes@^7.17.12":
+ version "7.18.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.18.4.tgz#51310b812a090b846c784e47087fa6457baef814"
+ integrity sha512-e42NSG2mlKWgxKUAD9EJJSkZxR67+wZqzNxLSpc51T8tRU5SLFHsPmgYR5yr7sdgX4u+iHA1C5VafJ6AyImV3A==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.16.7"
+ "@babel/helper-environment-visitor" "^7.18.2"
+ "@babel/helper-function-name" "^7.17.9"
+ "@babel/helper-optimise-call-expression" "^7.16.7"
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/helper-replace-supers" "^7.18.2"
+ "@babel/helper-split-export-declaration" "^7.16.7"
+ globals "^11.1.0"
+
+"@babel/plugin-transform-computed-properties@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.17.12.tgz#bca616a83679698f3258e892ed422546e531387f"
+ integrity sha512-a7XINeplB5cQUWMg1E/GI1tFz3LfK021IjV1rj1ypE+R7jHm+pIHmHl25VNkZxtx9uuYp7ThGk8fur1HHG7PgQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-transform-destructuring@^7.18.0":
+ version "7.18.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.18.0.tgz#dc4f92587e291b4daa78aa20cc2d7a63aa11e858"
+ integrity sha512-Mo69klS79z6KEfrLg/1WkmVnB8javh75HX4pi2btjvlIoasuxilEyjtsQW6XPrubNd7AQy0MMaNIaQE4e7+PQw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-transform-dotall-regex@^7.16.7", "@babel/plugin-transform-dotall-regex@^7.4.4":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.16.7.tgz#6b2d67686fab15fb6a7fd4bd895d5982cfc81241"
+ integrity sha512-Lyttaao2SjZF6Pf4vk1dVKv8YypMpomAbygW+mU5cYP3S5cWTfCJjG8xV6CFdzGFlfWK81IjL9viiTvpb6G7gQ==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.16.7"
+ "@babel/helper-plugin-utils" "^7.16.7"
+
+"@babel/plugin-transform-duplicate-keys@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.17.12.tgz#a09aa709a3310013f8e48e0e23bc7ace0f21477c"
+ integrity sha512-EA5eYFUG6xeerdabina/xIoB95jJ17mAkR8ivx6ZSu9frKShBjpOGZPn511MTDTkiCO+zXnzNczvUM69YSf3Zw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-transform-exponentiation-operator@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.16.7.tgz#efa9862ef97e9e9e5f653f6ddc7b665e8536fe9b"
+ integrity sha512-8UYLSlyLgRixQvlYH3J2ekXFHDFLQutdy7FfFAMm3CPZ6q9wHCwnUyiXpQCe3gVVnQlHc5nsuiEVziteRNTXEA==
+ dependencies:
+ "@babel/helper-builder-binary-assignment-operator-visitor" "^7.16.7"
+ "@babel/helper-plugin-utils" "^7.16.7"
+
+"@babel/plugin-transform-for-of@^7.18.1":
+ version "7.18.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.18.1.tgz#ed14b657e162b72afbbb2b4cdad277bf2bb32036"
+ integrity sha512-+TTB5XwvJ5hZbO8xvl2H4XaMDOAK57zF4miuC9qQJgysPNEAZZ9Z69rdF5LJkozGdZrjBIUAIyKUWRMmebI7vg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-transform-function-name@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.16.7.tgz#5ab34375c64d61d083d7d2f05c38d90b97ec65cf"
+ integrity sha512-SU/C68YVwTRxqWj5kgsbKINakGag0KTgq9f2iZEXdStoAbOzLHEBRYzImmA6yFo8YZhJVflvXmIHUO7GWHmxxA==
+ dependencies:
+ "@babel/helper-compilation-targets" "^7.16.7"
+ "@babel/helper-function-name" "^7.16.7"
+ "@babel/helper-plugin-utils" "^7.16.7"
+
+"@babel/plugin-transform-literals@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.17.12.tgz#97131fbc6bbb261487105b4b3edbf9ebf9c830ae"
+ integrity sha512-8iRkvaTjJciWycPIZ9k9duu663FT7VrBdNqNgxnVXEFwOIp55JWcZd23VBRySYbnS3PwQ3rGiabJBBBGj5APmQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-transform-member-expression-literals@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.16.7.tgz#6e5dcf906ef8a098e630149d14c867dd28f92384"
+ integrity sha512-mBruRMbktKQwbxaJof32LT9KLy2f3gH+27a5XSuXo6h7R3vqltl0PgZ80C8ZMKw98Bf8bqt6BEVi3svOh2PzMw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.16.7"
+
+"@babel/plugin-transform-modules-amd@^7.18.0":
+ version "7.18.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.18.0.tgz#7ef1002e67e36da3155edc8bf1ac9398064c02ed"
+ integrity sha512-h8FjOlYmdZwl7Xm2Ug4iX2j7Qy63NANI+NQVWQzv6r25fqgg7k2dZl03p95kvqNclglHs4FZ+isv4p1uXMA+QA==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.18.0"
+ "@babel/helper-plugin-utils" "^7.17.12"
+ babel-plugin-dynamic-import-node "^2.3.3"
+
+"@babel/plugin-transform-modules-commonjs@^7.18.2":
+ version "7.18.2"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.18.2.tgz#1aa8efa2e2a6e818b6a7f2235fceaf09bdb31e9e"
+ integrity sha512-f5A865gFPAJAEE0K7F/+nm5CmAE3y8AWlMBG9unu5j9+tk50UQVK0QS8RNxSp7MJf0wh97uYyLWt3Zvu71zyOQ==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.18.0"
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/helper-simple-access" "^7.18.2"
+ babel-plugin-dynamic-import-node "^2.3.3"
+
+"@babel/plugin-transform-modules-systemjs@^7.18.0":
+ version "7.18.5"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.18.5.tgz#87f11c44fbfd3657be000d4897e192d9cb535996"
+ integrity sha512-SEewrhPpcqMF1V7DhnEbhVJLrC+nnYfe1E0piZMZXBpxi9WvZqWGwpsk7JYP7wPWeqaBh4gyKlBhHJu3uz5g4Q==
+ dependencies:
+ "@babel/helper-hoist-variables" "^7.16.7"
+ "@babel/helper-module-transforms" "^7.18.0"
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/helper-validator-identifier" "^7.16.7"
+ babel-plugin-dynamic-import-node "^2.3.3"
+
+"@babel/plugin-transform-modules-umd@^7.18.0":
+ version "7.18.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.0.tgz#56aac64a2c2a1922341129a4597d1fd5c3ff020f"
+ integrity sha512-d/zZ8I3BWli1tmROLxXLc9A6YXvGK8egMxHp+E/rRwMh1Kip0AP77VwZae3snEJ33iiWwvNv2+UIIhfalqhzZA==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.18.0"
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-transform-named-capturing-groups-regex@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.17.12.tgz#9c4a5a5966e0434d515f2675c227fd8cc8606931"
+ integrity sha512-vWoWFM5CKaTeHrdUJ/3SIOTRV+MBVGybOC9mhJkaprGNt5demMymDW24yC74avb915/mIRe3TgNb/d8idvnCRA==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.17.12"
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-transform-new-target@^7.17.12":
+ version "7.18.5"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.18.5.tgz#8c228c4a07501dd12c95c5f23d1622131cc23931"
+ integrity sha512-TuRL5uGW4KXU6OsRj+mLp9BM7pO8e7SGNTEokQRRxHFkXYMFiy2jlKSZPFtI/mKORDzciH+hneskcSOp0gU8hg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-transform-object-super@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.16.7.tgz#ac359cf8d32cf4354d27a46867999490b6c32a94"
+ integrity sha512-14J1feiQVWaGvRxj2WjyMuXS2jsBkgB3MdSN5HuC2G5nRspa5RK9COcs82Pwy5BuGcjb+fYaUj94mYcOj7rCvw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.16.7"
+ "@babel/helper-replace-supers" "^7.16.7"
+
+"@babel/plugin-transform-parameters@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.17.12.tgz#eb467cd9586ff5ff115a9880d6fdbd4a846b7766"
+ integrity sha512-6qW4rWo1cyCdq1FkYri7AHpauchbGLXpdwnYsfxFb+KtddHENfsY5JZb35xUwkK5opOLcJ3BNd2l7PhRYGlwIA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-transform-property-literals@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.16.7.tgz#2dadac85155436f22c696c4827730e0fe1057a55"
+ integrity sha512-z4FGr9NMGdoIl1RqavCqGG+ZuYjfZ/hkCIeuH6Do7tXmSm0ls11nYVSJqFEUOSJbDab5wC6lRE/w6YjVcr6Hqw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.16.7"
+
+"@babel/plugin-transform-regenerator@^7.18.0":
+ version "7.18.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.18.0.tgz#44274d655eb3f1af3f3a574ba819d3f48caf99d5"
+ integrity sha512-C8YdRw9uzx25HSIzwA7EM7YP0FhCe5wNvJbZzjVNHHPGVcDJ3Aie+qGYYdS1oVQgn+B3eAIJbWFLrJ4Jipv7nw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+ regenerator-transform "^0.15.0"
+
+"@babel/plugin-transform-reserved-words@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.17.12.tgz#7dbd349f3cdffba751e817cf40ca1386732f652f"
+ integrity sha512-1KYqwbJV3Co03NIi14uEHW8P50Md6KqFgt0FfpHdK6oyAHQVTosgPuPSiWud1HX0oYJ1hGRRlk0fP87jFpqXZA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-transform-shorthand-properties@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.16.7.tgz#e8549ae4afcf8382f711794c0c7b6b934c5fbd2a"
+ integrity sha512-hah2+FEnoRoATdIb05IOXf+4GzXYTq75TVhIn1PewihbpyrNWUt2JbudKQOETWw6QpLe+AIUpJ5MVLYTQbeeUg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.16.7"
+
+"@babel/plugin-transform-spread@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.17.12.tgz#c112cad3064299f03ea32afed1d659223935d1f5"
+ integrity sha512-9pgmuQAtFi3lpNUstvG9nGfk9DkrdmWNp9KeKPFmuZCpEnxRzYlS8JgwPjYj+1AWDOSvoGN0H30p1cBOmT/Svg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.16.0"
+
+"@babel/plugin-transform-sticky-regex@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.16.7.tgz#c84741d4f4a38072b9a1e2e3fd56d359552e8660"
+ integrity sha512-NJa0Bd/87QV5NZZzTuZG5BPJjLYadeSZ9fO6oOUoL4iQx+9EEuw/eEM92SrsT19Yc2jgB1u1hsjqDtH02c3Drw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.16.7"
+
+"@babel/plugin-transform-template-literals@^7.18.2":
+ version "7.18.2"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.18.2.tgz#31ed6915721864847c48b656281d0098ea1add28"
+ integrity sha512-/cmuBVw9sZBGZVOMkpAEaVLwm4JmK2GZ1dFKOGGpMzEHWFmyZZ59lUU0PdRr8YNYeQdNzTDwuxP2X2gzydTc9g==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-transform-typeof-symbol@^7.17.12":
+ version "7.17.12"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.17.12.tgz#0f12f57ac35e98b35b4ed34829948d42bd0e6889"
+ integrity sha512-Q8y+Jp7ZdtSPXCThB6zjQ74N3lj0f6TDh1Hnf5B+sYlzQ8i5Pjp8gW0My79iekSpT4WnI06blqP6DT0OmaXXmw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.17.12"
+
+"@babel/plugin-transform-unicode-escapes@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.16.7.tgz#da8717de7b3287a2c6d659750c964f302b31ece3"
+ integrity sha512-TAV5IGahIz3yZ9/Hfv35TV2xEm+kaBDaZQCn2S/hG9/CZ0DktxJv9eKfPc7yYCvOYR4JGx1h8C+jcSOvgaaI/Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.16.7"
+
+"@babel/plugin-transform-unicode-regex@^7.16.7":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.16.7.tgz#0f7aa4a501198976e25e82702574c34cfebe9ef2"
+ integrity sha512-oC5tYYKw56HO75KZVLQ+R/Nl3Hro9kf8iG0hXoaHP7tjAyCpvqBiSNe6vGrZni1Z6MggmUOC6A7VP7AVmw225Q==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.16.7"
+ "@babel/helper-plugin-utils" "^7.16.7"
+
+"@babel/preset-env@^7.18.2":
+ version "7.18.2"
+ resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.18.2.tgz#f47d3000a098617926e674c945d95a28cb90977a"
+ integrity sha512-PfpdxotV6afmXMU47S08F9ZKIm2bJIQ0YbAAtDfIENX7G1NUAXigLREh69CWDjtgUy7dYn7bsMzkgdtAlmS68Q==
+ dependencies:
+ "@babel/compat-data" "^7.17.10"
+ "@babel/helper-compilation-targets" "^7.18.2"
+ "@babel/helper-plugin-utils" "^7.17.12"
+ "@babel/helper-validator-option" "^7.16.7"
+ "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.17.12"
+ "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.17.12"
+ "@babel/plugin-proposal-async-generator-functions" "^7.17.12"
+ "@babel/plugin-proposal-class-properties" "^7.17.12"
+ "@babel/plugin-proposal-class-static-block" "^7.18.0"
+ "@babel/plugin-proposal-dynamic-import" "^7.16.7"
+ "@babel/plugin-proposal-export-namespace-from" "^7.17.12"
+ "@babel/plugin-proposal-json-strings" "^7.17.12"
+ "@babel/plugin-proposal-logical-assignment-operators" "^7.17.12"
+ "@babel/plugin-proposal-nullish-coalescing-operator" "^7.17.12"
+ "@babel/plugin-proposal-numeric-separator" "^7.16.7"
+ "@babel/plugin-proposal-object-rest-spread" "^7.18.0"
+ "@babel/plugin-proposal-optional-catch-binding" "^7.16.7"
+ "@babel/plugin-proposal-optional-chaining" "^7.17.12"
+ "@babel/plugin-proposal-private-methods" "^7.17.12"
+ "@babel/plugin-proposal-private-property-in-object" "^7.17.12"
+ "@babel/plugin-proposal-unicode-property-regex" "^7.17.12"
+ "@babel/plugin-syntax-async-generators" "^7.8.4"
+ "@babel/plugin-syntax-class-properties" "^7.12.13"
+ "@babel/plugin-syntax-class-static-block" "^7.14.5"
+ "@babel/plugin-syntax-dynamic-import" "^7.8.3"
+ "@babel/plugin-syntax-export-namespace-from" "^7.8.3"
+ "@babel/plugin-syntax-import-assertions" "^7.17.12"
+ "@babel/plugin-syntax-json-strings" "^7.8.3"
+ "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4"
+ "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3"
+ "@babel/plugin-syntax-numeric-separator" "^7.10.4"
+ "@babel/plugin-syntax-object-rest-spread" "^7.8.3"
+ "@babel/plugin-syntax-optional-catch-binding" "^7.8.3"
+ "@babel/plugin-syntax-optional-chaining" "^7.8.3"
+ "@babel/plugin-syntax-private-property-in-object" "^7.14.5"
+ "@babel/plugin-syntax-top-level-await" "^7.14.5"
+ "@babel/plugin-transform-arrow-functions" "^7.17.12"
+ "@babel/plugin-transform-async-to-generator" "^7.17.12"
+ "@babel/plugin-transform-block-scoped-functions" "^7.16.7"
+ "@babel/plugin-transform-block-scoping" "^7.17.12"
+ "@babel/plugin-transform-classes" "^7.17.12"
+ "@babel/plugin-transform-computed-properties" "^7.17.12"
+ "@babel/plugin-transform-destructuring" "^7.18.0"
+ "@babel/plugin-transform-dotall-regex" "^7.16.7"
+ "@babel/plugin-transform-duplicate-keys" "^7.17.12"
+ "@babel/plugin-transform-exponentiation-operator" "^7.16.7"
+ "@babel/plugin-transform-for-of" "^7.18.1"
+ "@babel/plugin-transform-function-name" "^7.16.7"
+ "@babel/plugin-transform-literals" "^7.17.12"
+ "@babel/plugin-transform-member-expression-literals" "^7.16.7"
+ "@babel/plugin-transform-modules-amd" "^7.18.0"
+ "@babel/plugin-transform-modules-commonjs" "^7.18.2"
+ "@babel/plugin-transform-modules-systemjs" "^7.18.0"
+ "@babel/plugin-transform-modules-umd" "^7.18.0"
+ "@babel/plugin-transform-named-capturing-groups-regex" "^7.17.12"
+ "@babel/plugin-transform-new-target" "^7.17.12"
+ "@babel/plugin-transform-object-super" "^7.16.7"
+ "@babel/plugin-transform-parameters" "^7.17.12"
+ "@babel/plugin-transform-property-literals" "^7.16.7"
+ "@babel/plugin-transform-regenerator" "^7.18.0"
+ "@babel/plugin-transform-reserved-words" "^7.17.12"
+ "@babel/plugin-transform-shorthand-properties" "^7.16.7"
+ "@babel/plugin-transform-spread" "^7.17.12"
+ "@babel/plugin-transform-sticky-regex" "^7.16.7"
+ "@babel/plugin-transform-template-literals" "^7.18.2"
+ "@babel/plugin-transform-typeof-symbol" "^7.17.12"
+ "@babel/plugin-transform-unicode-escapes" "^7.16.7"
+ "@babel/plugin-transform-unicode-regex" "^7.16.7"
+ "@babel/preset-modules" "^0.1.5"
+ "@babel/types" "^7.18.2"
+ babel-plugin-polyfill-corejs2 "^0.3.0"
+ babel-plugin-polyfill-corejs3 "^0.5.0"
+ babel-plugin-polyfill-regenerator "^0.3.0"
+ core-js-compat "^3.22.1"
+ semver "^6.3.0"
+
+"@babel/preset-modules@^0.1.5":
+ version "0.1.5"
+ resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.5.tgz#ef939d6e7f268827e1841638dc6ff95515e115d9"
+ integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/plugin-proposal-unicode-property-regex" "^7.4.4"
+ "@babel/plugin-transform-dotall-regex" "^7.4.4"
+ "@babel/types" "^7.4.4"
+ esutils "^2.0.2"
+
+"@babel/runtime@^7.8.4":
+ version "7.18.3"
+ resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.18.3.tgz#c7b654b57f6f63cf7f8b418ac9ca04408c4579f4"
+ integrity sha512-38Y8f7YUhce/K7RMwTp7m0uCumpv9hZkitCbBClqQIow1qSbCvGkcegKOXpEWCQLfWmevgRiWokZ1GkpfhbZug==
+ dependencies:
+ regenerator-runtime "^0.13.4"
+
+"@babel/template@^7.16.7", "@babel/template@^7.3.3":
+ version "7.16.7"
+ resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.7.tgz#8d126c8701fde4d66b264b3eba3d96f07666d155"
+ integrity sha512-I8j/x8kHUrbYRTUxXrrMbfCa7jxkE7tZre39x3kjr9hvI82cK1FfqLygotcWN5kdPGWcLdWMHpSBavse5tWw3w==
+ dependencies:
+ "@babel/code-frame" "^7.16.7"
+ "@babel/parser" "^7.16.7"
+ "@babel/types" "^7.16.7"
+
+"@babel/traverse@^7.13.0", "@babel/traverse@^7.16.8", "@babel/traverse@^7.18.0", "@babel/traverse@^7.18.2", "@babel/traverse@^7.18.5", "@babel/traverse@^7.7.2":
+ version "7.18.5"
+ resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.18.5.tgz#94a8195ad9642801837988ab77f36e992d9a20cd"
+ integrity sha512-aKXj1KT66sBj0vVzk6rEeAO6Z9aiiQ68wfDgge3nHhA/my6xMM/7HGQUNumKZaoa2qUPQ5whJG9aAifsxUKfLA==
+ dependencies:
+ "@babel/code-frame" "^7.16.7"
+ "@babel/generator" "^7.18.2"
+ "@babel/helper-environment-visitor" "^7.18.2"
+ "@babel/helper-function-name" "^7.17.9"
+ "@babel/helper-hoist-variables" "^7.16.7"
+ "@babel/helper-split-export-declaration" "^7.16.7"
+ "@babel/parser" "^7.18.5"
+ "@babel/types" "^7.18.4"
+ debug "^4.1.0"
+ globals "^11.1.0"
+
+"@babel/types@^7.0.0", "@babel/types@^7.16.0", "@babel/types@^7.16.7", "@babel/types@^7.16.8", "@babel/types@^7.17.0", "@babel/types@^7.18.0", "@babel/types@^7.18.2", "@babel/types@^7.18.4", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4":
+ version "7.18.4"
+ resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.18.4.tgz#27eae9b9fd18e9dccc3f9d6ad051336f307be354"
+ integrity sha512-ThN1mBcMq5pG/Vm2IcBmPPfyPXbd8S02rS+OBIDENdufvqC7Z/jHPCv9IcP01277aKtDI8g/2XysBN4hA8niiw==
+ dependencies:
+ "@babel/helper-validator-identifier" "^7.16.7"
+ to-fast-properties "^2.0.0"
+
+"@bcoe/v8-coverage@^0.2.3":
+ version "0.2.3"
+ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
+ integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
+
+"@hapi/bourne@^2.0.0":
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/@hapi/bourne/-/bourne-2.1.0.tgz#66aff77094dc3080bd5df44ec63881f2676eb020"
+ integrity sha512-i1BpaNDVLJdRBEKeJWkVO6tYX6DMFBuwMhSuWqLsY4ufeTKGVuV5rBsUhxPayXqnnWHgXUAmWK16H/ykO5Wj4Q==
+
+"@istanbuljs/load-nyc-config@^1.0.0":
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced"
+ integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==
+ dependencies:
+ camelcase "^5.3.1"
+ find-up "^4.1.0"
+ get-package-type "^0.1.0"
+ js-yaml "^3.13.1"
+ resolve-from "^5.0.0"
+
+"@istanbuljs/schema@^0.1.2":
+ version "0.1.3"
+ resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98"
+ integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==
+
+"@jest/console@^27.5.1":
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/@jest/console/-/console-27.5.1.tgz#260fe7239602fe5130a94f1aa386eff54b014bba"
+ integrity sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg==
+ dependencies:
+ "@jest/types" "^27.5.1"
+ "@types/node" "*"
+ chalk "^4.0.0"
+ jest-message-util "^27.5.1"
+ jest-util "^27.5.1"
+ slash "^3.0.0"
+
+"@jest/core@^27.5.1":
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/@jest/core/-/core-27.5.1.tgz#267ac5f704e09dc52de2922cbf3af9edcd64b626"
+ integrity sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ==
+ dependencies:
+ "@jest/console" "^27.5.1"
+ "@jest/reporters" "^27.5.1"
+ "@jest/test-result" "^27.5.1"
+ "@jest/transform" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ "@types/node" "*"
+ ansi-escapes "^4.2.1"
+ chalk "^4.0.0"
+ emittery "^0.8.1"
+ exit "^0.1.2"
+ graceful-fs "^4.2.9"
+ jest-changed-files "^27.5.1"
+ jest-config "^27.5.1"
+ jest-haste-map "^27.5.1"
+ jest-message-util "^27.5.1"
+ jest-regex-util "^27.5.1"
+ jest-resolve "^27.5.1"
+ jest-resolve-dependencies "^27.5.1"
+ jest-runner "^27.5.1"
+ jest-runtime "^27.5.1"
+ jest-snapshot "^27.5.1"
+ jest-util "^27.5.1"
+ jest-validate "^27.5.1"
+ jest-watcher "^27.5.1"
+ micromatch "^4.0.4"
+ rimraf "^3.0.0"
+ slash "^3.0.0"
+ strip-ansi "^6.0.0"
+
+"@jest/environment@^27.5.1":
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-27.5.1.tgz#d7425820511fe7158abbecc010140c3fd3be9c74"
+ integrity sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA==
+ dependencies:
+ "@jest/fake-timers" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ "@types/node" "*"
+ jest-mock "^27.5.1"
+
+"@jest/fake-timers@^27.5.1":
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-27.5.1.tgz#76979745ce0579c8a94a4678af7a748eda8ada74"
+ integrity sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ==
+ dependencies:
+ "@jest/types" "^27.5.1"
+ "@sinonjs/fake-timers" "^8.0.1"
+ "@types/node" "*"
+ jest-message-util "^27.5.1"
+ jest-mock "^27.5.1"
+ jest-util "^27.5.1"
+
+"@jest/globals@^27.5.1":
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-27.5.1.tgz#7ac06ce57ab966566c7963431cef458434601b2b"
+ integrity sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q==
+ dependencies:
+ "@jest/environment" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ expect "^27.5.1"
+
+"@jest/reporters@^27.5.1":
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-27.5.1.tgz#ceda7be96170b03c923c37987b64015812ffec04"
+ integrity sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw==
+ dependencies:
+ "@bcoe/v8-coverage" "^0.2.3"
+ "@jest/console" "^27.5.1"
+ "@jest/test-result" "^27.5.1"
+ "@jest/transform" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ "@types/node" "*"
+ chalk "^4.0.0"
+ collect-v8-coverage "^1.0.0"
+ exit "^0.1.2"
+ glob "^7.1.2"
+ graceful-fs "^4.2.9"
+ istanbul-lib-coverage "^3.0.0"
+ istanbul-lib-instrument "^5.1.0"
+ istanbul-lib-report "^3.0.0"
+ istanbul-lib-source-maps "^4.0.0"
+ istanbul-reports "^3.1.3"
+ jest-haste-map "^27.5.1"
+ jest-resolve "^27.5.1"
+ jest-util "^27.5.1"
+ jest-worker "^27.5.1"
+ slash "^3.0.0"
+ source-map "^0.6.0"
+ string-length "^4.0.1"
+ terminal-link "^2.0.0"
+ v8-to-istanbul "^8.1.0"
+
+"@jest/schemas@^28.0.2":
+ version "28.0.2"
+ resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-28.0.2.tgz#08c30df6a8d07eafea0aef9fb222c5e26d72e613"
+ integrity sha512-YVDJZjd4izeTDkij00vHHAymNXQ6WWsdChFRK86qck6Jpr3DCL5W3Is3vslviRlP+bLuMYRLbdp98amMvqudhA==
+ dependencies:
+ "@sinclair/typebox" "^0.23.3"
+
+"@jest/source-map@^27.5.1":
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-27.5.1.tgz#6608391e465add4205eae073b55e7f279e04e8cf"
+ integrity sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg==
+ dependencies:
+ callsites "^3.0.0"
+ graceful-fs "^4.2.9"
+ source-map "^0.6.0"
+
+"@jest/test-result@^27.5.1":
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-27.5.1.tgz#56a6585fa80f7cdab72b8c5fc2e871d03832f5bb"
+ integrity sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag==
+ dependencies:
+ "@jest/console" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ "@types/istanbul-lib-coverage" "^2.0.0"
+ collect-v8-coverage "^1.0.0"
+
+"@jest/test-sequencer@^27.5.1":
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-27.5.1.tgz#4057e0e9cea4439e544c6353c6affe58d095745b"
+ integrity sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ==
+ dependencies:
+ "@jest/test-result" "^27.5.1"
+ graceful-fs "^4.2.9"
+ jest-haste-map "^27.5.1"
+ jest-runtime "^27.5.1"
+
+"@jest/transform@^27.5.1":
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-27.5.1.tgz#6c3501dcc00c4c08915f292a600ece5ecfe1f409"
+ integrity sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw==
+ dependencies:
+ "@babel/core" "^7.1.0"
+ "@jest/types" "^27.5.1"
+ babel-plugin-istanbul "^6.1.1"
+ chalk "^4.0.0"
+ convert-source-map "^1.4.0"
+ fast-json-stable-stringify "^2.0.0"
+ graceful-fs "^4.2.9"
+ jest-haste-map "^27.5.1"
+ jest-regex-util "^27.5.1"
+ jest-util "^27.5.1"
+ micromatch "^4.0.4"
+ pirates "^4.0.4"
+ slash "^3.0.0"
+ source-map "^0.6.1"
+ write-file-atomic "^3.0.0"
+
+"@jest/transform@^28.1.1":
+ version "28.1.1"
+ resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-28.1.1.tgz#83541f2a3f612077c8501f49cc4e205d4e4a6b27"
+ integrity sha512-PkfaTUuvjUarl1EDr5ZQcCA++oXkFCP9QFUkG0yVKVmNObjhrqDy0kbMpMebfHWm3CCDHjYNem9eUSH8suVNHQ==
+ dependencies:
+ "@babel/core" "^7.11.6"
+ "@jest/types" "^28.1.1"
+ "@jridgewell/trace-mapping" "^0.3.7"
+ babel-plugin-istanbul "^6.1.1"
+ chalk "^4.0.0"
+ convert-source-map "^1.4.0"
+ fast-json-stable-stringify "^2.0.0"
+ graceful-fs "^4.2.9"
+ jest-haste-map "^28.1.1"
+ jest-regex-util "^28.0.2"
+ jest-util "^28.1.1"
+ micromatch "^4.0.4"
+ pirates "^4.0.4"
+ slash "^3.0.0"
+ write-file-atomic "^4.0.1"
+
+"@jest/types@^27.5.1":
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/@jest/types/-/types-27.5.1.tgz#3c79ec4a8ba61c170bf937bcf9e98a9df175ec80"
+ integrity sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw==
+ dependencies:
+ "@types/istanbul-lib-coverage" "^2.0.0"
+ "@types/istanbul-reports" "^3.0.0"
+ "@types/node" "*"
+ "@types/yargs" "^16.0.0"
+ chalk "^4.0.0"
+
+"@jest/types@^28.1.1":
+ version "28.1.1"
+ resolved "https://registry.yarnpkg.com/@jest/types/-/types-28.1.1.tgz#d059bbc80e6da6eda9f081f293299348bd78ee0b"
+ integrity sha512-vRXVqSg1VhDnB8bWcmvLzmg0Bt9CRKVgHPXqYwvWMX3TvAjeO+nRuK6+VdTKCtWOvYlmkF/HqNAL/z+N3B53Kw==
+ dependencies:
+ "@jest/schemas" "^28.0.2"
+ "@types/istanbul-lib-coverage" "^2.0.0"
+ "@types/istanbul-reports" "^3.0.0"
+ "@types/node" "*"
+ "@types/yargs" "^17.0.8"
+ chalk "^4.0.0"
+
+"@jridgewell/gen-mapping@^0.1.0":
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996"
+ integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==
+ dependencies:
+ "@jridgewell/set-array" "^1.0.0"
+ "@jridgewell/sourcemap-codec" "^1.4.10"
+
+"@jridgewell/gen-mapping@^0.3.0":
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.1.tgz#cf92a983c83466b8c0ce9124fadeaf09f7c66ea9"
+ integrity sha512-GcHwniMlA2z+WFPWuY8lp3fsza0I8xPFMWL5+n8LYyP6PSvPrXf4+n8stDHZY2DM0zy9sVkRDy1jDI4XGzYVqg==
+ dependencies:
+ "@jridgewell/set-array" "^1.0.0"
+ "@jridgewell/sourcemap-codec" "^1.4.10"
+ "@jridgewell/trace-mapping" "^0.3.9"
+
+"@jridgewell/resolve-uri@^3.0.3":
+ version "3.0.7"
+ resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.7.tgz#30cd49820a962aff48c8fffc5cd760151fca61fe"
+ integrity sha512-8cXDaBBHOr2pQ7j77Y6Vp5VDT2sIqWyWQ56TjEq4ih/a4iST3dItRe8Q9fp0rrIl9DoKhWQtUQz/YpOxLkXbNA==
+
+"@jridgewell/set-array@^1.0.0":
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.1.tgz#36a6acc93987adcf0ba50c66908bd0b70de8afea"
+ integrity sha512-Ct5MqZkLGEXTVmQYbGtx9SVqD2fqwvdubdps5D3djjAkgkKwT918VNOz65pEHFaYTeWcukmJmH5SwsA9Tn2ObQ==
+
+"@jridgewell/sourcemap-codec@^1.4.10":
+ version "1.4.13"
+ resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.13.tgz#b6461fb0c2964356c469e115f504c95ad97ab88c"
+ integrity sha512-GryiOJmNcWbovBxTfZSF71V/mXbgcV3MewDe3kIMCLyIh5e7SKAeUZs+rMnJ8jkMolZ/4/VsdBmMrw3l+VdZ3w==
+
+"@jridgewell/trace-mapping@^0.3.7", "@jridgewell/trace-mapping@^0.3.9":
+ version "0.3.13"
+ resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.13.tgz#dcfe3e95f224c8fe97a87a5235defec999aa92ea"
+ integrity sha512-o1xbKhp9qnIAoHJSWd6KlCZfqslL4valSF81H8ImioOAxluWYWOpWkpyktY2vnt4tbrX9XYaxovq6cgowaJp2w==
+ dependencies:
+ "@jridgewell/resolve-uri" "^3.0.3"
+ "@jridgewell/sourcemap-codec" "^1.4.10"
+
+"@pact-foundation/pact-node@^10.17.2":
+ version "10.17.4"
+ resolved "https://registry.yarnpkg.com/@pact-foundation/pact-node/-/pact-node-10.17.4.tgz#3a36fd2fe4b37fbb79be7d2ee3d31a9154305caa"
+ integrity sha512-KnK2V0cEyCqNA/LJxVUEu1yJ0gSBBHbvflqbuTwJ5cSv5AqN8KilSTwi1QvQ7HWGScdQci0ibUX1CORyAfEwhg==
+ dependencies:
+ "@types/needle" "^2.5.1"
+ "@types/pino" "^6.3.5"
+ "@types/q" "1.0.7"
+ "@types/request" "2.48.2"
+ chalk "2.3.1"
+ check-types "7.3.0"
+ cross-spawn "^7.0.1"
+ libnpmconfig "^1.2.1"
+ mkdirp "1.0.0"
+ needle "^2.6.0"
+ pino "^6.11.0"
+ pino-pretty "^4.1.0"
+ q "1.5.1"
+ rimraf "2.6.2"
+ sumchecker "^2.0.2"
+ tar "^6.1.11"
+ underscore "1.12.1"
+ unixify "1.0.0"
+ unzipper "^0.10.10"
+ url-join "^4.0.0"
+
+"@pact-foundation/pact@^9.17.2":
+ version "9.17.3"
+ resolved "https://registry.yarnpkg.com/@pact-foundation/pact/-/pact-9.17.3.tgz#9621e23e6803385841a3ab29a4162a90b6bcac3e"
+ integrity sha512-AVcScLZsxDDKlO1AuyN7m26Z2EqIt5AA4P8cPGEkokyHf97vbbWlHOPZsgZgzxZXQRnvjNyQnyMwHR69hQ1hew==
+ dependencies:
+ "@pact-foundation/pact-node" "^10.17.2"
+ "@types/bluebird" "^3.5.20"
+ "@types/express" "^4.17.11"
+ bluebird "~3.5.1"
+ body-parser "^1.18.2"
+ cli-color "^1.1.0"
+ es6-object-assign "^1.1.0"
+ es6-promise "^4.1.1"
+ express "^4.17.1"
+ graphql "^14.0.0"
+ graphql-tag "^2.9.1"
+ http-proxy "^1.18.1"
+ lodash "^4.17.21"
+ lodash.isfunction "3.0.8"
+ lodash.isnil "4.0.0"
+ lodash.isundefined "3.0.1"
+ lodash.omit "^4.5.0"
+ lodash.omitby "4.6.0"
+ pino "^6.5.1"
+ pino-pretty "^4.1.0"
+ pkginfo "^0.4.1"
+ popsicle "^9.2.0"
+
+"@sinclair/typebox@^0.23.3":
+ version "0.23.5"
+ resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.23.5.tgz#93f7b9f4e3285a7a9ade7557d9a8d36809cbc47d"
+ integrity sha512-AFBVi/iT4g20DHoujvMH1aEDn8fGJh4xsRGCP6d8RpLPMqsNPvW01Jcn0QysXTsg++/xj25NmJsGyH9xug/wKg==
+
+"@sinonjs/commons@^1.7.0":
+ version "1.8.3"
+ resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d"
+ integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ==
+ dependencies:
+ type-detect "4.0.8"
+
+"@sinonjs/fake-timers@^8.0.1":
+ version "8.1.0"
+ resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz#3fdc2b6cb58935b21bfb8d1625eb1300484316e7"
+ integrity sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg==
+ dependencies:
+ "@sinonjs/commons" "^1.7.0"
+
+"@tootallnate/once@1":
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82"
+ integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==
+
+"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.14":
+ version "7.1.19"
+ resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.19.tgz#7b497495b7d1b4812bdb9d02804d0576f43ee460"
+ integrity sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw==
+ dependencies:
+ "@babel/parser" "^7.1.0"
+ "@babel/types" "^7.0.0"
+ "@types/babel__generator" "*"
+ "@types/babel__template" "*"
+ "@types/babel__traverse" "*"
+
+"@types/babel__generator@*":
+ version "7.6.4"
+ resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7"
+ integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg==
+ dependencies:
+ "@babel/types" "^7.0.0"
+
+"@types/babel__template@*":
+ version "7.4.1"
+ resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969"
+ integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g==
+ dependencies:
+ "@babel/parser" "^7.1.0"
+ "@babel/types" "^7.0.0"
+
+"@types/babel__traverse@*", "@types/babel__traverse@^7.0.4", "@types/babel__traverse@^7.0.6":
+ version "7.17.1"
+ resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.17.1.tgz#1a0e73e8c28c7e832656db372b779bfd2ef37314"
+ integrity sha512-kVzjari1s2YVi77D3w1yuvohV2idweYXMCDzqBiVNN63TcDWrIlTVOYpqVrvbbyOE/IyzBoTKF0fdnLPEORFxA==
+ dependencies:
+ "@babel/types" "^7.3.0"
+
+"@types/bluebird@^3.5.20":
+ version "3.5.36"
+ resolved "https://registry.yarnpkg.com/@types/bluebird/-/bluebird-3.5.36.tgz#00d9301d4dc35c2f6465a8aec634bb533674c652"
+ integrity sha512-HBNx4lhkxN7bx6P0++W8E289foSu8kO8GCk2unhuVggO+cE7rh9DhZUyPhUxNRG9m+5B5BTKxZQ5ZP92x/mx9Q==
+
+"@types/body-parser@*":
+ version "1.19.2"
+ resolved "https://registry.yarnpkg.com/@types/body-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0"
+ integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==
+ dependencies:
+ "@types/connect" "*"
+ "@types/node" "*"
+
+"@types/caseless@*":
+ version "0.12.2"
+ resolved "https://registry.yarnpkg.com/@types/caseless/-/caseless-0.12.2.tgz#f65d3d6389e01eeb458bd54dc8f52b95a9463bc8"
+ integrity sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==
+
+"@types/connect@*":
+ version "3.4.35"
+ resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.35.tgz#5fcf6ae445e4021d1fc2219a4873cc73a3bb2ad1"
+ integrity sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==
+ dependencies:
+ "@types/node" "*"
+
+"@types/express-serve-static-core@^4.17.18":
+ version "4.17.29"
+ resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.29.tgz#2a1795ea8e9e9c91b4a4bbe475034b20c1ec711c"
+ integrity sha512-uMd++6dMKS32EOuw1Uli3e3BPgdLIXmezcfHv7N4c1s3gkhikBplORPpMq3fuWkxncZN1reb16d5n8yhQ80x7Q==
+ dependencies:
+ "@types/node" "*"
+ "@types/qs" "*"
+ "@types/range-parser" "*"
+
+"@types/express@^4.17.11":
+ version "4.17.13"
+ resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.13.tgz#a76e2995728999bab51a33fabce1d705a3709034"
+ integrity sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA==
+ dependencies:
+ "@types/body-parser" "*"
+ "@types/express-serve-static-core" "^4.17.18"
+ "@types/qs" "*"
+ "@types/serve-static" "*"
+
+"@types/graceful-fs@^4.1.2", "@types/graceful-fs@^4.1.3":
+ version "4.1.5"
+ resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15"
+ integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw==
+ dependencies:
+ "@types/node" "*"
+
+"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1":
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44"
+ integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==
+
+"@types/istanbul-lib-report@*":
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686"
+ integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==
+ dependencies:
+ "@types/istanbul-lib-coverage" "*"
+
+"@types/istanbul-reports@^3.0.0":
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff"
+ integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==
+ dependencies:
+ "@types/istanbul-lib-report" "*"
+
+"@types/mime@^1":
+ version "1.3.2"
+ resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a"
+ integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==
+
+"@types/needle@^2.5.1":
+ version "2.5.3"
+ resolved "https://registry.yarnpkg.com/@types/needle/-/needle-2.5.3.tgz#cc64f46411b811df260171b68756e67d36890ea1"
+ integrity sha512-RwgTwMRaedfyCBe5SSWMpm1Yqzc5UPZEMw0eAd09OSyV93nLRj9/evMGZmgFeHKzUOd4xxtHvgtc+rjcBjI1Qg==
+ dependencies:
+ "@types/node" "*"
+
+"@types/node@*":
+ version "18.0.0"
+ resolved "https://registry.yarnpkg.com/@types/node/-/node-18.0.0.tgz#67c7b724e1bcdd7a8821ce0d5ee184d3b4dd525a"
+ integrity sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA==
+
+"@types/pino-pretty@*":
+ version "4.7.5"
+ resolved "https://registry.yarnpkg.com/@types/pino-pretty/-/pino-pretty-4.7.5.tgz#e4ade1e42b78b8b0c1c28010ff7eb6c439278b19"
+ integrity sha512-rfHe6VIknk14DymxGqc9maGsRe8/HQSvM2u46EAz2XrS92qsAJnW16dpdFejBuZKD8cRJX6Aw6uVZqIQctMpAg==
+ dependencies:
+ "@types/node" "*"
+ "@types/pino" "6.3"
+
+"@types/pino-std-serializers@*":
+ version "2.4.1"
+ resolved "https://registry.yarnpkg.com/@types/pino-std-serializers/-/pino-std-serializers-2.4.1.tgz#f8bd52a209c8b3c97d1533b1ba27f57c816382bf"
+ integrity sha512-17XcksO47M24IVTVKPeAByWUd3Oez7EbIjXpSbzMPhXVzgjGtrOa49gKBwxH9hb8dKv58OelsWQ+A1G1l9S3wQ==
+ dependencies:
+ "@types/node" "*"
+
+"@types/pino@6.3", "@types/pino@^6.3.5":
+ version "6.3.12"
+ resolved "https://registry.yarnpkg.com/@types/pino/-/pino-6.3.12.tgz#4425db6ced806109c3df957100cba9dfcd73c228"
+ integrity sha512-dsLRTq8/4UtVSpJgl9aeqHvbh6pzdmjYD3C092SYgLD2TyoCqHpTJk6vp8DvCTGGc7iowZ2MoiYiVUUCcu7muw==
+ dependencies:
+ "@types/node" "*"
+ "@types/pino-pretty" "*"
+ "@types/pino-std-serializers" "*"
+ sonic-boom "^2.1.0"
+
+"@types/prettier@^2.1.5":
+ version "2.6.3"
+ resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.6.3.tgz#68ada76827b0010d0db071f739314fa429943d0a"
+ integrity sha512-ymZk3LEC/fsut+/Q5qejp6R9O1rMxz3XaRHDV6kX8MrGAhOSPqVARbDi+EZvInBpw+BnCX3TD240byVkOfQsHg==
+
+"@types/q@1.0.7":
+ version "1.0.7"
+ resolved "https://registry.yarnpkg.com/@types/q/-/q-1.0.7.tgz#afd4c610f16f6386d320e0738ec38ba7d3431917"
+ integrity sha512-0WS7XU7sXzQ7J1nbnMKKYdjrrFoO3YtZYgUzeV8JFXffPnHfvSJQleR70I8BOAsOm14i4dyaAZ3YzqIl1YhkXQ==
+
+"@types/qs@*":
+ version "6.9.7"
+ resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb"
+ integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==
+
+"@types/range-parser@*":
+ version "1.2.4"
+ resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc"
+ integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==
+
+"@types/request@2.48.2":
+ version "2.48.2"
+ resolved "https://registry.yarnpkg.com/@types/request/-/request-2.48.2.tgz#936374cbe1179d7ed529fc02543deb4597450fed"
+ integrity sha512-gP+PSFXAXMrd5PcD7SqHeUjdGshAI8vKQ3+AvpQr3ht9iQea+59LOKvKITcQI+Lg+1EIkDP6AFSBUJPWG8GDyA==
+ dependencies:
+ "@types/caseless" "*"
+ "@types/node" "*"
+ "@types/tough-cookie" "*"
+ form-data "^2.5.0"
+
+"@types/serve-static@*":
+ version "1.13.10"
+ resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.13.10.tgz#f5e0ce8797d2d7cc5ebeda48a52c96c4fa47a8d9"
+ integrity sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ==
+ dependencies:
+ "@types/mime" "^1"
+ "@types/node" "*"
+
+"@types/stack-utils@^2.0.0":
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c"
+ integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==
+
+"@types/tough-cookie@*":
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.2.tgz#6286b4c7228d58ab7866d19716f3696e03a09397"
+ integrity sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw==
+
+"@types/yargs-parser@*":
+ version "21.0.0"
+ resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b"
+ integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA==
+
+"@types/yargs@^16.0.0":
+ version "16.0.4"
+ resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-16.0.4.tgz#26aad98dd2c2a38e421086ea9ad42b9e51642977"
+ integrity sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw==
+ dependencies:
+ "@types/yargs-parser" "*"
+
+"@types/yargs@^17.0.8":
+ version "17.0.10"
+ resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.10.tgz#591522fce85d8739bca7b8bb90d048e4478d186a"
+ integrity sha512-gmEaFwpj/7f/ROdtIlci1R1VYU1J4j95m8T+Tj3iBgiBFKg1foE/PSl93bBd5T9LDXNPo8UlNN6W0qwD8O5OaA==
+ dependencies:
+ "@types/yargs-parser" "*"
+
+abab@^2.0.3, abab@^2.0.5:
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291"
+ integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==
+
+accepts@~1.3.8:
+ version "1.3.8"
+ resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e"
+ integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==
+ dependencies:
+ mime-types "~2.1.34"
+ negotiator "0.6.3"
+
+acorn-globals@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45"
+ integrity sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg==
+ dependencies:
+ acorn "^7.1.1"
+ acorn-walk "^7.1.1"
+
+acorn-walk@^7.1.1:
+ version "7.2.0"
+ resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc"
+ integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==
+
+acorn@^7.1.1:
+ version "7.4.1"
+ resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa"
+ integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==
+
+acorn@^8.2.4:
+ version "8.7.1"
+ resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30"
+ integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==
+
+agent-base@6:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77"
+ integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==
+ dependencies:
+ debug "4"
+
+ansi-escapes@^4.2.1:
+ version "4.3.2"
+ resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e"
+ integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==
+ dependencies:
+ type-fest "^0.21.3"
+
+ansi-regex@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df"
+ integrity sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==
+
+ansi-regex@^5.0.1:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304"
+ integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==
+
+ansi-styles@^3.2.0, ansi-styles@^3.2.1:
+ version "3.2.1"
+ resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
+ integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==
+ dependencies:
+ color-convert "^1.9.0"
+
+ansi-styles@^4.0.0, ansi-styles@^4.1.0:
+ version "4.3.0"
+ resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937"
+ integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==
+ dependencies:
+ color-convert "^2.0.1"
+
+ansi-styles@^5.0.0:
+ version "5.2.0"
+ resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b"
+ integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==
+
+anymatch@^3.0.3:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716"
+ integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==
+ dependencies:
+ normalize-path "^3.0.0"
+ picomatch "^2.0.4"
+
+argparse@^1.0.7:
+ version "1.0.10"
+ resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
+ integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==
+ dependencies:
+ sprintf-js "~1.0.2"
+
+args@^5.0.1:
+ version "5.0.3"
+ resolved "https://registry.yarnpkg.com/args/-/args-5.0.3.tgz#943256db85021a85684be2f0882f25d796278702"
+ integrity sha512-h6k/zfFgusnv3i5TU08KQkVKuCPBtL/PWQbWkHUxvJrZ2nAyeaUupneemcrgn1xmqxPQsPIzwkUhOpoqPDRZuA==
+ dependencies:
+ camelcase "5.0.0"
+ chalk "2.4.2"
+ leven "2.1.0"
+ mri "1.1.4"
+
+array-flatten@1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2"
+ integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==
+
+asynckit@^0.4.0:
+ version "0.4.0"
+ resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
+ integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==
+
+atomic-sleep@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/atomic-sleep/-/atomic-sleep-1.0.0.tgz#eb85b77a601fc932cfe432c5acd364a9e2c9075b"
+ integrity sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==
+
+axios@^0.26.0:
+ version "0.26.1"
+ resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.1.tgz#1ede41c51fcf51bbbd6fd43669caaa4f0495aaa9"
+ integrity sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==
+ dependencies:
+ follow-redirects "^1.14.8"
+
+babel-jest@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-27.5.1.tgz#a1bf8d61928edfefd21da27eb86a695bfd691444"
+ integrity sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg==
+ dependencies:
+ "@jest/transform" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ "@types/babel__core" "^7.1.14"
+ babel-plugin-istanbul "^6.1.1"
+ babel-preset-jest "^27.5.1"
+ chalk "^4.0.0"
+ graceful-fs "^4.2.9"
+ slash "^3.0.0"
+
+babel-jest@^28.1.1:
+ version "28.1.1"
+ resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-28.1.1.tgz#2a3a4ae50964695b2d694ccffe4bec537c5a3586"
+ integrity sha512-MEt0263viUdAkTq5D7upHPNxvt4n9uLUGa6pPz3WviNBMtOmStb1lIXS3QobnoqM+qnH+vr4EKlvhe8QcmxIYw==
+ dependencies:
+ "@jest/transform" "^28.1.1"
+ "@types/babel__core" "^7.1.14"
+ babel-plugin-istanbul "^6.1.1"
+ babel-preset-jest "^28.1.1"
+ chalk "^4.0.0"
+ graceful-fs "^4.2.9"
+ slash "^3.0.0"
+
+babel-plugin-dynamic-import-node@^2.3.3:
+ version "2.3.3"
+ resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3"
+ integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==
+ dependencies:
+ object.assign "^4.1.0"
+
+babel-plugin-istanbul@^6.1.1:
+ version "6.1.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73"
+ integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.0.0"
+ "@istanbuljs/load-nyc-config" "^1.0.0"
+ "@istanbuljs/schema" "^0.1.2"
+ istanbul-lib-instrument "^5.0.4"
+ test-exclude "^6.0.0"
+
+babel-plugin-jest-hoist@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz#9be98ecf28c331eb9f5df9c72d6f89deb8181c2e"
+ integrity sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ==
+ dependencies:
+ "@babel/template" "^7.3.3"
+ "@babel/types" "^7.3.3"
+ "@types/babel__core" "^7.0.0"
+ "@types/babel__traverse" "^7.0.6"
+
+babel-plugin-jest-hoist@^28.1.1:
+ version "28.1.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-28.1.1.tgz#5e055cdcc47894f28341f87f5e35aad2df680b11"
+ integrity sha512-NovGCy5Hn25uMJSAU8FaHqzs13cFoOI4lhIujiepssjCKRsAo3TA734RDWSGxuFTsUJXerYOqQQodlxgmtqbzw==
+ dependencies:
+ "@babel/template" "^7.3.3"
+ "@babel/types" "^7.3.3"
+ "@types/babel__core" "^7.1.14"
+ "@types/babel__traverse" "^7.0.6"
+
+babel-plugin-polyfill-corejs2@^0.3.0:
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.1.tgz#440f1b70ccfaabc6b676d196239b138f8a2cfba5"
+ integrity sha512-v7/T6EQcNfVLfcN2X8Lulb7DjprieyLWJK/zOWH5DUYcAgex9sP3h25Q+DLsX9TloXe3y1O8l2q2Jv9q8UVB9w==
+ dependencies:
+ "@babel/compat-data" "^7.13.11"
+ "@babel/helper-define-polyfill-provider" "^0.3.1"
+ semver "^6.1.1"
+
+babel-plugin-polyfill-corejs3@^0.5.0:
+ version "0.5.2"
+ resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.2.tgz#aabe4b2fa04a6e038b688c5e55d44e78cd3a5f72"
+ integrity sha512-G3uJih0XWiID451fpeFaYGVuxHEjzKTHtc9uGFEjR6hHrvNzeS/PX+LLLcetJcytsB5m4j+K3o/EpXJNb/5IEQ==
+ dependencies:
+ "@babel/helper-define-polyfill-provider" "^0.3.1"
+ core-js-compat "^3.21.0"
+
+babel-plugin-polyfill-regenerator@^0.3.0:
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.3.1.tgz#2c0678ea47c75c8cc2fbb1852278d8fb68233990"
+ integrity sha512-Y2B06tvgHYt1x0yz17jGkGeeMr5FeKUu+ASJ+N6nB5lQ8Dapfg42i0OVrf8PNGJ3zKL4A23snMi1IRwrqqND7A==
+ dependencies:
+ "@babel/helper-define-polyfill-provider" "^0.3.1"
+
+babel-preset-current-node-syntax@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b"
+ integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==
+ dependencies:
+ "@babel/plugin-syntax-async-generators" "^7.8.4"
+ "@babel/plugin-syntax-bigint" "^7.8.3"
+ "@babel/plugin-syntax-class-properties" "^7.8.3"
+ "@babel/plugin-syntax-import-meta" "^7.8.3"
+ "@babel/plugin-syntax-json-strings" "^7.8.3"
+ "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3"
+ "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3"
+ "@babel/plugin-syntax-numeric-separator" "^7.8.3"
+ "@babel/plugin-syntax-object-rest-spread" "^7.8.3"
+ "@babel/plugin-syntax-optional-catch-binding" "^7.8.3"
+ "@babel/plugin-syntax-optional-chaining" "^7.8.3"
+ "@babel/plugin-syntax-top-level-await" "^7.8.3"
+
+babel-preset-jest@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz#91f10f58034cb7989cb4f962b69fa6eef6a6bc81"
+ integrity sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag==
+ dependencies:
+ babel-plugin-jest-hoist "^27.5.1"
+ babel-preset-current-node-syntax "^1.0.0"
+
+babel-preset-jest@^28.1.1:
+ version "28.1.1"
+ resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-28.1.1.tgz#5b6e5e69f963eb2d70f739c607b8f723c0ee75e4"
+ integrity sha512-FCq9Oud0ReTeWtcneYf/48981aTfXYuB9gbU4rBNNJVBSQ6ssv7E6v/qvbBxtOWwZFXjLZwpg+W3q7J6vhH25g==
+ dependencies:
+ babel-plugin-jest-hoist "^28.1.1"
+ babel-preset-current-node-syntax "^1.0.0"
+
+balanced-match@^1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
+ integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==
+
+big-integer@^1.6.17:
+ version "1.6.51"
+ resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.51.tgz#0df92a5d9880560d3ff2d5fd20245c889d130686"
+ integrity sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==
+
+binary@~0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/binary/-/binary-0.3.0.tgz#9f60553bc5ce8c3386f3b553cff47462adecaa79"
+ integrity sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg==
+ dependencies:
+ buffers "~0.1.1"
+ chainsaw "~0.1.0"
+
+bluebird@~3.4.1:
+ version "3.4.7"
+ resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.4.7.tgz#f72d760be09b7f76d08ed8fae98b289a8d05fab3"
+ integrity sha512-iD3898SR7sWVRHbiQv+sHUtHnMvC1o3nW5rAcqnq3uOn07DSAppZYUkIGslDz6gXC7HfunPe7YVBgoEJASPcHA==
+
+bluebird@~3.5.1:
+ version "3.5.5"
+ resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.5.tgz#a8d0afd73251effbbd5fe384a77d73003c17a71f"
+ integrity sha512-5am6HnnfN+urzt4yfg7IgTbotDjIT/u8AJpEt0sIU9FtXfVeezXAPKswrG+xKUCOYAINpSdgZVDU6QFh+cuH3w==
+
+body-parser@1.20.0, body-parser@^1.18.2:
+ version "1.20.0"
+ resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5"
+ integrity sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg==
+ dependencies:
+ bytes "3.1.2"
+ content-type "~1.0.4"
+ debug "2.6.9"
+ depd "2.0.0"
+ destroy "1.2.0"
+ http-errors "2.0.0"
+ iconv-lite "0.4.24"
+ on-finished "2.4.1"
+ qs "6.10.3"
+ raw-body "2.5.1"
+ type-is "~1.6.18"
+ unpipe "1.0.0"
+
+brace-expansion@^1.1.7:
+ version "1.1.11"
+ resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
+ integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==
+ dependencies:
+ balanced-match "^1.0.0"
+ concat-map "0.0.1"
+
+braces@^3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107"
+ integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
+ dependencies:
+ fill-range "^7.0.1"
+
+browser-process-hrtime@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626"
+ integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==
+
+browserslist@^4.20.2, browserslist@^4.20.4:
+ version "4.20.4"
+ resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.20.4.tgz#98096c9042af689ee1e0271333dbc564b8ce4477"
+ integrity sha512-ok1d+1WpnU24XYN7oC3QWgTyMhY/avPJ/r9T00xxvUOIparA/gc+UPUMaod3i+G6s+nI2nUb9xZ5k794uIwShw==
+ dependencies:
+ caniuse-lite "^1.0.30001349"
+ electron-to-chromium "^1.4.147"
+ escalade "^3.1.1"
+ node-releases "^2.0.5"
+ picocolors "^1.0.0"
+
+bser@2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05"
+ integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==
+ dependencies:
+ node-int64 "^0.4.0"
+
+buffer-from@^1.0.0:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5"
+ integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==
+
+buffer-indexof-polyfill@~1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/buffer-indexof-polyfill/-/buffer-indexof-polyfill-1.0.2.tgz#d2732135c5999c64b277fcf9b1abe3498254729c"
+ integrity sha512-I7wzHwA3t1/lwXQh+A5PbNvJxgfo5r3xulgpYDB5zckTu/Z9oUK9biouBKQUjEqzaz3HnAT6TYoovmE+GqSf7A==
+
+buffers@~0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/buffers/-/buffers-0.1.1.tgz#b24579c3bed4d6d396aeee6d9a8ae7f5482ab7bb"
+ integrity sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ==
+
+bytes@3.1.2:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5"
+ integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==
+
+call-bind@^1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c"
+ integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==
+ dependencies:
+ function-bind "^1.1.1"
+ get-intrinsic "^1.0.2"
+
+callsites@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73"
+ integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==
+
+camelcase@5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.0.0.tgz#03295527d58bd3cd4aa75363f35b2e8d97be2f42"
+ integrity sha512-faqwZqnWxbxn+F1d399ygeamQNy3lPp/H9H6rNrqYh4FSVCtcY+3cub1MxA8o9mDd55mM8Aghuu/kuyYA6VTsA==
+
+camelcase@^5.3.1:
+ version "5.3.1"
+ resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
+ integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
+
+camelcase@^6.2.0:
+ version "6.3.0"
+ resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a"
+ integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==
+
+caniuse-lite@^1.0.30001349:
+ version "1.0.30001355"
+ resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001355.tgz#e240b7177443ed0198c737a7f609536976701c77"
+ integrity sha512-Sd6pjJHF27LzCB7pT7qs+kuX2ndurzCzkpJl6Qct7LPSZ9jn0bkOA8mdgMgmqnQAWLVOOGjLpc+66V57eLtb1g==
+
+chainsaw@~0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/chainsaw/-/chainsaw-0.1.0.tgz#5eab50b28afe58074d0d58291388828b5e5fbc98"
+ integrity sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ==
+ dependencies:
+ traverse ">=0.3.0 <0.4"
+
+chalk@2.3.1:
+ version "2.3.1"
+ resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.3.1.tgz#523fe2678aec7b04e8041909292fe8b17059b796"
+ integrity sha512-QUU4ofkDoMIVO7hcx1iPTISs88wsO8jA92RQIm4JAwZvFGGAV2hSAA1NX7oVj2Ej2Q6NDTcRDjPTFrMCRZoJ6g==
+ dependencies:
+ ansi-styles "^3.2.0"
+ escape-string-regexp "^1.0.5"
+ supports-color "^5.2.0"
+
+chalk@2.4.2, chalk@^2.0.0:
+ version "2.4.2"
+ resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
+ integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
+ dependencies:
+ ansi-styles "^3.2.1"
+ escape-string-regexp "^1.0.5"
+ supports-color "^5.3.0"
+
+chalk@^4.0.0:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
+ integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==
+ dependencies:
+ ansi-styles "^4.1.0"
+ supports-color "^7.1.0"
+
+char-regex@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf"
+ integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==
+
+check-types@7.3.0:
+ version "7.3.0"
+ resolved "https://registry.yarnpkg.com/check-types/-/check-types-7.3.0.tgz#468f571a4435c24248f5fd0cb0e8d87c3c341e7d"
+ integrity sha512-bzDMlwEIZFtyK70RHwQhMCvXpPyJZgOCCKlvH9oAJz4quUQse8ZynYE5RQzKpY7b5PoL6G+jQMcZzUPD4p6tFg==
+
+chownr@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece"
+ integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==
+
+ci-info@^3.2.0:
+ version "3.3.2"
+ resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.3.2.tgz#6d2967ffa407466481c6c90b6e16b3098f080128"
+ integrity sha512-xmDt/QIAdeZ9+nfdPsaBCpMvHNLFiLdjj59qjqn+6iPe6YmHGQ35sBnQ8uslRBXFmXkiZQOJRjvQeoGppoTjjg==
+
+cjs-module-lexer@^1.0.0:
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40"
+ integrity sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA==
+
+cli-color@^1.1.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/cli-color/-/cli-color-1.4.0.tgz#7d10738f48526824f8fe7da51857cb0f572fe01f"
+ integrity sha512-xu6RvQqqrWEo6MPR1eixqGPywhYBHRs653F9jfXB2Hx4jdM/3WxiNE1vppRmxtMIfl16SFYTpYlrnqH/HsK/2w==
+ dependencies:
+ ansi-regex "^2.1.1"
+ d "1"
+ es5-ext "^0.10.46"
+ es6-iterator "^2.0.3"
+ memoizee "^0.4.14"
+ timers-ext "^0.1.5"
+
+cliui@^7.0.2:
+ version "7.0.4"
+ resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f"
+ integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==
+ dependencies:
+ string-width "^4.2.0"
+ strip-ansi "^6.0.0"
+ wrap-ansi "^7.0.0"
+
+co@^4.6.0:
+ version "4.6.0"
+ resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184"
+ integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==
+
+collect-v8-coverage@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59"
+ integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==
+
+color-convert@^1.9.0:
+ version "1.9.3"
+ resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
+ integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
+ dependencies:
+ color-name "1.1.3"
+
+color-convert@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3"
+ integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==
+ dependencies:
+ color-name "~1.1.4"
+
+color-name@1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
+ integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==
+
+color-name@~1.1.4:
+ version "1.1.4"
+ resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
+ integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
+
+combined-stream@^1.0.6, combined-stream@^1.0.8:
+ version "1.0.8"
+ resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f"
+ integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==
+ dependencies:
+ delayed-stream "~1.0.0"
+
+concat-map@0.0.1:
+ version "0.0.1"
+ resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
+ integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==
+
+concat-stream@^1.4.7:
+ version "1.6.2"
+ resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34"
+ integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==
+ dependencies:
+ buffer-from "^1.0.0"
+ inherits "^2.0.3"
+ readable-stream "^2.2.2"
+ typedarray "^0.0.6"
+
+content-disposition@0.5.4:
+ version "0.5.4"
+ resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe"
+ integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==
+ dependencies:
+ safe-buffer "5.2.1"
+
+content-type@~1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b"
+ integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==
+
+convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0:
+ version "1.8.0"
+ resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369"
+ integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==
+ dependencies:
+ safe-buffer "~5.1.1"
+
+cookie-signature@1.0.6:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c"
+ integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==
+
+cookie@0.5.0:
+ version "0.5.0"
+ resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b"
+ integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==
+
+core-js-compat@^3.21.0, core-js-compat@^3.22.1:
+ version "3.23.1"
+ resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.23.1.tgz#23d44d9f209086e60dabf9130cea7719af6e199b"
+ integrity sha512-KeYrEc8t6FJsKYB2qnDwRHWaC0cJNaqlHfCpMe5q3j/W1nje3moib/txNklddLPCtGb+etcBIyJ8zuMa/LN5/A==
+ dependencies:
+ browserslist "^4.20.4"
+ semver "7.0.0"
+
+core-util-is@~1.0.0:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
+ integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==
+
+cross-spawn@^7.0.1, cross-spawn@^7.0.3:
+ version "7.0.3"
+ resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
+ integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
+ dependencies:
+ path-key "^3.1.0"
+ shebang-command "^2.0.0"
+ which "^2.0.1"
+
+cssom@^0.4.4:
+ version "0.4.4"
+ resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10"
+ integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw==
+
+cssom@~0.3.6:
+ version "0.3.8"
+ resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a"
+ integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==
+
+cssstyle@^2.3.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852"
+ integrity sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A==
+ dependencies:
+ cssom "~0.3.6"
+
+d@1, d@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a"
+ integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==
+ dependencies:
+ es5-ext "^0.10.50"
+ type "^1.0.1"
+
+data-urls@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b"
+ integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ==
+ dependencies:
+ abab "^2.0.3"
+ whatwg-mimetype "^2.3.0"
+ whatwg-url "^8.0.0"
+
+dateformat@^4.5.1:
+ version "4.6.3"
+ resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-4.6.3.tgz#556fa6497e5217fedb78821424f8a1c22fa3f4b5"
+ integrity sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==
+
+debug@2.6.9, debug@^2.2.0:
+ version "2.6.9"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
+ integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
+ dependencies:
+ ms "2.0.0"
+
+debug@4, debug@^4.1.0, debug@^4.1.1:
+ version "4.3.4"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
+ integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
+ dependencies:
+ ms "2.1.2"
+
+debug@^3.2.6:
+ version "3.2.7"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a"
+ integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==
+ dependencies:
+ ms "^2.1.1"
+
+decimal.js@^10.2.1:
+ version "10.3.1"
+ resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.3.1.tgz#d8c3a444a9c6774ba60ca6ad7261c3a94fd5e783"
+ integrity sha512-V0pfhfr8suzyPGOx3nmq4aHqabehUZn6Ch9kyFpV79TGDTWFmHqUqXdabR7QHqxzrYolF4+tVmJhUG4OURg5dQ==
+
+dedent@^0.7.0:
+ version "0.7.0"
+ resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c"
+ integrity sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==
+
+deep-is@~0.1.3:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831"
+ integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==
+
+deepmerge@^4.2.2:
+ version "4.2.2"
+ resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955"
+ integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==
+
+define-properties@^1.1.3:
+ version "1.1.4"
+ resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1"
+ integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==
+ dependencies:
+ has-property-descriptors "^1.0.0"
+ object-keys "^1.1.1"
+
+delayed-stream@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
+ integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==
+
+depd@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df"
+ integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==
+
+destroy@1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015"
+ integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==
+
+detect-newline@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651"
+ integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==
+
+diff-sequences@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-27.5.1.tgz#eaecc0d327fd68c8d9672a1e64ab8dccb2ef5327"
+ integrity sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ==
+
+domexception@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304"
+ integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg==
+ dependencies:
+ webidl-conversions "^5.0.0"
+
+duplexer2@~0.1.4:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/duplexer2/-/duplexer2-0.1.4.tgz#8b12dab878c0d69e3e7891051662a32fc6bddcc1"
+ integrity sha512-asLFVfWWtJ90ZyOUHMqk7/S2w2guQKxUI2itj3d92ADHhxUSbCMGi1f1cBcJ7xM1To+pE/Khbwo1yuNbMEPKeA==
+ dependencies:
+ readable-stream "^2.0.2"
+
+ee-first@1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
+ integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==
+
+electron-to-chromium@^1.4.147:
+ version "1.4.159"
+ resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.159.tgz#458f8c36736680f0369adc0d9ea601f1f2da06cd"
+ integrity sha512-D0NfhJzv5GgN3L7bzy5VUD90yKXnNFdCy+fH6jox+5UyJJSFmDC2d1mUxc9yi3bpTaOA1MJ6BkuVjn+GZTaZwg==
+
+emittery@^0.8.1:
+ version "0.8.1"
+ resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860"
+ integrity sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg==
+
+emoji-regex@^8.0.0:
+ version "8.0.0"
+ resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37"
+ integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==
+
+encodeurl@~1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59"
+ integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==
+
+end-of-stream@^1.1.0:
+ version "1.4.4"
+ resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0"
+ integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==
+ dependencies:
+ once "^1.4.0"
+
+error-ex@^1.3.1:
+ version "1.3.2"
+ resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf"
+ integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==
+ dependencies:
+ is-arrayish "^0.2.1"
+
+es5-ext@^0.10.35, es5-ext@^0.10.46, es5-ext@^0.10.50, es5-ext@^0.10.53, es5-ext@~0.10.14, es5-ext@~0.10.2, es5-ext@~0.10.46:
+ version "0.10.61"
+ resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.61.tgz#311de37949ef86b6b0dcea894d1ffedb909d3269"
+ integrity sha512-yFhIqQAzu2Ca2I4SE2Au3rxVfmohU9Y7wqGR+s7+H7krk26NXhIRAZDgqd6xqjCEFUomDEA3/Bo/7fKmIkW1kA==
+ dependencies:
+ es6-iterator "^2.0.3"
+ es6-symbol "^3.1.3"
+ next-tick "^1.1.0"
+
+es6-iterator@^2.0.3:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7"
+ integrity sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g==
+ dependencies:
+ d "1"
+ es5-ext "^0.10.35"
+ es6-symbol "^3.1.1"
+
+es6-object-assign@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/es6-object-assign/-/es6-object-assign-1.1.0.tgz#c2c3582656247c39ea107cb1e6652b6f9f24523c"
+ integrity sha512-MEl9uirslVwqQU369iHNWZXsI8yaZYGg/D65aOgZkeyFJwHYSxilf7rQzXKI7DdDuBPrBXbfk3sl9hJhmd5AUw==
+
+es6-promise@^4.1.1:
+ version "4.2.8"
+ resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.8.tgz#4eb21594c972bc40553d276e510539143db53e0a"
+ integrity sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==
+
+es6-symbol@^3.1.1, es6-symbol@^3.1.3:
+ version "3.1.3"
+ resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18"
+ integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==
+ dependencies:
+ d "^1.0.1"
+ ext "^1.1.2"
+
+es6-weak-map@^2.0.3:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-2.0.3.tgz#b6da1f16cc2cc0d9be43e6bdbfc5e7dfcdf31d53"
+ integrity sha512-p5um32HOTO1kP+w7PRnB+5lQ43Z6muuMuIMffvDN8ZB4GcnjLBV6zGStpbASIMk4DCAvEaamhe2zhyCb/QXXsA==
+ dependencies:
+ d "1"
+ es5-ext "^0.10.46"
+ es6-iterator "^2.0.3"
+ es6-symbol "^3.1.1"
+
+escalade@^3.1.1:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40"
+ integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==
+
+escape-html@~1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988"
+ integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==
+
+escape-string-regexp@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
+ integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==
+
+escape-string-regexp@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344"
+ integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==
+
+escodegen@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd"
+ integrity sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==
+ dependencies:
+ esprima "^4.0.1"
+ estraverse "^5.2.0"
+ esutils "^2.0.2"
+ optionator "^0.8.1"
+ optionalDependencies:
+ source-map "~0.6.1"
+
+esprima@^4.0.0, esprima@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
+ integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
+
+estraverse@^5.2.0:
+ version "5.3.0"
+ resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123"
+ integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==
+
+esutils@^2.0.2:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
+ integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
+
+etag@~1.8.1:
+ version "1.8.1"
+ resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887"
+ integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==
+
+event-emitter@^0.3.5:
+ version "0.3.5"
+ resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39"
+ integrity sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA==
+ dependencies:
+ d "1"
+ es5-ext "~0.10.14"
+
+eventemitter3@^4.0.0:
+ version "4.0.7"
+ resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f"
+ integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==
+
+execa@^5.0.0:
+ version "5.1.1"
+ resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd"
+ integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==
+ dependencies:
+ cross-spawn "^7.0.3"
+ get-stream "^6.0.0"
+ human-signals "^2.1.0"
+ is-stream "^2.0.0"
+ merge-stream "^2.0.0"
+ npm-run-path "^4.0.1"
+ onetime "^5.1.2"
+ signal-exit "^3.0.3"
+ strip-final-newline "^2.0.0"
+
+exit@^0.1.2:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c"
+ integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==
+
+expect@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/expect/-/expect-27.5.1.tgz#83ce59f1e5bdf5f9d2b94b61d2050db48f3fef74"
+ integrity sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw==
+ dependencies:
+ "@jest/types" "^27.5.1"
+ jest-get-type "^27.5.1"
+ jest-matcher-utils "^27.5.1"
+ jest-message-util "^27.5.1"
+
+express@^4.17.1:
+ version "4.18.1"
+ resolved "https://registry.yarnpkg.com/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf"
+ integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q==
+ dependencies:
+ accepts "~1.3.8"
+ array-flatten "1.1.1"
+ body-parser "1.20.0"
+ content-disposition "0.5.4"
+ content-type "~1.0.4"
+ cookie "0.5.0"
+ cookie-signature "1.0.6"
+ debug "2.6.9"
+ depd "2.0.0"
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ etag "~1.8.1"
+ finalhandler "1.2.0"
+ fresh "0.5.2"
+ http-errors "2.0.0"
+ merge-descriptors "1.0.1"
+ methods "~1.1.2"
+ on-finished "2.4.1"
+ parseurl "~1.3.3"
+ path-to-regexp "0.1.7"
+ proxy-addr "~2.0.7"
+ qs "6.10.3"
+ range-parser "~1.2.1"
+ safe-buffer "5.2.1"
+ send "0.18.0"
+ serve-static "1.15.0"
+ setprototypeof "1.2.0"
+ statuses "2.0.1"
+ type-is "~1.6.18"
+ utils-merge "1.0.1"
+ vary "~1.1.2"
+
+ext@^1.1.2:
+ version "1.6.0"
+ resolved "https://registry.yarnpkg.com/ext/-/ext-1.6.0.tgz#3871d50641e874cc172e2b53f919842d19db4c52"
+ integrity sha512-sdBImtzkq2HpkdRLtlLWDa6w4DX22ijZLKx8BMPUuKe1c5lbN6xwQDQCxSfxBQnHZ13ls/FH0MQZx/q/gr6FQg==
+ dependencies:
+ type "^2.5.0"
+
+fast-json-stable-stringify@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633"
+ integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==
+
+fast-levenshtein@~2.0.6:
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
+ integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==
+
+fast-redact@^3.0.0:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/fast-redact/-/fast-redact-3.1.1.tgz#790fcff8f808c2e12fabbfb2be5cb2deda448fa0"
+ integrity sha512-odVmjC8x8jNeMZ3C+rPMESzXVSEU8tSWSHv9HFxP2mm89G/1WwqhrerJDQm9Zus8X6aoRgQDThKqptdNA6bt+A==
+
+fast-safe-stringify@^2.0.7, fast-safe-stringify@^2.0.8:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884"
+ integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==
+
+fb-watchman@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.1.tgz#fc84fb39d2709cf3ff6d743706157bb5708a8a85"
+ integrity sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg==
+ dependencies:
+ bser "2.1.1"
+
+figgy-pudding@^3.5.1:
+ version "3.5.2"
+ resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e"
+ integrity sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw==
+
+fill-range@^7.0.1:
+ version "7.0.1"
+ resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40"
+ integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
+ dependencies:
+ to-regex-range "^5.0.1"
+
+finalhandler@1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32"
+ integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==
+ dependencies:
+ debug "2.6.9"
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ on-finished "2.4.1"
+ parseurl "~1.3.3"
+ statuses "2.0.1"
+ unpipe "~1.0.0"
+
+find-up@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73"
+ integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==
+ dependencies:
+ locate-path "^3.0.0"
+
+find-up@^4.0.0, find-up@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19"
+ integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==
+ dependencies:
+ locate-path "^5.0.0"
+ path-exists "^4.0.0"
+
+flatstr@^1.0.12:
+ version "1.0.12"
+ resolved "https://registry.yarnpkg.com/flatstr/-/flatstr-1.0.12.tgz#c2ba6a08173edbb6c9640e3055b95e287ceb5931"
+ integrity sha512-4zPxDyhCyiN2wIAtSLI6gc82/EjqZc1onI4Mz/l0pWrAlsSfYH/2ZIcU+e3oA2wDwbzIWNKwa23F8rh6+DRWkw==
+
+follow-redirects@^1.0.0, follow-redirects@^1.14.8:
+ version "1.15.1"
+ resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.1.tgz#0ca6a452306c9b276e4d3127483e29575e207ad5"
+ integrity sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA==
+
+form-data@^2.0.0, form-data@^2.5.0:
+ version "2.5.1"
+ resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.5.1.tgz#f2cbec57b5e59e23716e128fe44d4e5dd23895f4"
+ integrity sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==
+ dependencies:
+ asynckit "^0.4.0"
+ combined-stream "^1.0.6"
+ mime-types "^2.1.12"
+
+form-data@^3.0.0:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f"
+ integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==
+ dependencies:
+ asynckit "^0.4.0"
+ combined-stream "^1.0.8"
+ mime-types "^2.1.12"
+
+forwarded@0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811"
+ integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==
+
+fresh@0.5.2:
+ version "0.5.2"
+ resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7"
+ integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==
+
+fs-minipass@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb"
+ integrity sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==
+ dependencies:
+ minipass "^3.0.0"
+
+fs.realpath@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
+ integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==
+
+fsevents@^2.3.2:
+ version "2.3.2"
+ resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a"
+ integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==
+
+fstream@^1.0.12:
+ version "1.0.12"
+ resolved "https://registry.yarnpkg.com/fstream/-/fstream-1.0.12.tgz#4e8ba8ee2d48be4f7d0de505455548eae5932045"
+ integrity sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==
+ dependencies:
+ graceful-fs "^4.1.2"
+ inherits "~2.0.0"
+ mkdirp ">=0.5 0"
+ rimraf "2"
+
+function-bind@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
+ integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==
+
+gensync@^1.0.0-beta.2:
+ version "1.0.0-beta.2"
+ resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0"
+ integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==
+
+get-caller-file@^2.0.5:
+ version "2.0.5"
+ resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e"
+ integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==
+
+get-intrinsic@^1.0.2, get-intrinsic@^1.1.1:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.2.tgz#336975123e05ad0b7ba41f152ee4aadbea6cf598"
+ integrity sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA==
+ dependencies:
+ function-bind "^1.1.1"
+ has "^1.0.3"
+ has-symbols "^1.0.3"
+
+get-package-type@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a"
+ integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==
+
+get-stream@^6.0.0:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7"
+ integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==
+
+glob@^7.0.5, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4:
+ version "7.2.3"
+ resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b"
+ integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==
+ dependencies:
+ fs.realpath "^1.0.0"
+ inflight "^1.0.4"
+ inherits "2"
+ minimatch "^3.1.1"
+ once "^1.3.0"
+ path-is-absolute "^1.0.0"
+
+globals@^11.1.0:
+ version "11.12.0"
+ resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e"
+ integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==
+
+graceful-fs@^4.1.2, graceful-fs@^4.2.2, graceful-fs@^4.2.9:
+ version "4.2.10"
+ resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c"
+ integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==
+
+graphql-tag@^2.9.1:
+ version "2.12.6"
+ resolved "https://registry.yarnpkg.com/graphql-tag/-/graphql-tag-2.12.6.tgz#d441a569c1d2537ef10ca3d1633b48725329b5f1"
+ integrity sha512-FdSNcu2QQcWnM2VNvSCCDCVS5PpPqpzgFT8+GXzqJuoDd0CBncxCY278u4mhRO7tMgo2JjgJA5aZ+nWSQ/Z+xg==
+ dependencies:
+ tslib "^2.1.0"
+
+graphql@^14.0.0:
+ version "14.7.0"
+ resolved "https://registry.yarnpkg.com/graphql/-/graphql-14.7.0.tgz#7fa79a80a69be4a31c27dda824dc04dac2035a72"
+ integrity sha512-l0xWZpoPKpppFzMfvVyFmp9vLN7w/ZZJPefUicMCepfJeQ8sMcztloGYY9DfjVPo6tIUDzU5Hw3MUbIjj9AVVA==
+ dependencies:
+ iterall "^1.2.2"
+
+has-flag@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
+ integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==
+
+has-flag@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
+ integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
+
+has-property-descriptors@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861"
+ integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==
+ dependencies:
+ get-intrinsic "^1.1.1"
+
+has-symbols@^1.0.1, has-symbols@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8"
+ integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==
+
+has@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796"
+ integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==
+ dependencies:
+ function-bind "^1.1.1"
+
+html-encoding-sniffer@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3"
+ integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ==
+ dependencies:
+ whatwg-encoding "^1.0.5"
+
+html-escaper@^2.0.0:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453"
+ integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==
+
+http-errors@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3"
+ integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==
+ dependencies:
+ depd "2.0.0"
+ inherits "2.0.4"
+ setprototypeof "1.2.0"
+ statuses "2.0.1"
+ toidentifier "1.0.1"
+
+http-proxy-agent@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a"
+ integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==
+ dependencies:
+ "@tootallnate/once" "1"
+ agent-base "6"
+ debug "4"
+
+http-proxy@^1.18.1:
+ version "1.18.1"
+ resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549"
+ integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==
+ dependencies:
+ eventemitter3 "^4.0.0"
+ follow-redirects "^1.0.0"
+ requires-port "^1.0.0"
+
+https-proxy-agent@^5.0.0:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6"
+ integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==
+ dependencies:
+ agent-base "6"
+ debug "4"
+
+human-signals@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0"
+ integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==
+
+iconv-lite@0.4.24, iconv-lite@^0.4.4:
+ version "0.4.24"
+ resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
+ integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
+ dependencies:
+ safer-buffer ">= 2.1.2 < 3"
+
+import-local@^3.0.2:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4"
+ integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==
+ dependencies:
+ pkg-dir "^4.2.0"
+ resolve-cwd "^3.0.0"
+
+imurmurhash@^0.1.4:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
+ integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==
+
+inflight@^1.0.4:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
+ integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==
+ dependencies:
+ once "^1.3.0"
+ wrappy "1"
+
+inherits@2, inherits@2.0.4, inherits@^2.0.3, inherits@~2.0.0, inherits@~2.0.3:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
+ integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
+
+ini@^1.3.5:
+ version "1.3.8"
+ resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c"
+ integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==
+
+ipaddr.js@1.9.1:
+ version "1.9.1"
+ resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3"
+ integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==
+
+is-arrayish@^0.2.1:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
+ integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==
+
+is-core-module@^2.8.1:
+ version "2.9.0"
+ resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.9.0.tgz#e1c34429cd51c6dd9e09e0799e396e27b19a9c69"
+ integrity sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A==
+ dependencies:
+ has "^1.0.3"
+
+is-fullwidth-code-point@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d"
+ integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==
+
+is-generator-fn@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118"
+ integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==
+
+is-number@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
+ integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
+
+is-potential-custom-element-name@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5"
+ integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==
+
+is-promise@^2.2.2:
+ version "2.2.2"
+ resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1"
+ integrity sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==
+
+is-stream@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077"
+ integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==
+
+is-typedarray@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
+ integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==
+
+isarray@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
+ integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==
+
+isexe@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
+ integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==
+
+istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3"
+ integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==
+
+istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0:
+ version "5.2.0"
+ resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.0.tgz#31d18bdd127f825dd02ea7bfdfd906f8ab840e9f"
+ integrity sha512-6Lthe1hqXHBNsqvgDzGO6l03XNeu3CrG4RqQ1KM9+l5+jNGpEJfIELx1NS3SEHmJQA8np/u+E4EPRKRiu6m19A==
+ dependencies:
+ "@babel/core" "^7.12.3"
+ "@babel/parser" "^7.14.7"
+ "@istanbuljs/schema" "^0.1.2"
+ istanbul-lib-coverage "^3.2.0"
+ semver "^6.3.0"
+
+istanbul-lib-report@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6"
+ integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==
+ dependencies:
+ istanbul-lib-coverage "^3.0.0"
+ make-dir "^3.0.0"
+ supports-color "^7.1.0"
+
+istanbul-lib-source-maps@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551"
+ integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==
+ dependencies:
+ debug "^4.1.1"
+ istanbul-lib-coverage "^3.0.0"
+ source-map "^0.6.1"
+
+istanbul-reports@^3.1.3:
+ version "3.1.4"
+ resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.4.tgz#1b6f068ecbc6c331040aab5741991273e609e40c"
+ integrity sha512-r1/DshN4KSE7xWEknZLLLLDn5CJybV3nw01VTkp6D5jzLuELlcbudfj/eSQFvrKsJuTVCGnePO7ho82Nw9zzfw==
+ dependencies:
+ html-escaper "^2.0.0"
+ istanbul-lib-report "^3.0.0"
+
+iterall@^1.2.2:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/iterall/-/iterall-1.3.0.tgz#afcb08492e2915cbd8a0884eb93a8c94d0d72fea"
+ integrity sha512-QZ9qOMdF+QLHxy1QIpUHUU1D5pS2CG2P69LF6L6CPjPYA/XMOmKV3PZpawHoAjHNyB0swdVTRxdYT4tbBbxqwg==
+
+jest-changed-files@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-27.5.1.tgz#a348aed00ec9bf671cc58a66fcbe7c3dfd6a68f5"
+ integrity sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw==
+ dependencies:
+ "@jest/types" "^27.5.1"
+ execa "^5.0.0"
+ throat "^6.0.1"
+
+jest-circus@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-27.5.1.tgz#37a5a4459b7bf4406e53d637b49d22c65d125ecc"
+ integrity sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw==
+ dependencies:
+ "@jest/environment" "^27.5.1"
+ "@jest/test-result" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ "@types/node" "*"
+ chalk "^4.0.0"
+ co "^4.6.0"
+ dedent "^0.7.0"
+ expect "^27.5.1"
+ is-generator-fn "^2.0.0"
+ jest-each "^27.5.1"
+ jest-matcher-utils "^27.5.1"
+ jest-message-util "^27.5.1"
+ jest-runtime "^27.5.1"
+ jest-snapshot "^27.5.1"
+ jest-util "^27.5.1"
+ pretty-format "^27.5.1"
+ slash "^3.0.0"
+ stack-utils "^2.0.3"
+ throat "^6.0.1"
+
+jest-cli@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-27.5.1.tgz#278794a6e6458ea8029547e6c6cbf673bd30b145"
+ integrity sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw==
+ dependencies:
+ "@jest/core" "^27.5.1"
+ "@jest/test-result" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ chalk "^4.0.0"
+ exit "^0.1.2"
+ graceful-fs "^4.2.9"
+ import-local "^3.0.2"
+ jest-config "^27.5.1"
+ jest-util "^27.5.1"
+ jest-validate "^27.5.1"
+ prompts "^2.0.1"
+ yargs "^16.2.0"
+
+jest-config@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-27.5.1.tgz#5c387de33dca3f99ad6357ddeccd91bf3a0e4a41"
+ integrity sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA==
+ dependencies:
+ "@babel/core" "^7.8.0"
+ "@jest/test-sequencer" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ babel-jest "^27.5.1"
+ chalk "^4.0.0"
+ ci-info "^3.2.0"
+ deepmerge "^4.2.2"
+ glob "^7.1.1"
+ graceful-fs "^4.2.9"
+ jest-circus "^27.5.1"
+ jest-environment-jsdom "^27.5.1"
+ jest-environment-node "^27.5.1"
+ jest-get-type "^27.5.1"
+ jest-jasmine2 "^27.5.1"
+ jest-regex-util "^27.5.1"
+ jest-resolve "^27.5.1"
+ jest-runner "^27.5.1"
+ jest-util "^27.5.1"
+ jest-validate "^27.5.1"
+ micromatch "^4.0.4"
+ parse-json "^5.2.0"
+ pretty-format "^27.5.1"
+ slash "^3.0.0"
+ strip-json-comments "^3.1.1"
+
+jest-diff@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-27.5.1.tgz#a07f5011ac9e6643cf8a95a462b7b1ecf6680def"
+ integrity sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw==
+ dependencies:
+ chalk "^4.0.0"
+ diff-sequences "^27.5.1"
+ jest-get-type "^27.5.1"
+ pretty-format "^27.5.1"
+
+jest-docblock@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-27.5.1.tgz#14092f364a42c6108d42c33c8cf30e058e25f6c0"
+ integrity sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ==
+ dependencies:
+ detect-newline "^3.0.0"
+
+jest-each@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-27.5.1.tgz#5bc87016f45ed9507fed6e4702a5b468a5b2c44e"
+ integrity sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ==
+ dependencies:
+ "@jest/types" "^27.5.1"
+ chalk "^4.0.0"
+ jest-get-type "^27.5.1"
+ jest-util "^27.5.1"
+ pretty-format "^27.5.1"
+
+jest-environment-jsdom@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz#ea9ccd1fc610209655a77898f86b2b559516a546"
+ integrity sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw==
+ dependencies:
+ "@jest/environment" "^27.5.1"
+ "@jest/fake-timers" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ "@types/node" "*"
+ jest-mock "^27.5.1"
+ jest-util "^27.5.1"
+ jsdom "^16.6.0"
+
+jest-environment-node@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-27.5.1.tgz#dedc2cfe52fab6b8f5714b4808aefa85357a365e"
+ integrity sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw==
+ dependencies:
+ "@jest/environment" "^27.5.1"
+ "@jest/fake-timers" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ "@types/node" "*"
+ jest-mock "^27.5.1"
+ jest-util "^27.5.1"
+
+jest-get-type@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-27.5.1.tgz#3cd613c507b0f7ace013df407a1c1cd578bcb4f1"
+ integrity sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==
+
+jest-haste-map@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-27.5.1.tgz#9fd8bd7e7b4fa502d9c6164c5640512b4e811e7f"
+ integrity sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng==
+ dependencies:
+ "@jest/types" "^27.5.1"
+ "@types/graceful-fs" "^4.1.2"
+ "@types/node" "*"
+ anymatch "^3.0.3"
+ fb-watchman "^2.0.0"
+ graceful-fs "^4.2.9"
+ jest-regex-util "^27.5.1"
+ jest-serializer "^27.5.1"
+ jest-util "^27.5.1"
+ jest-worker "^27.5.1"
+ micromatch "^4.0.4"
+ walker "^1.0.7"
+ optionalDependencies:
+ fsevents "^2.3.2"
+
+jest-haste-map@^28.1.1:
+ version "28.1.1"
+ resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-28.1.1.tgz#471685f1acd365a9394745bb97c8fc16289adca3"
+ integrity sha512-ZrRSE2o3Ezh7sb1KmeLEZRZ4mgufbrMwolcFHNRSjKZhpLa8TdooXOOFlSwoUzlbVs1t0l7upVRW2K7RWGHzbQ==
+ dependencies:
+ "@jest/types" "^28.1.1"
+ "@types/graceful-fs" "^4.1.3"
+ "@types/node" "*"
+ anymatch "^3.0.3"
+ fb-watchman "^2.0.0"
+ graceful-fs "^4.2.9"
+ jest-regex-util "^28.0.2"
+ jest-util "^28.1.1"
+ jest-worker "^28.1.1"
+ micromatch "^4.0.4"
+ walker "^1.0.8"
+ optionalDependencies:
+ fsevents "^2.3.2"
+
+jest-jasmine2@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz#a037b0034ef49a9f3d71c4375a796f3b230d1ac4"
+ integrity sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ==
+ dependencies:
+ "@jest/environment" "^27.5.1"
+ "@jest/source-map" "^27.5.1"
+ "@jest/test-result" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ "@types/node" "*"
+ chalk "^4.0.0"
+ co "^4.6.0"
+ expect "^27.5.1"
+ is-generator-fn "^2.0.0"
+ jest-each "^27.5.1"
+ jest-matcher-utils "^27.5.1"
+ jest-message-util "^27.5.1"
+ jest-runtime "^27.5.1"
+ jest-snapshot "^27.5.1"
+ jest-util "^27.5.1"
+ pretty-format "^27.5.1"
+ throat "^6.0.1"
+
+jest-leak-detector@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz#6ec9d54c3579dd6e3e66d70e3498adf80fde3fb8"
+ integrity sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ==
+ dependencies:
+ jest-get-type "^27.5.1"
+ pretty-format "^27.5.1"
+
+jest-matcher-utils@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz#9c0cdbda8245bc22d2331729d1091308b40cf8ab"
+ integrity sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw==
+ dependencies:
+ chalk "^4.0.0"
+ jest-diff "^27.5.1"
+ jest-get-type "^27.5.1"
+ pretty-format "^27.5.1"
+
+jest-message-util@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-27.5.1.tgz#bdda72806da10d9ed6425e12afff38cd1458b6cf"
+ integrity sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==
+ dependencies:
+ "@babel/code-frame" "^7.12.13"
+ "@jest/types" "^27.5.1"
+ "@types/stack-utils" "^2.0.0"
+ chalk "^4.0.0"
+ graceful-fs "^4.2.9"
+ micromatch "^4.0.4"
+ pretty-format "^27.5.1"
+ slash "^3.0.0"
+ stack-utils "^2.0.3"
+
+jest-mock@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-27.5.1.tgz#19948336d49ef4d9c52021d34ac7b5f36ff967d6"
+ integrity sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og==
+ dependencies:
+ "@jest/types" "^27.5.1"
+ "@types/node" "*"
+
+jest-pact@^0.9.1:
+ version "0.9.4"
+ resolved "https://registry.yarnpkg.com/jest-pact/-/jest-pact-0.9.4.tgz#c13f3d28cbe6288bbb0920e6b69857a9e1a042e9"
+ integrity sha512-vmBISe3DSGxOuMnjJJHdK0winrkBSpgD/Z3iMCaBUBBYG9bSPsBfGqBzAF0eYLGmQ7yUaoYlI798e515DGnzQw==
+
+jest-pnp-resolver@^1.2.2:
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c"
+ integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w==
+
+jest-regex-util@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-27.5.1.tgz#4da143f7e9fd1e542d4aa69617b38e4a78365b95"
+ integrity sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg==
+
+jest-regex-util@^28.0.2:
+ version "28.0.2"
+ resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead"
+ integrity sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw==
+
+jest-resolve-dependencies@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz#d811ecc8305e731cc86dd79741ee98fed06f1da8"
+ integrity sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg==
+ dependencies:
+ "@jest/types" "^27.5.1"
+ jest-regex-util "^27.5.1"
+ jest-snapshot "^27.5.1"
+
+jest-resolve@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-27.5.1.tgz#a2f1c5a0796ec18fe9eb1536ac3814c23617b384"
+ integrity sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw==
+ dependencies:
+ "@jest/types" "^27.5.1"
+ chalk "^4.0.0"
+ graceful-fs "^4.2.9"
+ jest-haste-map "^27.5.1"
+ jest-pnp-resolver "^1.2.2"
+ jest-util "^27.5.1"
+ jest-validate "^27.5.1"
+ resolve "^1.20.0"
+ resolve.exports "^1.1.0"
+ slash "^3.0.0"
+
+jest-runner@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-27.5.1.tgz#071b27c1fa30d90540805c5645a0ec167c7b62e5"
+ integrity sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ==
+ dependencies:
+ "@jest/console" "^27.5.1"
+ "@jest/environment" "^27.5.1"
+ "@jest/test-result" "^27.5.1"
+ "@jest/transform" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ "@types/node" "*"
+ chalk "^4.0.0"
+ emittery "^0.8.1"
+ graceful-fs "^4.2.9"
+ jest-docblock "^27.5.1"
+ jest-environment-jsdom "^27.5.1"
+ jest-environment-node "^27.5.1"
+ jest-haste-map "^27.5.1"
+ jest-leak-detector "^27.5.1"
+ jest-message-util "^27.5.1"
+ jest-resolve "^27.5.1"
+ jest-runtime "^27.5.1"
+ jest-util "^27.5.1"
+ jest-worker "^27.5.1"
+ source-map-support "^0.5.6"
+ throat "^6.0.1"
+
+jest-runtime@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-27.5.1.tgz#4896003d7a334f7e8e4a53ba93fb9bcd3db0a1af"
+ integrity sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A==
+ dependencies:
+ "@jest/environment" "^27.5.1"
+ "@jest/fake-timers" "^27.5.1"
+ "@jest/globals" "^27.5.1"
+ "@jest/source-map" "^27.5.1"
+ "@jest/test-result" "^27.5.1"
+ "@jest/transform" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ chalk "^4.0.0"
+ cjs-module-lexer "^1.0.0"
+ collect-v8-coverage "^1.0.0"
+ execa "^5.0.0"
+ glob "^7.1.3"
+ graceful-fs "^4.2.9"
+ jest-haste-map "^27.5.1"
+ jest-message-util "^27.5.1"
+ jest-mock "^27.5.1"
+ jest-regex-util "^27.5.1"
+ jest-resolve "^27.5.1"
+ jest-snapshot "^27.5.1"
+ jest-util "^27.5.1"
+ slash "^3.0.0"
+ strip-bom "^4.0.0"
+
+jest-serializer@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-27.5.1.tgz#81438410a30ea66fd57ff730835123dea1fb1f64"
+ integrity sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w==
+ dependencies:
+ "@types/node" "*"
+ graceful-fs "^4.2.9"
+
+jest-snapshot@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-27.5.1.tgz#b668d50d23d38054a51b42c4039cab59ae6eb6a1"
+ integrity sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA==
+ dependencies:
+ "@babel/core" "^7.7.2"
+ "@babel/generator" "^7.7.2"
+ "@babel/plugin-syntax-typescript" "^7.7.2"
+ "@babel/traverse" "^7.7.2"
+ "@babel/types" "^7.0.0"
+ "@jest/transform" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ "@types/babel__traverse" "^7.0.4"
+ "@types/prettier" "^2.1.5"
+ babel-preset-current-node-syntax "^1.0.0"
+ chalk "^4.0.0"
+ expect "^27.5.1"
+ graceful-fs "^4.2.9"
+ jest-diff "^27.5.1"
+ jest-get-type "^27.5.1"
+ jest-haste-map "^27.5.1"
+ jest-matcher-utils "^27.5.1"
+ jest-message-util "^27.5.1"
+ jest-util "^27.5.1"
+ natural-compare "^1.4.0"
+ pretty-format "^27.5.1"
+ semver "^7.3.2"
+
+jest-util@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-27.5.1.tgz#3ba9771e8e31a0b85da48fe0b0891fb86c01c2f9"
+ integrity sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==
+ dependencies:
+ "@jest/types" "^27.5.1"
+ "@types/node" "*"
+ chalk "^4.0.0"
+ ci-info "^3.2.0"
+ graceful-fs "^4.2.9"
+ picomatch "^2.2.3"
+
+jest-util@^28.1.1:
+ version "28.1.1"
+ resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-28.1.1.tgz#ff39e436a1aca397c0ab998db5a51ae2b7080d05"
+ integrity sha512-FktOu7ca1DZSyhPAxgxB6hfh2+9zMoJ7aEQA759Z6p45NuO8mWcqujH+UdHlCm/V6JTWwDztM2ITCzU1ijJAfw==
+ dependencies:
+ "@jest/types" "^28.1.1"
+ "@types/node" "*"
+ chalk "^4.0.0"
+ ci-info "^3.2.0"
+ graceful-fs "^4.2.9"
+ picomatch "^2.2.3"
+
+jest-validate@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-27.5.1.tgz#9197d54dc0bdb52260b8db40b46ae668e04df067"
+ integrity sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ==
+ dependencies:
+ "@jest/types" "^27.5.1"
+ camelcase "^6.2.0"
+ chalk "^4.0.0"
+ jest-get-type "^27.5.1"
+ leven "^3.1.0"
+ pretty-format "^27.5.1"
+
+jest-watcher@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-27.5.1.tgz#71bd85fb9bde3a2c2ec4dc353437971c43c642a2"
+ integrity sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw==
+ dependencies:
+ "@jest/test-result" "^27.5.1"
+ "@jest/types" "^27.5.1"
+ "@types/node" "*"
+ ansi-escapes "^4.2.1"
+ chalk "^4.0.0"
+ jest-util "^27.5.1"
+ string-length "^4.0.1"
+
+jest-worker@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0"
+ integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==
+ dependencies:
+ "@types/node" "*"
+ merge-stream "^2.0.0"
+ supports-color "^8.0.0"
+
+jest-worker@^28.1.1:
+ version "28.1.1"
+ resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-28.1.1.tgz#3480c73247171dfd01eda77200f0063ab6a3bf28"
+ integrity sha512-Au7slXB08C6h+xbJPp7VIb6U0XX5Kc9uel/WFc6/rcTzGiaVCBRngBExSYuXSLFPULPSYU3cJ3ybS988lNFQhQ==
+ dependencies:
+ "@types/node" "*"
+ merge-stream "^2.0.0"
+ supports-color "^8.0.0"
+
+jest@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest/-/jest-27.5.1.tgz#dadf33ba70a779be7a6fc33015843b51494f63fc"
+ integrity sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ==
+ dependencies:
+ "@jest/core" "^27.5.1"
+ import-local "^3.0.2"
+ jest-cli "^27.5.1"
+
+jmespath@^0.15.0:
+ version "0.15.0"
+ resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.15.0.tgz#a3f222a9aae9f966f5d27c796510e28091764217"
+ integrity sha512-+kHj8HXArPfpPEKGLZ+kB5ONRTCiGQXo8RQYL0hH8t6pWXUBBK5KkkQmTNOwKK4LEsd0yTsgtjJVm4UBSZea4w==
+
+joycon@^2.2.5:
+ version "2.2.5"
+ resolved "https://registry.yarnpkg.com/joycon/-/joycon-2.2.5.tgz#8d4cf4cbb2544d7b7583c216fcdfec19f6be1615"
+ integrity sha512-YqvUxoOcVPnCp0VU1/56f+iKSdvIRJYPznH22BdXV3xMk75SFXhWeJkZ8C9XxUWt1b5x2X1SxuFygW1U0FmkEQ==
+
+js-tokens@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
+ integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==
+
+js-yaml@^3.13.1:
+ version "3.14.1"
+ resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537"
+ integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==
+ dependencies:
+ argparse "^1.0.7"
+ esprima "^4.0.0"
+
+jsdom@^16.6.0:
+ version "16.7.0"
+ resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710"
+ integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw==
+ dependencies:
+ abab "^2.0.5"
+ acorn "^8.2.4"
+ acorn-globals "^6.0.0"
+ cssom "^0.4.4"
+ cssstyle "^2.3.0"
+ data-urls "^2.0.0"
+ decimal.js "^10.2.1"
+ domexception "^2.0.1"
+ escodegen "^2.0.0"
+ form-data "^3.0.0"
+ html-encoding-sniffer "^2.0.1"
+ http-proxy-agent "^4.0.1"
+ https-proxy-agent "^5.0.0"
+ is-potential-custom-element-name "^1.0.1"
+ nwsapi "^2.2.0"
+ parse5 "6.0.1"
+ saxes "^5.0.1"
+ symbol-tree "^3.2.4"
+ tough-cookie "^4.0.0"
+ w3c-hr-time "^1.0.2"
+ w3c-xmlserializer "^2.0.0"
+ webidl-conversions "^6.1.0"
+ whatwg-encoding "^1.0.5"
+ whatwg-mimetype "^2.3.0"
+ whatwg-url "^8.5.0"
+ ws "^7.4.6"
+ xml-name-validator "^3.0.0"
+
+jsesc@^2.5.1:
+ version "2.5.2"
+ resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4"
+ integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==
+
+jsesc@~0.5.0:
+ version "0.5.0"
+ resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d"
+ integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==
+
+json-parse-even-better-errors@^2.3.0:
+ version "2.3.1"
+ resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d"
+ integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==
+
+json5@^2.2.1:
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c"
+ integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==
+
+kleur@^3.0.3:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e"
+ integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==
+
+leven@2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/leven/-/leven-2.1.0.tgz#c2e7a9f772094dee9d34202ae8acce4687875580"
+ integrity sha512-nvVPLpIHUxCUoRLrFqTgSxXJ614d8AgQoWl7zPe/2VadE8+1dpU3LBhowRuBAcuwruWtOdD8oYC9jDNJjXDPyA==
+
+leven@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2"
+ integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==
+
+levn@~0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee"
+ integrity sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==
+ dependencies:
+ prelude-ls "~1.1.2"
+ type-check "~0.3.2"
+
+libnpmconfig@^1.2.1:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/libnpmconfig/-/libnpmconfig-1.2.1.tgz#c0c2f793a74e67d4825e5039e7a02a0044dfcbc0"
+ integrity sha512-9esX8rTQAHqarx6qeZqmGQKBNZR5OIbl/Ayr0qQDy3oXja2iFVQQI81R6GZ2a02bSNZ9p3YOGX1O6HHCb1X7kA==
+ dependencies:
+ figgy-pudding "^3.5.1"
+ find-up "^3.0.0"
+ ini "^1.3.5"
+
+lines-and-columns@^1.1.6:
+ version "1.2.4"
+ resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632"
+ integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==
+
+listenercount@~1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/listenercount/-/listenercount-1.0.1.tgz#84c8a72ab59c4725321480c975e6508342e70937"
+ integrity sha512-3mk/Zag0+IJxeDrxSgaDPy4zZ3w05PRZeJNnlWhzFz5OkX49J4krc+A8X2d2M69vGMBEX0uyl8M+W+8gH+kBqQ==
+
+locate-path@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e"
+ integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==
+ dependencies:
+ p-locate "^3.0.0"
+ path-exists "^3.0.0"
+
+locate-path@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0"
+ integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==
+ dependencies:
+ p-locate "^4.1.0"
+
+lodash.debounce@^4.0.8:
+ version "4.0.8"
+ resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af"
+ integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==
+
+lodash.isfunction@3.0.8:
+ version "3.0.8"
+ resolved "https://registry.yarnpkg.com/lodash.isfunction/-/lodash.isfunction-3.0.8.tgz#4db709fc81bc4a8fd7127a458a5346c5cdce2c6b"
+ integrity sha512-WQj3vccQSW5IKeRl8F0bezPlZH5/LFXtNPICsbZLsv+HmVfWAfrzy2ZajGqmNLonIjPIcPOk3uXOGv5jgPgTyg==
+
+lodash.isnil@4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/lodash.isnil/-/lodash.isnil-4.0.0.tgz#49e28cd559013458c814c5479d3c663a21bfaa6c"
+ integrity sha512-up2Mzq3545mwVnMhTDMdfoG1OurpA/s5t88JmQX809eH3C8491iu2sfKhTfhQtKY78oPNhiaHJUpT/dUDAAtng==
+
+lodash.isundefined@3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/lodash.isundefined/-/lodash.isundefined-3.0.1.tgz#23ef3d9535565203a66cefd5b830f848911afb48"
+ integrity sha512-MXB1is3s899/cD8jheYYE2V9qTHwKvt+npCwpD+1Sxm3Q3cECXCiYHjeHWXNwr6Q0SOBPrYUDxendrO6goVTEA==
+
+lodash.omit@^4.5.0:
+ version "4.5.0"
+ resolved "https://registry.yarnpkg.com/lodash.omit/-/lodash.omit-4.5.0.tgz#6eb19ae5a1ee1dd9df0b969e66ce0b7fa30b5e60"
+ integrity sha512-XeqSp49hNGmlkj2EJlfrQFIzQ6lXdNro9sddtQzcJY8QaoC2GO0DT7xaIokHeyM+mIT0mPMlPvkYzg2xCuHdZg==
+
+lodash.omitby@4.6.0:
+ version "4.6.0"
+ resolved "https://registry.yarnpkg.com/lodash.omitby/-/lodash.omitby-4.6.0.tgz#5c15ff4754ad555016b53c041311e8f079204791"
+ integrity sha512-5OrRcIVR75M288p4nbI2WLAf3ndw2GD9fyNv3Bc15+WCxJDdZ4lYndSxGd7hnG6PVjiJTeJE2dHEGhIuKGicIQ==
+
+lodash@^4.17.21, lodash@^4.7.0:
+ version "4.17.21"
+ resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
+ integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
+
+lru-cache@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94"
+ integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==
+ dependencies:
+ yallist "^4.0.0"
+
+lru-queue@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/lru-queue/-/lru-queue-0.1.0.tgz#2738bd9f0d3cf4f84490c5736c48699ac632cda3"
+ integrity sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ==
+ dependencies:
+ es5-ext "~0.10.2"
+
+make-dir@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f"
+ integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==
+ dependencies:
+ semver "^6.0.0"
+
+make-error-cause@^1.2.1:
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/make-error-cause/-/make-error-cause-1.2.2.tgz#df0388fcd0b37816dff0a5fb8108939777dcbc9d"
+ integrity sha512-4TO2Y3HkBnis4c0dxhAgD/jprySYLACf7nwN6V0HAHDx59g12WlRpUmFy1bRHamjGUEEBrEvCq6SUpsEE2lhUg==
+ dependencies:
+ make-error "^1.2.0"
+
+make-error@^1.2.0:
+ version "1.3.6"
+ resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2"
+ integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==
+
+makeerror@1.0.12:
+ version "1.0.12"
+ resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a"
+ integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==
+ dependencies:
+ tmpl "1.0.5"
+
+media-typer@0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
+ integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==
+
+memoizee@^0.4.14:
+ version "0.4.15"
+ resolved "https://registry.yarnpkg.com/memoizee/-/memoizee-0.4.15.tgz#e6f3d2da863f318d02225391829a6c5956555b72"
+ integrity sha512-UBWmJpLZd5STPm7PMUlOw/TSy972M+z8gcyQ5veOnSDRREz/0bmpyTfKt3/51DhEBqCZQn1udM/5flcSPYhkdQ==
+ dependencies:
+ d "^1.0.1"
+ es5-ext "^0.10.53"
+ es6-weak-map "^2.0.3"
+ event-emitter "^0.3.5"
+ is-promise "^2.2.2"
+ lru-queue "^0.1.0"
+ next-tick "^1.1.0"
+ timers-ext "^0.1.7"
+
+merge-descriptors@1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
+ integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==
+
+merge-stream@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60"
+ integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==
+
+methods@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee"
+ integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==
+
+micromatch@^4.0.4:
+ version "4.0.5"
+ resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6"
+ integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==
+ dependencies:
+ braces "^3.0.2"
+ picomatch "^2.3.1"
+
+mime-db@1.52.0:
+ version "1.52.0"
+ resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
+ integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==
+
+mime-types@^2.1.12, mime-types@~2.1.24, mime-types@~2.1.34:
+ version "2.1.35"
+ resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a"
+ integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==
+ dependencies:
+ mime-db "1.52.0"
+
+mime@1.6.0:
+ version "1.6.0"
+ resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1"
+ integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==
+
+mimic-fn@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b"
+ integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==
+
+minimatch@^3.0.4, minimatch@^3.1.1:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b"
+ integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==
+ dependencies:
+ brace-expansion "^1.1.7"
+
+minimist@^1.2.6:
+ version "1.2.6"
+ resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44"
+ integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==
+
+minipass@^3.0.0:
+ version "3.1.6"
+ resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.1.6.tgz#3b8150aa688a711a1521af5e8779c1d3bb4f45ee"
+ integrity sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==
+ dependencies:
+ yallist "^4.0.0"
+
+minizlib@^2.1.1:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931"
+ integrity sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==
+ dependencies:
+ minipass "^3.0.0"
+ yallist "^4.0.0"
+
+mkdirp@1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.0.tgz#8487b07699b70c9b06fce47b3ce28d8176c13c75"
+ integrity sha512-4Pb+8NJ5DdvaWD797hKOM28wMXsObb4HppQdIwKUHFiB69ICZ4wktOE+qsGGBy7GtwgYNizp0R9KEy4zKYBLMg==
+
+"mkdirp@>=0.5 0":
+ version "0.5.6"
+ resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6"
+ integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==
+ dependencies:
+ minimist "^1.2.6"
+
+mkdirp@^1.0.3:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e"
+ integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==
+
+mri@1.1.4:
+ version "1.1.4"
+ resolved "https://registry.yarnpkg.com/mri/-/mri-1.1.4.tgz#7cb1dd1b9b40905f1fac053abe25b6720f44744a"
+ integrity sha512-6y7IjGPm8AzlvoUrwAaw1tLnUBudaS3752vcd8JtrpGGQn+rXIe63LFVHm/YMwtqAuh+LJPCFdlLYPWM1nYn6w==
+
+ms@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
+ integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==
+
+ms@2.1.2:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
+ integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
+
+ms@2.1.3, ms@^2.1.1:
+ version "2.1.3"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
+ integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
+
+natural-compare@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
+ integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==
+
+needle@^2.6.0:
+ version "2.9.1"
+ resolved "https://registry.yarnpkg.com/needle/-/needle-2.9.1.tgz#22d1dffbe3490c2b83e301f7709b6736cd8f2684"
+ integrity sha512-6R9fqJ5Zcmf+uYaFgdIHmLwNldn5HbK8L5ybn7Uz+ylX/rnOsSp1AHcvQSrCaFN+qNM1wpymHqD7mVasEOlHGQ==
+ dependencies:
+ debug "^3.2.6"
+ iconv-lite "^0.4.4"
+ sax "^1.2.4"
+
+negotiator@0.6.3:
+ version "0.6.3"
+ resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd"
+ integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==
+
+next-tick@1, next-tick@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.1.0.tgz#1836ee30ad56d67ef281b22bd199f709449b35eb"
+ integrity sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==
+
+node-int64@^0.4.0:
+ version "0.4.0"
+ resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b"
+ integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==
+
+node-releases@^2.0.5:
+ version "2.0.5"
+ resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.5.tgz#280ed5bc3eba0d96ce44897d8aee478bfb3d9666"
+ integrity sha512-U9h1NLROZTq9uE1SNffn6WuPDg8icmi3ns4rEl/oTfIle4iLjTliCzgTsbaIFMq/Xn078/lfY/BL0GWZ+psK4Q==
+
+normalize-path@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9"
+ integrity sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==
+ dependencies:
+ remove-trailing-separator "^1.0.1"
+
+normalize-path@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
+ integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==
+
+npm-run-path@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea"
+ integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==
+ dependencies:
+ path-key "^3.0.0"
+
+nwsapi@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.0.tgz#204879a9e3d068ff2a55139c2c772780681a38b7"
+ integrity sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ==
+
+object-inspect@^1.9.0:
+ version "1.12.2"
+ resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea"
+ integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==
+
+object-keys@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e"
+ integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==
+
+object.assign@^4.1.0:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940"
+ integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==
+ dependencies:
+ call-bind "^1.0.0"
+ define-properties "^1.1.3"
+ has-symbols "^1.0.1"
+ object-keys "^1.1.1"
+
+on-finished@2.4.1:
+ version "2.4.1"
+ resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f"
+ integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==
+ dependencies:
+ ee-first "1.1.1"
+
+once@^1.3.0, once@^1.3.1, once@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
+ integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==
+ dependencies:
+ wrappy "1"
+
+onetime@^5.1.2:
+ version "5.1.2"
+ resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e"
+ integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==
+ dependencies:
+ mimic-fn "^2.1.0"
+
+optionator@^0.8.1:
+ version "0.8.3"
+ resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495"
+ integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==
+ dependencies:
+ deep-is "~0.1.3"
+ fast-levenshtein "~2.0.6"
+ levn "~0.3.0"
+ prelude-ls "~1.1.2"
+ type-check "~0.3.2"
+ word-wrap "~1.2.3"
+
+p-limit@^2.0.0, p-limit@^2.2.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
+ integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==
+ dependencies:
+ p-try "^2.0.0"
+
+p-locate@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4"
+ integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==
+ dependencies:
+ p-limit "^2.0.0"
+
+p-locate@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07"
+ integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==
+ dependencies:
+ p-limit "^2.2.0"
+
+p-try@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
+ integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==
+
+parse-json@^5.2.0:
+ version "5.2.0"
+ resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd"
+ integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==
+ dependencies:
+ "@babel/code-frame" "^7.0.0"
+ error-ex "^1.3.1"
+ json-parse-even-better-errors "^2.3.0"
+ lines-and-columns "^1.1.6"
+
+parse5@6.0.1:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b"
+ integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==
+
+parseurl@~1.3.3:
+ version "1.3.3"
+ resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4"
+ integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==
+
+path-exists@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
+ integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==
+
+path-exists@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3"
+ integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==
+
+path-is-absolute@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
+ integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==
+
+path-key@^3.0.0, path-key@^3.1.0:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375"
+ integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==
+
+path-parse@^1.0.7:
+ version "1.0.7"
+ resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735"
+ integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==
+
+path-to-regexp@0.1.7:
+ version "0.1.7"
+ resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c"
+ integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==
+
+picocolors@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c"
+ integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==
+
+picomatch@^2.0.4, picomatch@^2.2.3, picomatch@^2.3.1:
+ version "2.3.1"
+ resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42"
+ integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==
+
+pino-pretty@^4.1.0:
+ version "4.8.0"
+ resolved "https://registry.yarnpkg.com/pino-pretty/-/pino-pretty-4.8.0.tgz#f2f3055bf222456217b14ffb04d8be0a0cc17fce"
+ integrity sha512-mhQfHG4rw5ZFpWL44m0Utjo4GC2+HMfdNvxyA8lLw0sIqn6fCf7uQe6dPckUcW/obly+OQHD7B/MTso6LNizYw==
+ dependencies:
+ "@hapi/bourne" "^2.0.0"
+ args "^5.0.1"
+ chalk "^4.0.0"
+ dateformat "^4.5.1"
+ fast-safe-stringify "^2.0.7"
+ jmespath "^0.15.0"
+ joycon "^2.2.5"
+ pump "^3.0.0"
+ readable-stream "^3.6.0"
+ rfdc "^1.3.0"
+ split2 "^3.1.1"
+ strip-json-comments "^3.1.1"
+
+pino-std-serializers@^3.1.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/pino-std-serializers/-/pino-std-serializers-3.2.0.tgz#b56487c402d882eb96cd67c257868016b61ad671"
+ integrity sha512-EqX4pwDPrt3MuOAAUBMU0Tk5kR/YcCM5fNPEzgCO2zJ5HfX0vbiH9HbJglnyeQsN96Kznae6MWD47pZB5avTrg==
+
+pino@^6.11.0, pino@^6.5.1:
+ version "6.14.0"
+ resolved "https://registry.yarnpkg.com/pino/-/pino-6.14.0.tgz#b745ea87a99a6c4c9b374e4f29ca7910d4c69f78"
+ integrity sha512-iuhEDel3Z3hF9Jfe44DPXR8l07bhjuFY3GMHIXbjnY9XcafbyDDwl2sN2vw2GjMPf5Nkoe+OFao7ffn9SXaKDg==
+ dependencies:
+ fast-redact "^3.0.0"
+ fast-safe-stringify "^2.0.8"
+ flatstr "^1.0.12"
+ pino-std-serializers "^3.1.0"
+ process-warning "^1.0.0"
+ quick-format-unescaped "^4.0.3"
+ sonic-boom "^1.0.2"
+
+pirates@^4.0.4:
+ version "4.0.5"
+ resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b"
+ integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==
+
+pkg-dir@^4.2.0:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3"
+ integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==
+ dependencies:
+ find-up "^4.0.0"
+
+pkginfo@^0.4.1:
+ version "0.4.1"
+ resolved "https://registry.yarnpkg.com/pkginfo/-/pkginfo-0.4.1.tgz#b5418ef0439de5425fc4995042dced14fb2a84ff"
+ integrity sha512-8xCNE/aT/EXKenuMDZ+xTVwkT8gsoHN2z/Q29l80u0ppGEXVvsKRzNMbtKhg8LS8k1tJLAHHylf6p4VFmP6XUQ==
+
+popsicle@^9.2.0:
+ version "9.2.0"
+ resolved "https://registry.yarnpkg.com/popsicle/-/popsicle-9.2.0.tgz#adc9fc808644739b360ff063426545cdfe58e0b4"
+ integrity sha512-petRj39w05GvH1WKuGFmzxR9+k+R9E7zX5XWTFee7P/qf88hMuLT7aAO/RsmldpQMtJsWQISkTQlfMRECKlxhw==
+ dependencies:
+ concat-stream "^1.4.7"
+ form-data "^2.0.0"
+ make-error-cause "^1.2.1"
+ tough-cookie "^2.0.0"
+
+prelude-ls@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54"
+ integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==
+
+prettier@^2.5.1:
+ version "2.7.1"
+ resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.7.1.tgz#e235806850d057f97bb08368a4f7d899f7760c64"
+ integrity sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==
+
+pretty-format@^27.5.1:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-27.5.1.tgz#2181879fdea51a7a5851fb39d920faa63f01d88e"
+ integrity sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==
+ dependencies:
+ ansi-regex "^5.0.1"
+ ansi-styles "^5.0.0"
+ react-is "^17.0.1"
+
+process-nextick-args@~2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
+ integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
+
+process-warning@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/process-warning/-/process-warning-1.0.0.tgz#980a0b25dc38cd6034181be4b7726d89066b4616"
+ integrity sha512-du4wfLyj4yCZq1VupnVSZmRsPJsNuxoDQFdCFHLaYiEbFBD7QE0a+I4D7hOxrVnh78QE/YipFAj9lXHiXocV+Q==
+
+prompts@^2.0.1:
+ version "2.4.2"
+ resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069"
+ integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==
+ dependencies:
+ kleur "^3.0.3"
+ sisteransi "^1.0.5"
+
+proxy-addr@~2.0.7:
+ version "2.0.7"
+ resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025"
+ integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==
+ dependencies:
+ forwarded "0.2.0"
+ ipaddr.js "1.9.1"
+
+psl@^1.1.28, psl@^1.1.33:
+ version "1.8.0"
+ resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24"
+ integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==
+
+pump@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
+ integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
+ dependencies:
+ end-of-stream "^1.1.0"
+ once "^1.3.1"
+
+punycode@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
+ integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
+
+q@1.5.1:
+ version "1.5.1"
+ resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7"
+ integrity sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw==
+
+qs@6.10.3:
+ version "6.10.3"
+ resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e"
+ integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==
+ dependencies:
+ side-channel "^1.0.4"
+
+quick-format-unescaped@^4.0.3:
+ version "4.0.4"
+ resolved "https://registry.yarnpkg.com/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz#93ef6dd8d3453cbc7970dd614fad4c5954d6b5a7"
+ integrity sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==
+
+range-parser@~1.2.1:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031"
+ integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==
+
+raw-body@2.5.1:
+ version "2.5.1"
+ resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857"
+ integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==
+ dependencies:
+ bytes "3.1.2"
+ http-errors "2.0.0"
+ iconv-lite "0.4.24"
+ unpipe "1.0.0"
+
+react-is@^17.0.1:
+ version "17.0.2"
+ resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0"
+ integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==
+
+readable-stream@^2.0.2, readable-stream@^2.2.2, readable-stream@~2.3.6:
+ version "2.3.7"
+ resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
+ integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
+ dependencies:
+ core-util-is "~1.0.0"
+ inherits "~2.0.3"
+ isarray "~1.0.0"
+ process-nextick-args "~2.0.0"
+ safe-buffer "~5.1.1"
+ string_decoder "~1.1.1"
+ util-deprecate "~1.0.1"
+
+readable-stream@^3.0.0, readable-stream@^3.6.0:
+ version "3.6.0"
+ resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
+ integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
+ dependencies:
+ inherits "^2.0.3"
+ string_decoder "^1.1.1"
+ util-deprecate "^1.0.1"
+
+regenerate-unicode-properties@^10.0.1:
+ version "10.0.1"
+ resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.0.1.tgz#7f442732aa7934a3740c779bb9b3340dccc1fb56"
+ integrity sha512-vn5DU6yg6h8hP/2OkQo3K7uVILvY4iu0oI4t3HFa81UPkhGJwkRwM10JEc3upjdhHjs/k8GJY1sRBhk5sr69Bw==
+ dependencies:
+ regenerate "^1.4.2"
+
+regenerate@^1.4.2:
+ version "1.4.2"
+ resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a"
+ integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==
+
+regenerator-runtime@^0.13.4:
+ version "0.13.9"
+ resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52"
+ integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==
+
+regenerator-transform@^0.15.0:
+ version "0.15.0"
+ resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.15.0.tgz#cbd9ead5d77fae1a48d957cf889ad0586adb6537"
+ integrity sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg==
+ dependencies:
+ "@babel/runtime" "^7.8.4"
+
+regexpu-core@^5.0.1:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.0.1.tgz#c531122a7840de743dcf9c83e923b5560323ced3"
+ integrity sha512-CriEZlrKK9VJw/xQGJpQM5rY88BtuL8DM+AEwvcThHilbxiTAy8vq4iJnd2tqq8wLmjbGZzP7ZcKFjbGkmEFrw==
+ dependencies:
+ regenerate "^1.4.2"
+ regenerate-unicode-properties "^10.0.1"
+ regjsgen "^0.6.0"
+ regjsparser "^0.8.2"
+ unicode-match-property-ecmascript "^2.0.0"
+ unicode-match-property-value-ecmascript "^2.0.0"
+
+regjsgen@^0.6.0:
+ version "0.6.0"
+ resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.6.0.tgz#83414c5354afd7d6627b16af5f10f41c4e71808d"
+ integrity sha512-ozE883Uigtqj3bx7OhL1KNbCzGyW2NQZPl6Hs09WTvCuZD5sTI4JY58bkbQWa/Y9hxIsvJ3M8Nbf7j54IqeZbA==
+
+regjsparser@^0.8.2:
+ version "0.8.4"
+ resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.8.4.tgz#8a14285ffcc5de78c5b95d62bbf413b6bc132d5f"
+ integrity sha512-J3LABycON/VNEu3abOviqGHuB/LOtOQj8SKmfP9anY5GfAVw/SPjwzSjxGjbZXIxbGfqTHtJw58C2Li/WkStmA==
+ dependencies:
+ jsesc "~0.5.0"
+
+remove-trailing-separator@^1.0.1:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef"
+ integrity sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw==
+
+require-directory@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
+ integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==
+
+requires-port@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff"
+ integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==
+
+resolve-cwd@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d"
+ integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==
+ dependencies:
+ resolve-from "^5.0.0"
+
+resolve-from@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69"
+ integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==
+
+resolve.exports@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9"
+ integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ==
+
+resolve@^1.14.2, resolve@^1.20.0:
+ version "1.22.0"
+ resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.0.tgz#5e0b8c67c15df57a89bdbabe603a002f21731198"
+ integrity sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==
+ dependencies:
+ is-core-module "^2.8.1"
+ path-parse "^1.0.7"
+ supports-preserve-symlinks-flag "^1.0.0"
+
+rfdc@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.0.tgz#d0b7c441ab2720d05dc4cf26e01c89631d9da08b"
+ integrity sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==
+
+rimraf@2:
+ version "2.7.1"
+ resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec"
+ integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==
+ dependencies:
+ glob "^7.1.3"
+
+rimraf@2.6.2:
+ version "2.6.2"
+ resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.2.tgz#2ed8150d24a16ea8651e6d6ef0f47c4158ce7a36"
+ integrity sha512-lreewLK/BlghmxtfH36YYVg1i8IAce4TI7oao75I1g245+6BctqTVQiBP3YUJ9C6DQOXJmkYR9X9fCLtCOJc5w==
+ dependencies:
+ glob "^7.0.5"
+
+rimraf@^3.0.0:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a"
+ integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==
+ dependencies:
+ glob "^7.1.3"
+
+safe-buffer@5.2.1, safe-buffer@~5.2.0:
+ version "5.2.1"
+ resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
+ integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
+
+safe-buffer@~5.1.0, safe-buffer@~5.1.1:
+ version "5.1.2"
+ resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
+ integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
+
+"safer-buffer@>= 2.1.2 < 3":
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
+ integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
+
+sax@^1.2.4:
+ version "1.2.4"
+ resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
+ integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==
+
+saxes@^5.0.1:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d"
+ integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw==
+ dependencies:
+ xmlchars "^2.2.0"
+
+semver@7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e"
+ integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==
+
+semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0:
+ version "6.3.0"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
+ integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==
+
+semver@^7.3.2:
+ version "7.3.7"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f"
+ integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==
+ dependencies:
+ lru-cache "^6.0.0"
+
+send@0.18.0:
+ version "0.18.0"
+ resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be"
+ integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==
+ dependencies:
+ debug "2.6.9"
+ depd "2.0.0"
+ destroy "1.2.0"
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ etag "~1.8.1"
+ fresh "0.5.2"
+ http-errors "2.0.0"
+ mime "1.6.0"
+ ms "2.1.3"
+ on-finished "2.4.1"
+ range-parser "~1.2.1"
+ statuses "2.0.1"
+
+serve-static@1.15.0:
+ version "1.15.0"
+ resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540"
+ integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==
+ dependencies:
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ parseurl "~1.3.3"
+ send "0.18.0"
+
+setimmediate@~1.0.4:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285"
+ integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==
+
+setprototypeof@1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424"
+ integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==
+
+shebang-command@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea"
+ integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==
+ dependencies:
+ shebang-regex "^3.0.0"
+
+shebang-regex@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172"
+ integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==
+
+side-channel@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf"
+ integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==
+ dependencies:
+ call-bind "^1.0.0"
+ get-intrinsic "^1.0.2"
+ object-inspect "^1.9.0"
+
+signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7:
+ version "3.0.7"
+ resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9"
+ integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==
+
+sisteransi@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed"
+ integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==
+
+slash@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634"
+ integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==
+
+sonic-boom@^1.0.2:
+ version "1.4.1"
+ resolved "https://registry.yarnpkg.com/sonic-boom/-/sonic-boom-1.4.1.tgz#d35d6a74076624f12e6f917ade7b9d75e918f53e"
+ integrity sha512-LRHh/A8tpW7ru89lrlkU4AszXt1dbwSjVWguGrmlxE7tawVmDBlI1PILMkXAxJTwqhgsEeTHzj36D5CmHgQmNg==
+ dependencies:
+ atomic-sleep "^1.0.0"
+ flatstr "^1.0.12"
+
+sonic-boom@^2.1.0:
+ version "2.8.0"
+ resolved "https://registry.yarnpkg.com/sonic-boom/-/sonic-boom-2.8.0.tgz#c1def62a77425090e6ad7516aad8eb402e047611"
+ integrity sha512-kuonw1YOYYNOve5iHdSahXPOK49GqwA+LZhI6Wz/l0rP57iKyXXIHaRagOBHAPmGwJC6od2Z9zgvZ5loSgMlVg==
+ dependencies:
+ atomic-sleep "^1.0.0"
+
+source-map-support@^0.5.6:
+ version "0.5.21"
+ resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f"
+ integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==
+ dependencies:
+ buffer-from "^1.0.0"
+ source-map "^0.6.0"
+
+source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1:
+ version "0.6.1"
+ resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
+ integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
+
+source-map@^0.7.3:
+ version "0.7.4"
+ resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656"
+ integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==
+
+split2@^3.1.1:
+ version "3.2.2"
+ resolved "https://registry.yarnpkg.com/split2/-/split2-3.2.2.tgz#bf2cf2a37d838312c249c89206fd7a17dd12365f"
+ integrity sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==
+ dependencies:
+ readable-stream "^3.0.0"
+
+sprintf-js@~1.0.2:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
+ integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==
+
+stack-utils@^2.0.3:
+ version "2.0.5"
+ resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5"
+ integrity sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA==
+ dependencies:
+ escape-string-regexp "^2.0.0"
+
+statuses@2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63"
+ integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==
+
+string-length@^4.0.1:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a"
+ integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==
+ dependencies:
+ char-regex "^1.0.2"
+ strip-ansi "^6.0.0"
+
+string-width@^4.1.0, string-width@^4.2.0:
+ version "4.2.3"
+ resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
+ integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
+ dependencies:
+ emoji-regex "^8.0.0"
+ is-fullwidth-code-point "^3.0.0"
+ strip-ansi "^6.0.1"
+
+string_decoder@^1.1.1:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
+ integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
+ dependencies:
+ safe-buffer "~5.2.0"
+
+string_decoder@~1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
+ integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
+ dependencies:
+ safe-buffer "~5.1.0"
+
+strip-ansi@^6.0.0, strip-ansi@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
+ integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
+ dependencies:
+ ansi-regex "^5.0.1"
+
+strip-bom@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878"
+ integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==
+
+strip-final-newline@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad"
+ integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==
+
+strip-json-comments@^3.1.1:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
+ integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
+
+sumchecker@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/sumchecker/-/sumchecker-2.0.2.tgz#0f42c10e5d05da5d42eea3e56c3399a37d6c5b3e"
+ integrity sha512-16O54scwFPgX60Of/+QJSufmklGqnHZyBK6uewBvtcp3VxT5RM65c/OnGCeEPnjBF8TJoO5Pf6gHAOXfxIjNpA==
+ dependencies:
+ debug "^2.2.0"
+
+supports-color@^5.2.0, supports-color@^5.3.0:
+ version "5.5.0"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
+ integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==
+ dependencies:
+ has-flag "^3.0.0"
+
+supports-color@^7.0.0, supports-color@^7.1.0:
+ version "7.2.0"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da"
+ integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==
+ dependencies:
+ has-flag "^4.0.0"
+
+supports-color@^8.0.0:
+ version "8.1.1"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c"
+ integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==
+ dependencies:
+ has-flag "^4.0.0"
+
+supports-hyperlinks@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz#4f77b42488765891774b70c79babd87f9bd594bb"
+ integrity sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ==
+ dependencies:
+ has-flag "^4.0.0"
+ supports-color "^7.0.0"
+
+supports-preserve-symlinks-flag@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09"
+ integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==
+
+symbol-tree@^3.2.4:
+ version "3.2.4"
+ resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2"
+ integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==
+
+tar@^6.1.11:
+ version "6.1.11"
+ resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.11.tgz#6760a38f003afa1b2ffd0ffe9e9abbd0eab3d621"
+ integrity sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==
+ dependencies:
+ chownr "^2.0.0"
+ fs-minipass "^2.0.0"
+ minipass "^3.0.0"
+ minizlib "^2.1.1"
+ mkdirp "^1.0.3"
+ yallist "^4.0.0"
+
+terminal-link@^2.0.0:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994"
+ integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ==
+ dependencies:
+ ansi-escapes "^4.2.1"
+ supports-hyperlinks "^2.0.0"
+
+test-exclude@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e"
+ integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==
+ dependencies:
+ "@istanbuljs/schema" "^0.1.2"
+ glob "^7.1.4"
+ minimatch "^3.0.4"
+
+throat@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375"
+ integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w==
+
+timers-ext@^0.1.5, timers-ext@^0.1.7:
+ version "0.1.7"
+ resolved "https://registry.yarnpkg.com/timers-ext/-/timers-ext-0.1.7.tgz#6f57ad8578e07a3fb9f91d9387d65647555e25c6"
+ integrity sha512-b85NUNzTSdodShTIbky6ZF02e8STtVVfD+fu4aXXShEELpozH+bCpJLYMPZbsABN2wDH7fJpqIoXxJpzbf0NqQ==
+ dependencies:
+ es5-ext "~0.10.46"
+ next-tick "1"
+
+tmpl@1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
+ integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==
+
+to-fast-properties@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e"
+ integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==
+
+to-regex-range@^5.0.1:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
+ integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==
+ dependencies:
+ is-number "^7.0.0"
+
+toidentifier@1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35"
+ integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==
+
+tough-cookie@^2.0.0:
+ version "2.5.0"
+ resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2"
+ integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==
+ dependencies:
+ psl "^1.1.28"
+ punycode "^2.1.1"
+
+tough-cookie@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.0.0.tgz#d822234eeca882f991f0f908824ad2622ddbece4"
+ integrity sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==
+ dependencies:
+ psl "^1.1.33"
+ punycode "^2.1.1"
+ universalify "^0.1.2"
+
+tr46@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240"
+ integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw==
+ dependencies:
+ punycode "^2.1.1"
+
+"traverse@>=0.3.0 <0.4":
+ version "0.3.9"
+ resolved "https://registry.yarnpkg.com/traverse/-/traverse-0.3.9.tgz#717b8f220cc0bb7b44e40514c22b2e8bbc70d8b9"
+ integrity sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ==
+
+tslib@^2.1.0:
+ version "2.4.0"
+ resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3"
+ integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==
+
+type-check@~0.3.2:
+ version "0.3.2"
+ resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72"
+ integrity sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==
+ dependencies:
+ prelude-ls "~1.1.2"
+
+type-detect@4.0.8:
+ version "4.0.8"
+ resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c"
+ integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==
+
+type-fest@^0.21.3:
+ version "0.21.3"
+ resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37"
+ integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==
+
+type-is@~1.6.18:
+ version "1.6.18"
+ resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131"
+ integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==
+ dependencies:
+ media-typer "0.3.0"
+ mime-types "~2.1.24"
+
+type@^1.0.1:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0"
+ integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==
+
+type@^2.5.0:
+ version "2.6.0"
+ resolved "https://registry.yarnpkg.com/type/-/type-2.6.0.tgz#3ca6099af5981d36ca86b78442973694278a219f"
+ integrity sha512-eiDBDOmkih5pMbo9OqsqPRGMljLodLcwd5XD5JbtNB0o89xZAwynY9EdCDsJU7LtcVCClu9DvM7/0Ep1hYX3EQ==
+
+typedarray-to-buffer@^3.1.5:
+ version "3.1.5"
+ resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080"
+ integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==
+ dependencies:
+ is-typedarray "^1.0.0"
+
+typedarray@^0.0.6:
+ version "0.0.6"
+ resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777"
+ integrity sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==
+
+underscore@1.12.1:
+ version "1.12.1"
+ resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.12.1.tgz#7bb8cc9b3d397e201cf8553336d262544ead829e"
+ integrity sha512-hEQt0+ZLDVUMhebKxL4x1BTtDY7bavVofhZ9KZ4aI26X9SRaE+Y3m83XUL1UP2jn8ynjndwCCpEHdUG+9pP1Tw==
+
+unicode-canonical-property-names-ecmascript@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc"
+ integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==
+
+unicode-match-property-ecmascript@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3"
+ integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==
+ dependencies:
+ unicode-canonical-property-names-ecmascript "^2.0.0"
+ unicode-property-aliases-ecmascript "^2.0.0"
+
+unicode-match-property-value-ecmascript@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz#1a01aa57247c14c568b89775a54938788189a714"
+ integrity sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw==
+
+unicode-property-aliases-ecmascript@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz#0a36cb9a585c4f6abd51ad1deddb285c165297c8"
+ integrity sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ==
+
+universalify@^0.1.2:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66"
+ integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==
+
+unixify@1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/unixify/-/unixify-1.0.0.tgz#3a641c8c2ffbce4da683a5c70f03a462940c2090"
+ integrity sha512-6bc58dPYhCMHHuwxldQxO3RRNZ4eCogZ/st++0+fcC1nr0jiGUtAdBJ2qzmLQWSxbtz42pWt4QQMiZ9HvZf5cg==
+ dependencies:
+ normalize-path "^2.1.1"
+
+unpipe@1.0.0, unpipe@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
+ integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==
+
+unzipper@^0.10.10:
+ version "0.10.11"
+ resolved "https://registry.yarnpkg.com/unzipper/-/unzipper-0.10.11.tgz#0b4991446472cbdb92ee7403909f26c2419c782e"
+ integrity sha512-+BrAq2oFqWod5IESRjL3S8baohbevGcVA+teAIOYWM3pDVdseogqbzhhvvmiyQrUNKFUnDMtELW3X8ykbyDCJw==
+ dependencies:
+ big-integer "^1.6.17"
+ binary "~0.3.0"
+ bluebird "~3.4.1"
+ buffer-indexof-polyfill "~1.0.0"
+ duplexer2 "~0.1.4"
+ fstream "^1.0.12"
+ graceful-fs "^4.2.2"
+ listenercount "~1.0.1"
+ readable-stream "~2.3.6"
+ setimmediate "~1.0.4"
+
+url-join@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/url-join/-/url-join-4.0.1.tgz#b642e21a2646808ffa178c4c5fda39844e12cde7"
+ integrity sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==
+
+util-deprecate@^1.0.1, util-deprecate@~1.0.1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
+ integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
+
+utils-merge@1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
+ integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==
+
+v8-to-istanbul@^8.1.0:
+ version "8.1.1"
+ resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz#77b752fd3975e31bbcef938f85e9bd1c7a8d60ed"
+ integrity sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w==
+ dependencies:
+ "@types/istanbul-lib-coverage" "^2.0.1"
+ convert-source-map "^1.6.0"
+ source-map "^0.7.3"
+
+vary@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc"
+ integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==
+
+w3c-hr-time@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd"
+ integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ==
+ dependencies:
+ browser-process-hrtime "^1.0.0"
+
+w3c-xmlserializer@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a"
+ integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA==
+ dependencies:
+ xml-name-validator "^3.0.0"
+
+walker@^1.0.7, walker@^1.0.8:
+ version "1.0.8"
+ resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f"
+ integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==
+ dependencies:
+ makeerror "1.0.12"
+
+webidl-conversions@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff"
+ integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA==
+
+webidl-conversions@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514"
+ integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w==
+
+whatwg-encoding@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0"
+ integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw==
+ dependencies:
+ iconv-lite "0.4.24"
+
+whatwg-mimetype@^2.3.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf"
+ integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==
+
+whatwg-url@^8.0.0, whatwg-url@^8.5.0:
+ version "8.7.0"
+ resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77"
+ integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg==
+ dependencies:
+ lodash "^4.7.0"
+ tr46 "^2.1.0"
+ webidl-conversions "^6.1.0"
+
+which@^2.0.1:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1"
+ integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==
+ dependencies:
+ isexe "^2.0.0"
+
+word-wrap@~1.2.3:
+ version "1.2.3"
+ resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c"
+ integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==
+
+wrap-ansi@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
+ integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==
+ dependencies:
+ ansi-styles "^4.0.0"
+ string-width "^4.1.0"
+ strip-ansi "^6.0.0"
+
+wrappy@1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
+ integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==
+
+write-file-atomic@^3.0.0:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8"
+ integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==
+ dependencies:
+ imurmurhash "^0.1.4"
+ is-typedarray "^1.0.0"
+ signal-exit "^3.0.2"
+ typedarray-to-buffer "^3.1.5"
+
+write-file-atomic@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-4.0.1.tgz#9faa33a964c1c85ff6f849b80b42a88c2c537c8f"
+ integrity sha512-nSKUxgAbyioruk6hU87QzVbY279oYT6uiwgDoujth2ju4mJ+TZau7SQBhtbTmUyuNYTuXnSyRn66FV0+eCgcrQ==
+ dependencies:
+ imurmurhash "^0.1.4"
+ signal-exit "^3.0.7"
+
+ws@^7.4.6:
+ version "7.5.8"
+ resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.8.tgz#ac2729881ab9e7cbaf8787fe3469a48c5c7f636a"
+ integrity sha512-ri1Id1WinAX5Jqn9HejiGb8crfRio0Qgu8+MtL36rlTA6RLsMdWt1Az/19A2Qij6uSHUMphEFaTKa4WG+UNHNw==
+
+xml-name-validator@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a"
+ integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw==
+
+xmlchars@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb"
+ integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==
+
+y18n@^5.0.5:
+ version "5.0.8"
+ resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55"
+ integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==
+
+yallist@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72"
+ integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
+
+yargs-parser@^20.2.2:
+ version "20.2.9"
+ resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee"
+ integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==
+
+yargs@^16.2.0:
+ version "16.2.0"
+ resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66"
+ integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==
+ dependencies:
+ cliui "^7.0.2"
+ escalade "^3.1.1"
+ get-caller-file "^2.0.5"
+ require-directory "^2.1.1"
+ string-width "^4.2.0"
+ y18n "^5.0.5"
+ yargs-parser "^20.2.2"
diff --git a/spec/contracts/contracts/project/pipeline/index/pipelines#index-get_list_project_pipelines.json b/spec/contracts/contracts/project/pipeline/index/pipelines#index-get_list_project_pipelines.json
new file mode 100644
index 00000000000..b725ae400a7
--- /dev/null
+++ b/spec/contracts/contracts/project/pipeline/index/pipelines#index-get_list_project_pipelines.json
@@ -0,0 +1,472 @@
+{
+ "consumer": {
+ "name": "Pipelines#index"
+ },
+ "provider": {
+ "name": "GET List project pipelines"
+ },
+ "interactions": [
+ {
+ "description": "a request for a list of project pipelines",
+ "providerState": "a few pipelines for a project exists",
+ "request": {
+ "method": "GET",
+ "path": "/gitlab-org/gitlab-qa/-/pipelines.json",
+ "query": "scope=all&page=1",
+ "headers": {
+ "Accept": "*/*"
+ }
+ },
+ "response": {
+ "status": 200,
+ "headers": {
+ "Content-Type": "application/json; charset=utf-8"
+ },
+ "body": {
+ "pipelines": [
+ {
+ "id": 564173401,
+ "iid": 8197225,
+ "user": {
+ "id": 1781152,
+ "username": "gitlab-bot",
+ "name": "🤖 GitLab Bot 🤖",
+ "state": "active",
+ "avatar_url": "https://gitlab.com/uploads/-/system/user/avatar/1516152/avatar.png",
+ "web_url": "https://gitlab.com/gitlab-bot",
+ "show_status": false,
+ "path": "/gitlab-bot"
+ },
+ "active": true,
+ "source": "schedule",
+ "created_at": "2022-06-11T00:05:21.558Z",
+ "updated_at": "2022-06-11T00:05:34.258Z",
+ "path": "/gitlab-org/gitlab/-/pipelines/561224401",
+ "flags": {
+ "stuck": false,
+ "auto_devops": false,
+ "merge_request": false,
+ "yaml_errors": false,
+ "retryable": false,
+ "cancelable": false,
+ "failure_reason": false,
+ "detached_merge_request_pipeline": false,
+ "merge_request_pipeline": false,
+ "merge_train_pipeline": false,
+ "latest": true
+ },
+ "details": {
+ "status": {
+ "icon": "status_running",
+ "text": "running",
+ "label": "running",
+ "group": "running",
+ "tooltip": "passed",
+ "has_details": true,
+ "details_path": "/gitlab-org/gitlab/-/pipelines/566374401",
+ "illustration": null,
+ "favicon": "/assets/ci_favicons/favicon_status_running.png"
+ },
+ "stages": [
+ {
+ "name": "sync",
+ "title": "sync: passed",
+ "status": {
+ "icon": "status_success",
+ "text": "passed",
+ "label": "passed",
+ "group": "success",
+ "tooltip": "passed",
+ "has_details": true,
+ "details_path": "/gitlab-org/gitlab/-/pipelines/561174401#sync",
+ "illustration": null,
+ "favicon": "/assets/ci_favicons/favicon_status_success.png"
+ },
+ "path": "/gitlab-org/gitlab/-/pipelines/561124401#sync",
+ "dropdown_path": "/gitlab-org/gitlab/-/pipelines/561174401/stage.json?stage=sync"
+ }
+ ],
+ "duration": 25,
+ "finished_at": "2022-06-11T00:55:21.558Z",
+ "name": "Pipeline",
+ "manual_actions": [
+ {
+ "name": "review-docs-deploy",
+ "playable": true,
+ "scheduled": false
+ }
+ ],
+ "scheduled_actions": [
+ {
+ "name": "review-docs-schedule",
+ "playable": true,
+ "scheduled": false
+ }
+ ]
+ },
+ "ref": {
+ "name": "master",
+ "path": "/gitlab-org/gitlab/-/commits/master",
+ "tag": false,
+ "branch": true,
+ "merge_request": false
+ },
+ "commit": {
+ "id": "e6d797385144b955c6d4ecfa00e9656dc33efd2b",
+ "short_id": "e6d79738",
+ "created_at": "2022-06-10T22:02:10.000+00:00",
+ "parent_ids": [
+ "3b0e053a24958174eaa7e3b183c7263432890d1c"
+ ],
+ "title": "Merge branch 'ee-test' into 'master'",
+ "message": "Merge branch 'ee-test' into 'master'\nThis is a test.",
+ "author_name": "John Doe",
+ "author_email": "jdoe@gitlab.com",
+ "authored_date": "2022-06-10T22:02:10.000+00:00",
+ "committer_name": "John Doe",
+ "committer_email": "jdoe@gitlab.com",
+ "committed_date": "2022-06-10T22:02:10.000+00:00",
+ "trailers": {
+ },
+ "web_url": "https://gitlab.com/gitlab-org/gitlab/-/commit/f559253c514d9ab707c66e",
+ "author": null,
+ "author_gravatar_url": "https://secure.gravatar.com/avatar/d85e45af29611ac2c1395e3c3d6ec5d6?s=80&d=identicon",
+ "commit_url": "https://gitlab.com/gitlab-org/gitlab/-/commit/dc7522f559253c514d9ab707c66e7a1026abca5a",
+ "commit_path": "/gitlab-org/gitlab/-/commit/dc7522f559253c514d9ab707c66e7a1026abca5a"
+ },
+ "project": {
+ "id": 253964,
+ "name": "GitLab",
+ "full_path": "/gitlab-org/gitlab",
+ "full_name": "GitLab.org / GitLab"
+ },
+ "triggered_by": null,
+ "triggered": [
+
+ ]
+ }
+ ],
+ "count": {
+ "all": "1,000+"
+ }
+ },
+ "matchingRules": {
+ "$.body.pipelines": {
+ "min": 1
+ },
+ "$.body.pipelines[*].*": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].id": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].iid": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].user.id": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].user.username": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].user.name": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].user.state": {
+ "match": "regex",
+ "regex": "^(active|blocked)$"
+ },
+ "$.body.pipelines[*].user.avatar_url": {
+ "match": "regex",
+ "regex": "^(http|https):\\/\\/[a-z0-9]+([-.]{1}[a-z0-9]+)*.[a-z]{2,5}(:[0-9]{1,5})?(\\/.*)?$"
+ },
+ "$.body.pipelines[*].user.web_url": {
+ "match": "regex",
+ "regex": "^(http|https):\\/\\/[a-z0-9]+([-.]{1}[a-z0-9]+)*.[a-z]{2,5}(:[0-9]{1,5})?(\\/.*)?$"
+ },
+ "$.body.pipelines[*].user.show_status": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].user.path": {
+ "match": "regex",
+ "regex": "^\\/[a-zA-Z0-9#-=?_]+$"
+ },
+ "$.body.pipelines[*].active": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].source": {
+ "match": "regex",
+ "regex": "^(push|web|trigger|schedule|api|external|pipeline|chat|webide|merge_request_event|external_pull_request_event|parent_pipeline|ondemand_dast_scan|ondemand_dast_validation)$"
+ },
+ "$.body.pipelines[*].created_at": {
+ "match": "regex",
+ "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d(:?[0-5]\\d)?|Z)$"
+ },
+ "$.body.pipelines[*].updated_at": {
+ "match": "regex",
+ "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d(:?[0-5]\\d)?|Z)$"
+ },
+ "$.body.pipelines[*].path": {
+ "match": "regex",
+ "regex": "^\\/[a-zA-Z0-9#-=?_]+$"
+ },
+ "$.body.pipelines[*].flags.stuck": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].flags.auto_devops": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].flags.merge_request": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].flags.yaml_errors": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].flags.retryable": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].flags.cancelable": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].flags.failure_reason": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].flags.detached_merge_request_pipeline": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].flags.merge_request_pipeline": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].flags.merge_train_pipeline": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].flags.latest": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].details.status.icon": {
+ "match": "regex",
+ "regex": "^status_(canceled|created|failed|manual|pending|preparing|running|scheduled|skipped|success|warning)$"
+ },
+ "$.body.pipelines[*].details.status.text": {
+ "match": "regex",
+ "regex": "^(canceled|created|delayed|failed|manual|passed|pending|preparing|running|skipped|waiting)$"
+ },
+ "$.body.pipelines[*].details.status.label": {
+ "match": "regex",
+ "regex": "^(canceled|created|delayed|failed|manual action|passed|pending|preparing|running|skipped|passed with warnings|waiting for resource)$"
+ },
+ "$.body.pipelines[*].details.status.group": {
+ "match": "regex",
+ "regex": "^(canceled|created|failed|manual|pending|preparing|running|scheduled|skipped|success|success_warning|waiting-for-resource)$"
+ },
+ "$.body.pipelines[*].details.status.tooltip": {
+ "match": "regex",
+ "regex": "^(canceled|created|delayed|failed|manual action|passed|pending|preparing|running|skipped|passed with warnings|waiting for resource)$"
+ },
+ "$.body.pipelines[*].details.status.has_details": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].details.status.details_path": {
+ "match": "regex",
+ "regex": "^\\/[a-zA-Z0-9#-=?_]+$"
+ },
+ "$.body.pipelines[*].details.status.favicon": {
+ "match": "regex",
+ "regex": "^\\/[a-zA-Z0-9#-=?_]+$"
+ },
+ "$.body.pipelines[*].details.stages": {
+ "min": 1
+ },
+ "$.body.pipelines[*].details.stages[*].*": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].details.stages[*].name": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].details.stages[*].title": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].details.stages[*].status.icon": {
+ "match": "regex",
+ "regex": "^status_(canceled|created|failed|manual|pending|preparing|running|scheduled|skipped|success|warning)$"
+ },
+ "$.body.pipelines[*].details.stages[*].status.text": {
+ "match": "regex",
+ "regex": "^(canceled|created|delayed|failed|manual|passed|pending|preparing|running|skipped|waiting)$"
+ },
+ "$.body.pipelines[*].details.stages[*].status.label": {
+ "match": "regex",
+ "regex": "^(canceled|created|delayed|failed|manual action|passed|pending|preparing|running|skipped|passed with warnings|waiting for resource)$"
+ },
+ "$.body.pipelines[*].details.stages[*].status.group": {
+ "match": "regex",
+ "regex": "^(canceled|created|failed|manual|pending|preparing|running|scheduled|skipped|success|success_warning|waiting-for-resource)$"
+ },
+ "$.body.pipelines[*].details.stages[*].status.tooltip": {
+ "match": "regex",
+ "regex": "^(canceled|created|delayed|failed|manual action|passed|pending|preparing|running|skipped|passed with warnings|waiting for resource)$"
+ },
+ "$.body.pipelines[*].details.stages[*].status.has_details": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].details.stages[*].status.details_path": {
+ "match": "regex",
+ "regex": "^\\/[a-zA-Z0-9#-=?_]+$"
+ },
+ "$.body.pipelines[*].details.stages[*].status.favicon": {
+ "match": "regex",
+ "regex": "^\\/[a-zA-Z0-9#-=?_]+$"
+ },
+ "$.body.pipelines[*].details.stages[*].path": {
+ "match": "regex",
+ "regex": "^\\/[a-zA-Z0-9#-=?_]+$"
+ },
+ "$.body.pipelines[*].details.stages[*].dropdown_path": {
+ "match": "regex",
+ "regex": "^\\/[a-zA-Z0-9#-=?_]+$"
+ },
+ "$.body.pipelines[*].details.duration": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].details.finished_at": {
+ "match": "regex",
+ "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d(:?[0-5]\\d)?|Z)$"
+ },
+ "$.body.pipelines[*].details.name": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].details.manual_actions": {
+ "min": 1
+ },
+ "$.body.pipelines[*].details.manual_actions[*].*": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].details.manual_actions[*].name": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].details.manual_actions[*].playable": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].details.manual_actions[*].scheduled": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].details.scheduled_actions": {
+ "min": 1
+ },
+ "$.body.pipelines[*].details.scheduled_actions[*].*": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].details.scheduled_actions[*].name": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].details.scheduled_actions[*].playable": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].details.scheduled_actions[*].scheduled": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].ref.name": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].ref.path": {
+ "match": "regex",
+ "regex": "^\\/[a-zA-Z0-9#-=?_]+$"
+ },
+ "$.body.pipelines[*].ref.tag": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].ref.branch": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].ref.merge_request": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].commit.id": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].commit.short_id": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].commit.created_at": {
+ "match": "regex",
+ "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d(:?[0-5]\\d)?|Z)$"
+ },
+ "$.body.pipelines[*].commit.parent_ids": {
+ "min": 1
+ },
+ "$.body.pipelines[*].commit.parent_ids[*].*": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].commit.parent_ids[*]": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].commit.title": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].commit.message": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].commit.author_name": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].commit.author_email": {
+ "match": "regex",
+ "regex": "^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+.[a-zA-Z]{2,}$"
+ },
+ "$.body.pipelines[*].commit.authored_date": {
+ "match": "regex",
+ "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d(:?[0-5]\\d)?|Z)$"
+ },
+ "$.body.pipelines[*].commit.committer_name": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].commit.committer_email": {
+ "match": "regex",
+ "regex": "^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+.[a-zA-Z]{2,}$"
+ },
+ "$.body.pipelines[*].commit.committed_date": {
+ "match": "regex",
+ "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d(:?[0-5]\\d)?|Z)$"
+ },
+ "$.body.pipelines[*].commit.web_url": {
+ "match": "regex",
+ "regex": "^(http|https):\\/\\/[a-z0-9]+([-.]{1}[a-z0-9]+)*.[a-z]{2,5}(:[0-9]{1,5})?(\\/.*)?$"
+ },
+ "$.body.pipelines[*].commit.author_gravatar_url": {
+ "match": "regex",
+ "regex": "^(http|https):\\/\\/[a-z0-9]+([-.]{1}[a-z0-9]+)*.[a-z]{2,5}(:[0-9]{1,5})?(\\/.*)?$"
+ },
+ "$.body.pipelines[*].commit.commit_url": {
+ "match": "regex",
+ "regex": "^(http|https):\\/\\/[a-z0-9]+([-.]{1}[a-z0-9]+)*.[a-z]{2,5}(:[0-9]{1,5})?(\\/.*)?$"
+ },
+ "$.body.pipelines[*].commit.commit_path": {
+ "match": "regex",
+ "regex": "^\\/[a-zA-Z0-9#-=?_]+$"
+ },
+ "$.body.pipelines[*].project.id": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].project.name": {
+ "match": "type"
+ },
+ "$.body.pipelines[*].project.full_path": {
+ "match": "regex",
+ "regex": "^\\/[a-zA-Z0-9#-=?_]+$"
+ },
+ "$.body.pipelines[*].project.full_name": {
+ "match": "type"
+ },
+ "$.body.count.all": {
+ "match": "type"
+ }
+ }
+ }
+ }
+ ],
+ "metadata": {
+ "pactSpecification": {
+ "version": "2.0.0"
+ }
+ }
+} \ No newline at end of file
diff --git a/spec/contracts/contracts/project/pipeline/show/pipelines#show-get_pipeline_header_data.json b/spec/contracts/contracts/project/pipeline/show/pipelines#show-get_pipeline_header_data.json
new file mode 100644
index 00000000000..2d775dc0f61
--- /dev/null
+++ b/spec/contracts/contracts/project/pipeline/show/pipelines#show-get_pipeline_header_data.json
@@ -0,0 +1,152 @@
+{
+ "consumer": {
+ "name": "Pipelines#show"
+ },
+ "provider": {
+ "name": "GET pipeline header data"
+ },
+ "interactions": [
+ {
+ "description": "a request for the pipeline header data",
+ "providerState": "a pipeline for a project exists",
+ "request": {
+ "method": "POST",
+ "path": "/api/graphql",
+ "headers": {
+ "content-type": "application/json"
+ },
+ "body": {
+ "query": "query getPipelineHeaderData($fullPath: ID!, $iid: ID!) {\n project(fullPath: $fullPath) {\n id\n pipeline(iid: $iid) {\n id\n iid\n status\n retryable\n cancelable\n userPermissions {\n destroyPipeline\n updatePipeline\n }\n detailedStatus {\n id\n detailsPath\n icon\n group\n text\n }\n createdAt\n user {\n id\n name\n username\n webPath\n webUrl\n email\n avatarUrl\n status {\n message\n emoji\n }\n }\n }\n }\n}\n",
+ "variables": {
+ "fullPath": "gitlab-org/gitlab-qa",
+ "iid": 1
+ }
+ },
+ "matchingRules": {
+ "$.body.query": {
+ "match": "regex",
+ "regex": "query\\s*getPipelineHeaderData\\(\\$fullPath:\\s*ID!,\\s*\\$iid:\\s*ID!\\)\\s*\\{\\s*project\\(fullPath:\\s*\\$fullPath\\)\\s*\\{\\s*id\\s*pipeline\\(iid:\\s*\\$iid\\)\\s*\\{\\s*id\\s*iid\\s*status\\s*retryable\\s*cancelable\\s*userPermissions\\s*\\{\\s*destroyPipeline\\s*updatePipeline\\s*\\}\\s*detailedStatus\\s*\\{\\s*id\\s*detailsPath\\s*icon\\s*group\\s*text\\s*\\}\\s*createdAt\\s*user\\s*\\{\\s*id\\s*name\\s*username\\s*webPath\\s*webUrl\\s*email\\s*avatarUrl\\s*status\\s*\\{\\s*message\\s*emoji\\s*\\}\\s*\\}\\s*\\}\\s*\\}\\s*\\}\\s*"
+ }
+ }
+ },
+ "response": {
+ "status": 200,
+ "headers": {
+ "Content-Type": "application/json; charset=utf-8"
+ },
+ "body": {
+ "data": {
+ "project": {
+ "id": "gid://gitlab/Project/278964",
+ "pipeline": {
+ "id": "gid://gitlab/Ci::Pipeline/577266584",
+ "iid": "1175084",
+ "status": "RUNNING",
+ "retryable": false,
+ "cancelable": true,
+ "userPermissions": {
+ "destroyPipeline": false,
+ "updatePipeline": true
+ },
+ "detailedStatus": {
+ "id": "running-577266584-577266584",
+ "detailsPath": "/gitlab-org/gitlab/-/pipelines/577266584",
+ "icon": "status_running",
+ "group": "running",
+ "text": "running"
+ },
+ "createdAt": "2022-06-30T16:58:59Z",
+ "user": {
+ "id": "gid://gitlab/User/194645",
+ "name": "John Doe",
+ "username": "jdoe",
+ "webPath": "/gitlab-bot",
+ "webUrl": "https://gitlab.com/gitlab-bot",
+ "email": null,
+ "avatarUrl": "https://www.gravatar.com/avatar/10fc7f102be8de7657fb4d80898bbfe3?s=80&d=identicon",
+ "status": null
+ }
+ }
+ }
+ }
+ },
+ "matchingRules": {
+ "$.body.data.project.id": {
+ "match": "type"
+ },
+ "$.body.data.project.pipeline.id": {
+ "match": "type"
+ },
+ "$.body.data.project.pipeline.iid": {
+ "match": "type"
+ },
+ "$.body.data.project.pipeline.status": {
+ "match": "regex",
+ "regex": "^(CANCELED|CREATED|FAILED|MANUAL|PENDING|PREPARING|RUNNING|SCHEDULED|SKIPPED|SUCCESS|WAITING_FOR_RESOURCE)$"
+ },
+ "$.body.data.project.pipeline.retryable": {
+ "match": "type"
+ },
+ "$.body.data.project.pipeline.cancelable": {
+ "match": "type"
+ },
+ "$.body.data.project.pipeline.userPermissions.destroyPipeline": {
+ "match": "type"
+ },
+ "$.body.data.project.pipeline.userPermissions.updatePipeline": {
+ "match": "type"
+ },
+ "$.body.data.project.pipeline.detailedStatus.id": {
+ "match": "type"
+ },
+ "$.body.data.project.pipeline.detailedStatus.detailsPath": {
+ "match": "regex",
+ "regex": "^\\/[a-zA-Z0-9#-=?_]+$"
+ },
+ "$.body.data.project.pipeline.detailedStatus.icon": {
+ "match": "regex",
+ "regex": "^status_(canceled|created|failed|manual|pending|preparing|running|scheduled|skipped|success|warning)$"
+ },
+ "$.body.data.project.pipeline.detailedStatus.group": {
+ "match": "regex",
+ "regex": "^(canceled|created|failed|manual|pending|preparing|running|scheduled|skipped|success|success_warning|waiting-for-resource)$"
+ },
+ "$.body.data.project.pipeline.detailedStatus.text": {
+ "match": "regex",
+ "regex": "^(canceled|created|delayed|failed|manual|passed|pending|preparing|running|skipped|waiting)$"
+ },
+ "$.body.data.project.pipeline.createdAt": {
+ "match": "regex",
+ "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z)$"
+ },
+ "$.body.data.project.pipeline.user.id": {
+ "match": "type"
+ },
+ "$.body.data.project.pipeline.user.name": {
+ "match": "type"
+ },
+ "$.body.data.project.pipeline.user.username": {
+ "match": "type"
+ },
+ "$.body.data.project.pipeline.user.webPath": {
+ "match": "regex",
+ "regex": "^\\/[a-zA-Z0-9#-=?_]+$"
+ },
+ "$.body.data.project.pipeline.user.webUrl": {
+ "match": "regex",
+ "regex": "^(http|https):\\/\\/[a-z0-9]+([-.]{1}[a-z0-9]+)*.[a-z]{2,5}(:[0-9]{1,5})?(\\/.*)?$"
+ },
+ "$.body.data.project.pipeline.user.avatarUrl": {
+ "match": "regex",
+ "regex": "^(http|https):\\/\\/[a-z0-9]+([-.]{1}[a-z0-9]+)*.[a-z]{2,5}(:[0-9]{1,5})?(\\/.*)?$"
+ }
+ }
+ }
+ }
+ ],
+ "metadata": {
+ "pactSpecification": {
+ "version": "2.0.0"
+ }
+ }
+} \ No newline at end of file
diff --git a/spec/contracts/provider/pact_helpers/project/pipeline/get_list_project_pipelines_helper.rb b/spec/contracts/provider/pact_helpers/project/pipeline/get_list_project_pipelines_helper.rb
new file mode 100644
index 00000000000..5307468b7c6
--- /dev/null
+++ b/spec/contracts/provider/pact_helpers/project/pipeline/get_list_project_pipelines_helper.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require_relative '../../../spec_helper'
+require_relative '../../../states/project/pipeline/pipelines_state'
+
+module Provider
+ module GetListProjectPipelinesHelper
+ Pact.service_provider "GET List project pipelines" do
+ app { Environments::Test.app }
+
+ honours_pact_with 'Pipelines#index' do
+      pact_uri '../contracts/project/pipeline/index/pipelines#index-get_list_project_pipelines.json'
+ end
+ end
+ end
+end
diff --git a/spec/contracts/provider/pact_helpers/project/pipeline/get_pipeline_header_data_helper.rb b/spec/contracts/provider/pact_helpers/project/pipeline/get_pipeline_header_data_helper.rb
new file mode 100644
index 00000000000..abb2781f987
--- /dev/null
+++ b/spec/contracts/provider/pact_helpers/project/pipeline/get_pipeline_header_data_helper.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require_relative '../../../spec_helper'
+require_relative '../../../states/project/pipeline/pipeline_state'
+
+module Provider
+ module GetPipelinesHeaderDataHelper
+ Pact.service_provider "GET pipeline header data" do
+ app { Environments::Test.app }
+
+ honours_pact_with 'Pipelines#show' do
+      pact_uri '../contracts/project/pipeline/show/pipelines#show-get_pipeline_header_data.json'
+ end
+ end
+ end
+end
diff --git a/spec/contracts/provider/states/project/pipeline/pipeline_state.rb b/spec/contracts/provider/states/project/pipeline/pipeline_state.rb
new file mode 100644
index 00000000000..d1a4cd34bdd
--- /dev/null
+++ b/spec/contracts/provider/states/project/pipeline/pipeline_state.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+Pact.provider_states_for "Pipelines#show" do
+ provider_state "a pipeline for a project exists" do
+ set_up do
+ user = User.find_by(name: Provider::UsersHelper::CONTRACT_USER_NAME)
+ namespace = create(:namespace, name: 'gitlab-org')
+ project = create(:project, :repository, name: 'gitlab-qa', namespace: namespace, creator: user)
+ scheduled_job = create(:ci_build, :scheduled)
+ manual_job = create(:ci_build, :manual)
+
+ project.add_maintainer(user)
+
+ create(
+ :ci_pipeline,
+ :with_job,
+ :success,
+ iid: 1,
+ project: project,
+ user: user,
+ duration: 10,
+ finished_at: '2022-06-01T02:47:31.432Z',
+ builds: [scheduled_job, manual_job]
+ )
+ end
+ end
+end
diff --git a/spec/contracts/provider/states/project/pipeline/pipelines_state.rb b/spec/contracts/provider/states/project/pipeline/pipelines_state.rb
new file mode 100644
index 00000000000..639c25e9894
--- /dev/null
+++ b/spec/contracts/provider/states/project/pipeline/pipelines_state.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+Pact.provider_states_for "Pipelines#index" do
+ provider_state "a few pipelines for a project exists" do
+ set_up do
+ user = User.find_by(name: Provider::UsersHelper::CONTRACT_USER_NAME)
+ namespace = create(:namespace, name: 'gitlab-org')
+ project = create(:project, :repository, name: 'gitlab-qa', namespace: namespace, creator: user)
+ scheduled_job = create(:ci_build, :scheduled)
+ manual_job = create(:ci_build, :manual)
+
+ project.add_maintainer(user)
+
+ create(
+ :ci_pipeline,
+ :with_job,
+ :success,
+ project: project,
+ user: user,
+ duration: 10,
+ finished_at: '2022-06-01T02:47:31.432Z',
+ builds: [scheduled_job, manual_job]
+ )
+ end
+ end
+end
diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb
index 4a92911f914..e02589ddc83 100644
--- a/spec/controllers/admin/application_settings_controller_spec.rb
+++ b/spec/controllers/admin/application_settings_controller_spec.rb
@@ -382,6 +382,24 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
end
end
+ describe 'PUT #reset_error_tracking_access_token' do
+ before do
+ sign_in(admin)
+ end
+
+ subject { put :reset_error_tracking_access_token }
+
+ it 'resets error_tracking_access_token' do
+ expect { subject }.to change { ApplicationSetting.current.error_tracking_access_token }
+ end
+
+ it 'redirects the user to application settings page' do
+ subject
+
+ expect(response).to redirect_to(general_admin_application_settings_path)
+ end
+ end
+
describe 'GET #lets_encrypt_terms_of_service' do
include LetsEncryptHelpers
diff --git a/spec/controllers/admin/hooks_controller_spec.rb b/spec/controllers/admin/hooks_controller_spec.rb
index 17c4222530d..14f4a2f40e7 100644
--- a/spec/controllers/admin/hooks_controller_spec.rb
+++ b/spec/controllers/admin/hooks_controller_spec.rb
@@ -17,16 +17,46 @@ RSpec.describe Admin::HooksController do
url: "http://example.com",
push_events: true,
- tag_push_events: true,
+ tag_push_events: false,
repository_update_events: true,
- merge_requests_events: true
+ merge_requests_events: false,
+ url_variables: [{ key: 'token', value: 'some secret value' }]
}
post :create, params: { hook: hook_params }
expect(response).to have_gitlab_http_status(:found)
expect(SystemHook.all.size).to eq(1)
- expect(SystemHook.first).to have_attributes(hook_params)
+ expect(SystemHook.first).to have_attributes(hook_params.except(:url_variables))
+ expect(SystemHook.first).to have_attributes(url_variables: { 'token' => 'some secret value' })
+ end
+ end
+
+  describe 'PUT #update' do
+ let!(:hook) { create(:system_hook) }
+
+ it 'sets all parameters' do
+ hook.update!(url_variables: { 'foo' => 'bar', 'baz' => 'woo' })
+
+ hook_params = {
+ url: 'http://example.com/{baz}?token={token}',
+ enable_ssl_verification: false,
+ url_variables: [
+ { key: 'token', value: 'some secret value' },
+ { key: 'foo', value: nil }
+ ]
+ }
+
+ put :update, params: { id: hook.id, hook: hook_params }
+
+ hook.reload
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(flash[:notice]).to include('successfully updated')
+ expect(hook).to have_attributes(hook_params.except(:url_variables))
+ expect(hook).to have_attributes(
+ url_variables: { 'token' => 'some secret value', 'baz' => 'woo' }
+ )
end
end
diff --git a/spec/controllers/admin/topics_controller_spec.rb b/spec/controllers/admin/topics_controller_spec.rb
index 67943525687..ee36d5f1def 100644
--- a/spec/controllers/admin/topics_controller_spec.rb
+++ b/spec/controllers/admin/topics_controller_spec.rb
@@ -151,4 +151,26 @@ RSpec.describe Admin::TopicsController do
end
end
end
+
+ describe 'DELETE #destroy' do
+ it 'removes topic' do
+ delete :destroy, params: { id: topic.id }
+
+ expect(response).to redirect_to(admin_topics_path)
+ expect { topic.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ context 'as a normal user' do
+ before do
+ sign_in(user)
+ end
+
+ it 'renders a 404 error' do
+ delete :destroy, params: { id: topic.id }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect { topic.reload }.not_to raise_error
+ end
+ end
+ end
end
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index c5306fda0a5..1e28ef4ba93 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -559,6 +559,28 @@ RSpec.describe ApplicationController do
expect(controller.last_payload[:target_duration_s]).to eq(0.25)
end
end
+
+ it 'logs response length' do
+ sign_in user
+
+ get :index
+
+ expect(controller.last_payload[:response_bytes]).to eq('authenticated'.bytesize)
+ end
+
+ context 'with log_response_length disabled' do
+ before do
+ stub_feature_flags(log_response_length: false)
+ end
+
+      it 'does not log response length' do
+ sign_in user
+
+ get :index
+
+ expect(controller.last_payload).not_to include(:response_bytes)
+ end
+ end
end
describe '#access_denied' do
diff --git a/spec/controllers/concerns/harbor/artifact_spec.rb b/spec/controllers/concerns/harbor/artifact_spec.rb
new file mode 100644
index 00000000000..6716d615a3b
--- /dev/null
+++ b/spec/controllers/concerns/harbor/artifact_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Harbor::Artifact do
+ controller(ActionController::Base) do
+ include ::Harbor::Artifact
+ end
+ it_behaves_like 'raises NotImplementedError when calling #container'
+end
diff --git a/spec/controllers/concerns/harbor/repository_spec.rb b/spec/controllers/concerns/harbor/repository_spec.rb
new file mode 100644
index 00000000000..cae038ceed2
--- /dev/null
+++ b/spec/controllers/concerns/harbor/repository_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Harbor::Repository do
+ controller(ActionController::Base) do
+ include ::Harbor::Repository
+ end
+ it_behaves_like 'raises NotImplementedError when calling #container'
+end
diff --git a/spec/controllers/concerns/harbor/tag_spec.rb b/spec/controllers/concerns/harbor/tag_spec.rb
new file mode 100644
index 00000000000..0d72ef303b0
--- /dev/null
+++ b/spec/controllers/concerns/harbor/tag_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Harbor::Tag do
+ controller(ActionController::Base) do
+ include ::Harbor::Tag
+ end
+ it_behaves_like 'raises NotImplementedError when calling #container'
+end
diff --git a/spec/controllers/graphql_controller_spec.rb b/spec/controllers/graphql_controller_spec.rb
index e85f5b7a972..1d2f1085d3c 100644
--- a/spec/controllers/graphql_controller_spec.rb
+++ b/spec/controllers/graphql_controller_spec.rb
@@ -27,6 +27,18 @@ RSpec.describe GraphqlController do
)
end
+ it 'handles a timeout nicely' do
+ allow(subject).to receive(:execute) do
+ raise ActiveRecord::QueryCanceled, '**taps wristwatch**'
+ end
+
+ post :execute
+
+ expect(json_response).to include(
+ 'errors' => include(a_hash_including('message' => /Request timed out/))
+ )
+ end
+
it 'handles StandardError' do
allow(subject).to receive(:execute) do
raise StandardError, message
diff --git a/spec/controllers/groups/group_links_controller_spec.rb b/spec/controllers/groups/group_links_controller_spec.rb
index 28febd786de..7322ca5e522 100644
--- a/spec/controllers/groups/group_links_controller_spec.rb
+++ b/spec/controllers/groups/group_links_controller_spec.rb
@@ -131,8 +131,24 @@ RSpec.describe Groups::GroupLinksController do
expect { subject }.to change(GroupGroupLink, :count).by(-1)
end
- it 'updates project permissions', :sidekiq_inline do
- expect { subject }.to change { group_member.can?(:create_release, project) }.from(true).to(false)
+ context 'with skip_group_share_unlink_auth_refresh feature flag disabled' do
+ before do
+ stub_feature_flags(skip_group_share_unlink_auth_refresh: false)
+ end
+
+ it 'updates project permissions', :sidekiq_inline do
+ expect { subject }.to change { group_member.can?(:create_release, project) }.from(true).to(false)
+ end
+ end
+
+ context 'with skip_group_share_unlink_auth_refresh feature flag enabled' do
+ before do
+ stub_feature_flags(skip_group_share_unlink_auth_refresh: true)
+ end
+
+ it 'maintains project authorization', :sidekiq_inline do
+ expect(Ability.allowed?(user, :read_project, project)).to be_truthy
+ end
end
end
diff --git a/spec/controllers/groups/variables_controller_spec.rb b/spec/controllers/groups/variables_controller_spec.rb
index 8c0aa83b9c4..6dbe75bb1df 100644
--- a/spec/controllers/groups/variables_controller_spec.rb
+++ b/spec/controllers/groups/variables_controller_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Groups::VariablesController do
before do
sign_in(user)
- group.add_user(user, access_level)
+ group.add_member(user, access_level)
end
describe 'GET #show' do
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index aabceda7187..c4e4eeec953 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -1112,9 +1112,11 @@ RSpec.describe GroupsController, factory_default: :keep do
before do
sign_in(admin)
- allow(Gitlab::ApplicationRateLimiter)
- .to receive(:increment)
- .and_return(Gitlab::ApplicationRateLimiter.rate_limits[:group_export][:threshold].call + 1)
+ allow_next_instance_of(Gitlab::ApplicationRateLimiter::BaseStrategy) do |strategy|
+ allow(strategy)
+ .to receive(:increment)
+ .and_return(Gitlab::ApplicationRateLimiter.rate_limits[:group_export][:threshold].call + 1)
+ end
end
it 'throttles the endpoint' do
@@ -1194,9 +1196,11 @@ RSpec.describe GroupsController, factory_default: :keep do
before do
sign_in(admin)
- allow(Gitlab::ApplicationRateLimiter)
+ allow_next_instance_of(Gitlab::ApplicationRateLimiter::BaseStrategy) do |strategy|
+ allow(strategy)
.to receive(:increment)
.and_return(Gitlab::ApplicationRateLimiter.rate_limits[:group_download_export][:threshold].call + 1)
+ end
end
it 'throttles the endpoint' do
diff --git a/spec/controllers/import/available_namespaces_controller_spec.rb b/spec/controllers/import/available_namespaces_controller_spec.rb
index 0f98d649338..26ea1d92189 100644
--- a/spec/controllers/import/available_namespaces_controller_spec.rb
+++ b/spec/controllers/import/available_namespaces_controller_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Import::AvailableNamespacesController do
it "does not include group with access level #{params[:role]} in list" do
group = create(:group, project_creation_level: group_project_creation_level)
- group.add_user(user, role)
+ group.add_member(user, role)
get :index
expect(response).to have_gitlab_http_status(:ok)
@@ -52,7 +52,7 @@ RSpec.describe Import::AvailableNamespacesController do
it "does not include group with access level #{params[:role]} in list" do
group = create(:group, project_creation_level: group_project_creation_level)
- group.add_user(user, role)
+ group.add_member(user, role)
get :index
expect(response).to have_gitlab_http_status(:ok)
@@ -81,7 +81,7 @@ RSpec.describe Import::AvailableNamespacesController do
it "#{params[:is_visible] ? 'includes' : 'does not include'} group with access level #{params[:role]} in list" do
group = create(:group, project_creation_level: project_creation_level)
- group.add_user(user, :developer)
+ group.add_member(user, :developer)
get :index
diff --git a/spec/controllers/import/bitbucket_controller_spec.rb b/spec/controllers/import/bitbucket_controller_spec.rb
index 6d24830af27..af220e2d515 100644
--- a/spec/controllers/import/bitbucket_controller_spec.rb
+++ b/spec/controllers/import/bitbucket_controller_spec.rb
@@ -45,24 +45,27 @@ RSpec.describe Import::BitbucketController do
end
context "when auth state param is valid" do
+ let(:expires_at) { Time.current + 1.day }
+ let(:expires_in) { 1.day }
+ let(:access_token) do
+ double(token: token,
+ secret: secret,
+ expires_at: expires_at,
+ expires_in: expires_in,
+ refresh_token: refresh_token)
+ end
+
before do
session[:bitbucket_auth_state] = 'state'
end
it "updates access token" do
- expires_at = Time.current + 1.day
- expires_in = 1.day
- access_token = double(token: token,
- secret: secret,
- expires_at: expires_at,
- expires_in: expires_in,
- refresh_token: refresh_token)
allow_any_instance_of(OAuth2::Client)
.to receive(:get_token)
.with(hash_including(
'grant_type' => 'authorization_code',
'code' => code,
- redirect_uri: users_import_bitbucket_callback_url),
+ 'redirect_uri' => users_import_bitbucket_callback_url),
{})
.and_return(access_token)
stub_omniauth_provider('bitbucket')
@@ -75,6 +78,18 @@ RSpec.describe Import::BitbucketController do
expect(session[:bitbucket_expires_in]).to eq(expires_in)
expect(controller).to redirect_to(status_import_bitbucket_url)
end
+
+ it "passes namespace_id query param to status if provided" do
+ namespace_id = 30
+
+ allow_any_instance_of(OAuth2::Client)
+ .to receive(:get_token)
+ .and_return(access_token)
+
+ get :callback, params: { code: code, state: 'state', namespace_id: namespace_id }
+
+ expect(controller).to redirect_to(status_import_bitbucket_url(namespace_id: namespace_id))
+ end
end
end
@@ -82,7 +97,6 @@ RSpec.describe Import::BitbucketController do
before do
@repo = double(name: 'vim', slug: 'vim', owner: 'asd', full_name: 'asd/vim', clone_url: 'http://test.host/demo/url.git', 'valid?' => true)
@invalid_repo = double(name: 'mercurialrepo', slug: 'mercurialrepo', owner: 'asd', full_name: 'asd/mercurialrepo', clone_url: 'http://test.host/demo/mercurialrepo.git', 'valid?' => false)
- allow(controller).to receive(:provider_url).and_return('http://demobitbucket.org')
end
context "when token does not exists" do
@@ -109,10 +123,6 @@ RSpec.describe Import::BitbucketController do
end
it_behaves_like 'import controller status' do
- before do
- allow(controller).to receive(:provider_url).and_return('http://demobitbucket.org')
- end
-
let(:repo) { @repo }
let(:repo_id) { @repo.full_name }
let(:import_source) { @repo.full_name }
diff --git a/spec/controllers/import/bitbucket_server_controller_spec.rb b/spec/controllers/import/bitbucket_server_controller_spec.rb
index d5f94be65b6..ac56d3af54f 100644
--- a/spec/controllers/import/bitbucket_server_controller_spec.rb
+++ b/spec/controllers/import/bitbucket_server_controller_spec.rb
@@ -134,6 +134,15 @@ RSpec.describe Import::BitbucketServerController do
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(status_import_bitbucket_server_path)
end
+
+ it 'passes namespace_id to status page if provided' do
+ namespace_id = 5
+ allow(controller).to receive(:allow_local_requests?).and_return(true)
+
+ post :configure, params: { personal_access_token: token, bitbucket_server_username: username, bitbucket_server_url: url, namespace_id: namespace_id }
+
+ expect(response).to redirect_to(status_import_bitbucket_server_path(namespace_id: namespace_id))
+ end
end
describe 'GET status' do
@@ -160,6 +169,14 @@ RSpec.describe Import::BitbucketServerController do
expect(json_response.dig("provider_repos", 0, "id")).to eq(@repo.full_name)
end
+ it 'redirects to connection form if session is missing auth data' do
+ session[:bitbucket_server_url] = nil
+
+ get :status, format: :html
+
+ expect(response).to redirect_to(new_import_bitbucket_server_path)
+ end
+
it_behaves_like 'import controller status' do
let(:repo) { @repo }
let(:repo_id) { "#{@repo.project_key}/#{@repo.slug}" }
diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb
index a7089005abf..7177c8c10a6 100644
--- a/spec/controllers/import/bulk_imports_controller_spec.rb
+++ b/spec/controllers/import/bulk_imports_controller_spec.rb
@@ -48,15 +48,25 @@ RSpec.describe Import::BulkImportsController do
expect(session[:bulk_import_gitlab_access_token]).to eq(token)
expect(controller).to redirect_to(status_import_bulk_imports_url)
end
+
+ it 'passes namespace_id to status' do
+ namespace_id = 5
+ token = 'token'
+ url = 'https://gitlab.example'
+
+ post :configure, params: { bulk_import_gitlab_access_token: token, bulk_import_gitlab_url: url, namespace_id: namespace_id }
+
+ expect(controller).to redirect_to(status_import_bulk_imports_url(namespace_id: namespace_id))
+ end
end
describe 'GET status' do
- def get_status(params_override = {})
+ def get_status(params_override = {}, format = :json)
params = { page: 1, per_page: 20, filter: '' }.merge(params_override)
get :status,
params: params,
- format: :json,
+ format: format,
session: {
bulk_import_gitlab_url: 'https://gitlab.example.com',
bulk_import_gitlab_access_token: 'demo-pat'
@@ -169,6 +179,25 @@ RSpec.describe Import::BulkImportsController do
end
end
end
+
+ context 'when namespace_id is provided' do
+ let_it_be(:group) { create(:group) }
+
+ it 'renders 404 if user does not have access to namespace' do
+ get_status({ namespace_id: group.id }, :html)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'passes namespace to template' do
+ group.add_owner(user)
+
+ get_status({ namespace_id: group.id }, :html)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(assigns(:namespace)).to eq(group)
+ end
+ end
end
context 'when connection error occurs' do
diff --git a/spec/controllers/import/fogbugz_controller_spec.rb b/spec/controllers/import/fogbugz_controller_spec.rb
index 8f8cc9590a5..ed2a588eadf 100644
--- a/spec/controllers/import/fogbugz_controller_spec.rb
+++ b/spec/controllers/import/fogbugz_controller_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Import::FogbugzController do
let(:user) { create(:user) }
let(:token) { FFaker::Lorem.characters(8) }
let(:uri) { 'https://example.com' }
+ let(:namespace_id) { 5 }
before do
sign_in(user)
@@ -16,9 +17,11 @@ RSpec.describe Import::FogbugzController do
describe 'POST #callback' do
let(:xml_response) { %Q(<?xml version=\"1.0\" encoding=\"UTF-8\"?><response><token><![CDATA[#{token}]]></token></response>) }
- it 'attempts to contact Fogbugz server' do
+ before do
stub_request(:post, "https://example.com/api.asp").to_return(status: 200, body: xml_response, headers: {})
+ end
+ it 'attempts to contact Fogbugz server' do
post :callback, params: { uri: uri, email: 'test@example.com', password: 'mypassword' }
expect(session[:fogbugz_token]).to eq(token)
@@ -26,6 +29,29 @@ RSpec.describe Import::FogbugzController do
expect(response).to redirect_to(new_user_map_import_fogbugz_path)
end
+ it 'preserves namespace_id query param on success' do
+ post :callback, params: { uri: uri, email: 'test@example.com', password: 'mypassword', namespace_id: namespace_id }
+
+ expect(response).to redirect_to(new_user_map_import_fogbugz_path(namespace_id: namespace_id))
+ end
+
+ it 'redirects to new page maintaining namespace_id when client raises standard error' do
+ namespace_id = 5
+ allow(::Gitlab::FogbugzImport::Client).to receive(:new).and_raise(StandardError)
+
+ post :callback, params: { uri: uri, email: 'test@example.com', password: 'mypassword', namespace_id: namespace_id }
+
+ expect(response).to redirect_to(new_import_fogbugz_url(namespace_id: namespace_id))
+ end
+
+ it 'redirects to new page form when client raises authentication exception' do
+ allow(::Gitlab::FogbugzImport::Client).to receive(:new).and_raise(::Fogbugz::AuthenticationException)
+
+ post :callback, params: { uri: uri, email: 'test@example.com', password: 'mypassword' }
+
+ expect(response).to redirect_to(new_import_fogbugz_url)
+ end
+
context 'verify url' do
shared_examples 'denies local request' do |reason|
it 'does not allow requests' do
@@ -76,6 +102,16 @@ RSpec.describe Import::FogbugzController do
expect(session[:fogbugz_user_map]).to eq(user_map)
expect(response).to redirect_to(status_import_fogbugz_path)
end
+
+ it 'preserves namespace_id query param' do
+ client = double(user_map: {})
+ expect(controller).to receive(:client).and_return(client)
+
+ post :create_user_map, params: { users: user_map, namespace_id: namespace_id }
+
+ expect(session[:fogbugz_user_map]).to eq(user_map)
+ expect(response).to redirect_to(status_import_fogbugz_path(namespace_id: namespace_id))
+ end
end
describe 'GET status' do
@@ -84,11 +120,19 @@ RSpec.describe Import::FogbugzController do
id: 'demo', name: 'vim', safe_name: 'vim', path: 'vim')
end
- before do
- stub_client(valid?: true)
+ it 'redirects to new page form when client is invalid' do
+ stub_client(valid?: false)
+
+ get :status
+
+ expect(response).to redirect_to(new_import_fogbugz_path)
end
it_behaves_like 'import controller status' do
+ before do
+ stub_client(valid?: true)
+ end
+
let(:repo_id) { repo.id }
let(:import_source) { repo.name }
let(:provider_name) { 'fogbugz' }
diff --git a/spec/controllers/import/github_controller_spec.rb b/spec/controllers/import/github_controller_spec.rb
index 56e55c45e66..46160aac0c1 100644
--- a/spec/controllers/import/github_controller_spec.rb
+++ b/spec/controllers/import/github_controller_spec.rb
@@ -83,11 +83,10 @@ RSpec.describe Import::GithubController do
expect(flash[:alert]).to eq('Access denied to your GitHub account.')
end
- it "includes namespace_id from session if it is present" do
+ it "includes namespace_id from query params if it is present" do
namespace_id = 1
- session[:namespace_id] = 1
- get :callback, params: { state: valid_auth_state }
+ get :callback, params: { state: valid_auth_state, namespace_id: namespace_id }
expect(controller).to redirect_to(status_import_github_url(namespace_id: namespace_id))
end
diff --git a/spec/controllers/import/gitlab_controller_spec.rb b/spec/controllers/import/gitlab_controller_spec.rb
index 117c934ad5d..7b3978297fb 100644
--- a/spec/controllers/import/gitlab_controller_spec.rb
+++ b/spec/controllers/import/gitlab_controller_spec.rb
@@ -38,21 +38,47 @@ RSpec.describe Import::GitlabController do
expect(controller.send(:importable_repos)).to be_an_instance_of(Array)
end
+
+ it "passes namespace_id query param to status if provided" do
+ namespace_id = 30
+
+ allow_next_instance_of(Gitlab::GitlabImport::Client) do |instance|
+ allow(instance).to receive(:get_token).and_return(token)
+ end
+
+ get :callback, params: { namespace_id: namespace_id }
+
+ expect(controller).to redirect_to(status_import_gitlab_url(namespace_id: namespace_id))
+ end
end
describe "GET status" do
let(:repo_fake) { Struct.new(:id, :path, :path_with_namespace, :web_url, keyword_init: true) }
let(:repo) { repo_fake.new(id: 1, path: 'vim', path_with_namespace: 'asd/vim', web_url: 'https://gitlab.com/asd/vim') }
- before do
- assign_session_token
+ context 'when session contains access token' do
+ before do
+ assign_session_token
+ end
+
+ it_behaves_like 'import controller status' do
+ let(:repo_id) { repo.id }
+ let(:import_source) { repo.path_with_namespace }
+ let(:provider_name) { 'gitlab' }
+ let(:client_repos_field) { :projects }
+ end
end
- it_behaves_like 'import controller status' do
- let(:repo_id) { repo.id }
- let(:import_source) { repo.path_with_namespace }
- let(:provider_name) { 'gitlab' }
- let(:client_repos_field) { :projects }
+ it 'redirects to auth if session does not contain access token' do
+ remote_gitlab_url = 'https://test.host/auth/gitlab'
+
+ allow(Gitlab::GitlabImport::Client)
+ .to receive(:new)
+ .and_return(double(authorize_url: remote_gitlab_url))
+
+ get :status
+
+ expect(response).to redirect_to(remote_gitlab_url)
end
end
diff --git a/spec/controllers/profiles/emails_controller_spec.rb b/spec/controllers/profiles/emails_controller_spec.rb
index b63db831462..818aba77354 100644
--- a/spec/controllers/profiles/emails_controller_spec.rb
+++ b/spec/controllers/profiles/emails_controller_spec.rb
@@ -20,9 +20,9 @@ RSpec.describe Profiles::EmailsController do
before do
allowed_threshold = Gitlab::ApplicationRateLimiter.rate_limits[action][:threshold]
- allow(Gitlab::ApplicationRateLimiter)
- .to receive(:increment)
- .and_return(allowed_threshold + 1)
+ allow_next_instance_of(Gitlab::ApplicationRateLimiter::BaseStrategy) do |strategy|
+ allow(strategy).to receive(:increment).and_return(allowed_threshold + 1)
+ end
end
it 'does not send any email' do
diff --git a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
index 48c747bf074..aafea0050d3 100644
--- a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
+++ b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
@@ -65,5 +65,42 @@ RSpec.describe Profiles::PersonalAccessTokensController do
scopes: contain_exactly(:api, :read_user)
)
end
+
+ context "access_token_pagination feature flag is enabled" do
+ before do
+ stub_feature_flags(access_token_pagination: true)
+ allow(Kaminari.config).to receive(:default_per_page).and_return(1)
+ create(:personal_access_token, user: user)
+ end
+
+ it "returns paginated response" do
+ get :index, params: { page: 1 }
+ expect(assigns(:active_personal_access_tokens).count).to eq(1)
+ end
+
+ it 'adds appropriate headers' do
+ get :index, params: { page: 1 }
+ expect_header('X-Per-Page', '1')
+ expect_header('X-Page', '1')
+ expect_header('X-Next-Page', '2')
+ expect_header('X-Total', '2')
+ end
+ end
+
+ context "access_token_pagination feature flag is disabled" do
+ before do
+ stub_feature_flags(access_token_pagination: false)
+ create(:personal_access_token, user: user)
+ end
+
+ it "returns all tokens in system" do
+ get :index, params: { page: 1 }
+ expect(assigns(:active_personal_access_tokens).count).to eq(2)
+ end
+ end
+ end
+
+ def expect_header(header_name, header_val)
+ expect(response.headers[header_name]).to eq(header_val)
end
end
diff --git a/spec/controllers/projects/hooks_controller_spec.rb b/spec/controllers/projects/hooks_controller_spec.rb
index ebcf35a7ecd..a275bc28631 100644
--- a/spec/controllers/projects/hooks_controller_spec.rb
+++ b/spec/controllers/projects/hooks_controller_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Projects::HooksController do
+ include AfterNextHelpers
+
let_it_be(:project) { create(:project) }
let(:user) { project.first_owner }
@@ -20,6 +22,36 @@ RSpec.describe Projects::HooksController do
end
end
+ describe '#update' do
+ let_it_be(:hook) { create(:project_hook, project: project) }
+
+ let(:params) do
+ { namespace_id: project.namespace, project_id: project, id: hook.id }
+ end
+
+ it 'adds, updates and deletes URL variables' do
+ hook.update!(url_variables: { 'a' => 'bar', 'b' => 'woo' })
+
+ params[:hook] = {
+ url_variables: [
+ { key: 'a', value: 'updated' },
+ { key: 'b', value: nil },
+ { key: 'c', value: 'new' }
+ ]
+ }
+
+ put :update, params: params
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(flash[:notice]).to include('successfully updated')
+
+ expect(hook.reload.url_variables).to eq(
+ 'a' => 'updated',
+ 'c' => 'new'
+ )
+ end
+ end
+
describe '#edit' do
let_it_be(:hook) { create(:project_hook, project: project) }
@@ -87,14 +119,30 @@ RSpec.describe Projects::HooksController do
job_events: true,
pipeline_events: true,
wiki_page_events: true,
- deployment_events: true
+ deployment_events: true,
+
+ url_variables: [{ key: 'token', value: 'some secret value' }]
}
post :create, params: { namespace_id: project.namespace, project_id: project, hook: hook_params }
expect(response).to have_gitlab_http_status(:found)
- expect(ProjectHook.all.size).to eq(1)
- expect(ProjectHook.first).to have_attributes(hook_params)
+ expect(flash[:alert]).to be_blank
+ expect(ProjectHook.count).to eq(1)
+ expect(ProjectHook.first).to have_attributes(hook_params.except(:url_variables))
+ expect(ProjectHook.first).to have_attributes(url_variables: { 'token' => 'some secret value' })
+ end
+
+ it 'alerts the user if the new hook is invalid' do
+ hook_params = {
+ token: "TEST\nTOKEN",
+ url: "http://example.com"
+ }
+
+ post :create, params: { namespace_id: project.namespace, project_id: project, hook: hook_params }
+
+ expect(flash[:alert]).to be_present
+ expect(ProjectHook.count).to eq(0)
end
end
@@ -109,6 +157,45 @@ RSpec.describe Projects::HooksController do
describe '#test' do
let(:hook) { create(:project_hook, project: project) }
+ context 'when the hook executes successfully' do
+ before do
+ stub_request(:post, hook.url).to_return(status: 200)
+ end
+
+ it 'informs the user' do
+ post :test, params: { namespace_id: project.namespace, project_id: project, id: hook }
+
+ expect(flash[:notice]).to include('executed successfully')
+ expect(flash[:notice]).to include('HTTP 200')
+ end
+ end
+
+ context 'when the hook runs, but fails' do
+ before do
+ stub_request(:post, hook.url).to_return(status: 400)
+ end
+
+ it 'informs the user' do
+ post :test, params: { namespace_id: project.namespace, project_id: project, id: hook }
+
+ expect(flash[:alert]).to include('executed successfully but')
+ expect(flash[:alert]).to include('HTTP 400')
+ end
+ end
+
+ context 'when the hook fails completely' do
+ before do
+ allow_next(::TestHooks::ProjectService)
+ .to receive(:execute).and_return({ message: 'All is woe' })
+ end
+
+ it 'informs the user' do
+ post :test, params: { namespace_id: project.namespace, project_id: project, id: hook }
+
+ expect(flash[:alert]).to include('failed: All is woe')
+ end
+ end
+
context 'when the endpoint receives requests above the limit', :freeze_time, :clean_gitlab_redis_rate_limiting do
before do
allow(Gitlab::ApplicationRateLimiter).to receive(:rate_limits)
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 1305693372c..badac688229 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -1818,7 +1818,7 @@ RSpec.describe Projects::IssuesController do
context 'user is allowed access' do
before do
- project.add_user(user, :maintainer)
+ project.add_member(user, :maintainer)
end
it 'displays all available notes' do
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index 107eb1ed3a3..e4e3151dd12 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -752,28 +752,6 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
end
- describe 'GET status.json' do
- let(:job) { create(:ci_build, pipeline: pipeline) }
- let(:status) { job.detailed_status(double('user')) }
-
- before do
- get :status, params: {
- namespace_id: project.namespace,
- project_id: project,
- id: job.id
- },
- format: :json
- end
-
- it 'return a detailed job status in json' do
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['text']).to eq status.text
- expect(json_response['label']).to eq status.label
- expect(json_response['icon']).to eq status.icon
- expect(json_response['favicon']).to match_asset_path "/assets/ci_favicons/#{status.favicon}.png"
- end
- end
-
describe 'POST retry' do
before do
project.add_developer(user)
diff --git a/spec/controllers/projects/logs_controller_spec.rb b/spec/controllers/projects/logs_controller_spec.rb
deleted file mode 100644
index 1c81ae93b42..00000000000
--- a/spec/controllers/projects/logs_controller_spec.rb
+++ /dev/null
@@ -1,214 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::LogsController do
- include KubernetesHelpers
-
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
-
- let_it_be(:environment) do
- create(:environment, name: 'production', project: project)
- end
-
- let(:pod_name) { "foo" }
- let(:container) { 'container-1' }
-
- before do
- sign_in(user)
- end
-
- describe 'GET #index' do
- let(:empty_project) { create(:project) }
-
- it 'returns 404 with reporter access' do
- project.add_reporter(user)
-
- get :index, params: environment_params
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
-
- it 'renders empty logs page if no environment exists' do
- empty_project.add_developer(user)
-
- get :index, params: { namespace_id: empty_project.namespace, project_id: empty_project }
-
- expect(response).to be_ok
- expect(response).to render_template 'empty_logs'
- end
-
- it 'renders index template' do
- project.add_developer(user)
-
- get :index, params: environment_params
-
- expect(response).to be_ok
- expect(response).to render_template 'index'
- end
-
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(monitor_logging: false)
- end
-
- it 'returns 404 with reporter access' do
- project.add_developer(user)
-
- get :index, params: environment_params
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- shared_examples 'pod logs service' do |endpoint, service|
- let(:service_result) do
- {
- status: :success,
- logs: ['Log 1', 'Log 2', 'Log 3'],
- pods: [pod_name],
- pod_name: pod_name,
- container_name: container
- }
- end
-
- let(:service_result_json) { Gitlab::Json.parse(service_result.to_json) }
-
- let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*', projects: [project]) }
-
- before do
- allow_next_instance_of(service) do |instance|
- allow(instance).to receive(:execute).and_return(service_result)
- end
- end
-
- it 'returns 404 with reporter access' do
- project.add_reporter(user)
-
- get endpoint, params: environment_params(pod_name: pod_name, format: :json)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
-
- context 'with developer access' do
- before do
- project.add_developer(user)
- end
-
- it 'returns the service result' do
- get endpoint, params: environment_params(pod_name: pod_name, format: :json)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(json_response).to eq(service_result_json)
- end
- end
-
- context 'with maintainer access' do
- before do
- project.add_maintainer(user)
- end
-
- it 'returns the service result' do
- get endpoint, params: environment_params(pod_name: pod_name, format: :json)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(json_response).to eq(service_result_json)
- end
-
- it 'sets the polling header' do
- get endpoint, params: environment_params(pod_name: pod_name, format: :json)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(response.headers['Poll-Interval']).to eq('3000')
- end
-
- context 'with gitlab managed apps logs' do
- it 'uses cluster finder services to select cluster', :aggregate_failures do
- cluster_list = [cluster]
- service_params = { params: ActionController::Parameters.new(pod_name: pod_name).permit! }
- request_params = {
- namespace_id: project.namespace,
- project_id: project,
- cluster_id: cluster.id,
- pod_name: pod_name,
- format: :json
- }
-
- expect_next_instance_of(ClusterAncestorsFinder, project, user) do |finder|
- expect(finder).to receive(:execute).and_return(cluster_list)
- expect(cluster_list).to receive(:find).and_call_original
- end
-
- expect_next_instance_of(service, cluster, Gitlab::Kubernetes::Helm::NAMESPACE, service_params) do |instance|
- expect(instance).to receive(:execute).and_return(service_result)
- end
-
- get endpoint, params: request_params
-
- expect(response).to have_gitlab_http_status(:success)
- expect(json_response).to eq(service_result_json)
- end
- end
-
- context 'when service is processing' do
- let(:service_result) { nil }
-
- it 'returns a 202' do
- get endpoint, params: environment_params(pod_name: pod_name, format: :json)
-
- expect(response).to have_gitlab_http_status(:accepted)
- end
- end
-
- shared_examples 'unsuccessful execution response' do |message|
- let(:service_result) do
- {
- status: :error,
- message: message
- }
- end
-
- it 'returns the error' do
- get endpoint, params: environment_params(pod_name: pod_name, format: :json)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response).to eq(service_result_json)
- end
- end
-
- context 'when service is failing' do
- it_behaves_like 'unsuccessful execution response', 'some error'
- end
-
- context 'when cluster is nil' do
- let!(:cluster) { nil }
-
- it_behaves_like 'unsuccessful execution response', 'Environment does not have deployments'
- end
-
- context 'when namespace is empty' do
- before do
- allow(environment).to receive(:deployment_namespace).and_return('')
- end
-
- it_behaves_like 'unsuccessful execution response', 'Environment does not have deployments'
- end
- end
- end
-
- describe 'GET #k8s' do
- it_behaves_like 'pod logs service', :k8s, PodLogs::KubernetesService
- end
-
- describe 'GET #elasticsearch' do
- it_behaves_like 'pod logs service', :elasticsearch, PodLogs::ElasticsearchService
- end
-
- def environment_params(opts = {})
- opts.reverse_merge(namespace_id: project.namespace,
- project_id: project,
- environment_name: environment.name)
- end
-end
diff --git a/spec/controllers/projects/mirrors_controller_spec.rb b/spec/controllers/projects/mirrors_controller_spec.rb
index 686effd799e..d33bc215cfc 100644
--- a/spec/controllers/projects/mirrors_controller_spec.rb
+++ b/spec/controllers/projects/mirrors_controller_spec.rb
@@ -211,7 +211,7 @@ RSpec.describe Projects::MirrorsController do
context 'data in the cache' do
let(:ssh_key) { 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAfuCHKVTjquxvt6CM6tdG4SLp1Btn/nOeHHE5UOzRdf' }
- let(:ssh_fp) { { type: 'ed25519', bits: 256, fingerprint: '2e:65:6a:c8:cf:bf:b2:8b:9a:bd:6d:9f:11:5c:12:16', index: 0 } }
+ let(:ssh_fp) { { type: 'ed25519', bits: 256, fingerprint: '2e:65:6a:c8:cf:bf:b2:8b:9a:bd:6d:9f:11:5c:12:16', fingerprint_sha256: 'SHA256:eUXGGm1YGsMAS7vkcx6JOJdOGHPem5gQp4taiCfCLB8', index: 0 } }
it 'returns the data with a 200 response' do
stub_reactive_cache(cache, known_hosts: ssh_key)
diff --git a/spec/controllers/projects/pipelines/tests_controller_spec.rb b/spec/controllers/projects/pipelines/tests_controller_spec.rb
index 2db54dbe671..ddcab8b048e 100644
--- a/spec/controllers/projects/pipelines/tests_controller_spec.rb
+++ b/spec/controllers/projects/pipelines/tests_controller_spec.rb
@@ -45,11 +45,26 @@ RSpec.describe Projects::Pipelines::TestsController do
pipeline.job_artifacts.first.update!(expire_at: Date.yesterday)
end
+ it 'renders test suite', :aggregate_failures do
+ get_tests_show_json(build_ids)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['name']).to eq('test')
+ expect(json_response['total_count']).to eq(3)
+ expect(json_response['test_cases'].size).to eq(3)
+ end
+ end
+
+ context 'when artifacts do not exist' do
+ before do
+ pipeline.job_artifacts.each(&:destroy)
+ end
+
it 'renders not_found errors', :aggregate_failures do
get_tests_show_json(build_ids)
expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response['errors']).to eq('Test report artifacts have expired')
+ expect(json_response['errors']).to eq('Test report artifacts not found')
end
end
@@ -68,7 +83,6 @@ RSpec.describe Projects::Pipelines::TestsController do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq('test')
- expect(json_response['artifacts_expired']).to be_falsey
# Each test failure in this pipeline has a matching failure in the default branch
recent_failures = json_response['test_cases'].map { |tc| tc['recent_failures'] }
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index b3b803649d1..06930d8727b 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -827,6 +827,14 @@ RSpec.describe Projects::PipelinesController do
{
chart_param: 'lead-time',
event: 'p_analytics_ci_cd_lead_time'
+ },
+ {
+ chart_param: 'time-to-restore-service',
+ event: 'p_analytics_ci_cd_time_to_restore_service'
+ },
+ {
+ chart_param: 'change-failure-rate',
+ event: 'p_analytics_ci_cd_change_failure_rate'
}
].each do |tab|
it_behaves_like 'tracking unique visits', :charts do
diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb
index 9bb34a38005..46eb340cbba 100644
--- a/spec/controllers/projects/project_members_controller_spec.rb
+++ b/spec/controllers/projects/project_members_controller_spec.rb
@@ -68,27 +68,6 @@ RSpec.describe Projects::ProjectMembersController do
end
end
- context 'group links' do
- let_it_be(:project_group_link) { create(:project_group_link, project: project, group: group) }
-
- it 'lists group links' do
- get :index, params: { namespace_id: project.namespace, project_id: project }
-
- expect(assigns(:group_links).map(&:id)).to contain_exactly(project_group_link.id)
- end
-
- context 'when `search_groups` param is present' do
- let(:group_2) { create(:group, :public, name: 'group_2') }
- let!(:project_group_link_2) { create(:project_group_link, project: project, group: group_2) }
-
- it 'lists group links that match search' do
- get :index, params: { namespace_id: project.namespace, project_id: project, search_groups: 'group_2' }
-
- expect(assigns(:group_links).map(&:id)).to contain_exactly(project_group_link_2.id)
- end
- end
- end
-
context 'invited members' do
let_it_be(:invited_member) { create(:project_member, :invited, project: project) }
diff --git a/spec/controllers/projects/service_ping_controller_spec.rb b/spec/controllers/projects/service_ping_controller_spec.rb
index fa92efee079..22fb18edc80 100644
--- a/spec/controllers/projects/service_ping_controller_spec.rb
+++ b/spec/controllers/projects/service_ping_controller_spec.rb
@@ -80,16 +80,24 @@ RSpec.describe Projects::ServicePingController do
it_behaves_like 'counter is not increased'
it_behaves_like 'counter is increased', 'WEB_IDE_PREVIEWS_SUCCESS_COUNT'
- context 'when the user has access to the project' do
+ context 'when the user has access to the project', :snowplow do
let(:user) { project.owner }
it 'increases the live preview view counter' do
- expect(Gitlab::UsageDataCounters::EditorUniqueCounter).to receive(:track_live_preview_edit_action).with(author: user)
+ expect(Gitlab::UsageDataCounters::EditorUniqueCounter).to receive(:track_live_preview_edit_action).with(author: user, project: project)
subject
expect(response).to have_gitlab_http_status(:ok)
end
+
+ it_behaves_like 'Snowplow event tracking' do
+ let(:project) { create(:project) }
+ let(:category) { 'ide_edit' }
+ let(:action) { 'g_edit_by_live_preview' }
+ let(:namespace) { project.namespace }
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ end
end
end
diff --git a/spec/controllers/projects/settings/ci_cd_controller_spec.rb b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
index d50f1aa1dd8..e5ae1b04a86 100644
--- a/spec/controllers/projects/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
@@ -8,305 +8,294 @@ RSpec.describe Projects::Settings::CiCdController do
let(:project) { project_auto_devops.project }
- before do
- project.add_maintainer(user)
- sign_in(user)
- end
-
- describe 'GET show' do
- let_it_be(:parent_group) { create(:group) }
- let_it_be(:group) { create(:group, parent: parent_group) }
- let_it_be(:other_project) { create(:project, group: group) }
+ context 'as a maintainer' do
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ end
- it 'renders show with 200 status code' do
- get :show, params: { namespace_id: project.namespace, project_id: project }
+ describe 'GET show' do
+ let_it_be(:parent_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent_group) }
+ let_it_be(:other_project) { create(:project, group: group) }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template(:show)
- end
+ it 'renders show with 200 status code' do
+ get :show, params: { namespace_id: project.namespace, project_id: project }
- context 'with CI/CD disabled' do
- before do
- project.project_feature.update_attribute(:builds_access_level, ProjectFeature::DISABLED)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:show)
end
- it 'renders show with 404 status code' do
- get :show, params: { namespace_id: project.namespace, project_id: project }
- expect(response).to have_gitlab_http_status(:not_found)
+ context 'with CI/CD disabled' do
+ before do
+ project.project_feature.update_attribute(:builds_access_level, ProjectFeature::DISABLED)
+ end
+
+ it 'renders show with 404 status code' do
+ get :show, params: { namespace_id: project.namespace, project_id: project }
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
- end
- context 'with group runners' do
- let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group]) }
- let_it_be(:project_runner) { create(:ci_runner, :project, projects: [other_project]) }
- let_it_be(:shared_runner) { create(:ci_runner, :instance) }
+ context 'with group runners' do
+ let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group]) }
+ let_it_be(:project_runner) { create(:ci_runner, :project, projects: [other_project]) }
+ let_it_be(:shared_runner) { create(:ci_runner, :instance) }
- it 'sets assignable project runners only' do
- group.add_maintainer(user)
+ it 'sets assignable project runners only' do
+ group.add_maintainer(user)
- get :show, params: { namespace_id: project.namespace, project_id: project }
+ get :show, params: { namespace_id: project.namespace, project_id: project }
- expect(assigns(:assignable_runners)).to contain_exactly(project_runner)
+ expect(assigns(:assignable_runners)).to contain_exactly(project_runner)
+ end
end
- end
- context 'prevents N+1 queries for tags' do
- render_views
+ context 'prevents N+1 queries for tags' do
+ render_views
- def show
- get :show, params: { namespace_id: project.namespace, project_id: project }
- end
+ def show
+ get :show, params: { namespace_id: project.namespace, project_id: project }
+ end
- it 'has the same number of queries with one tag or with many tags', :request_store do
- group.add_maintainer(user)
+ it 'has the same number of queries with one tag or with many tags', :request_store do
+ group.add_maintainer(user)
- show # warmup
+ show # warmup
- # with one tag
- create(:ci_runner, :instance, tag_list: %w(shared_runner))
- create(:ci_runner, :project, projects: [other_project], tag_list: %w(project_runner))
- create(:ci_runner, :group, groups: [group], tag_list: %w(group_runner))
- control = ActiveRecord::QueryRecorder.new { show }
+ # with one tag
+ create(:ci_runner, :instance, tag_list: %w(shared_runner))
+ create(:ci_runner, :project, projects: [other_project], tag_list: %w(project_runner))
+ create(:ci_runner, :group, groups: [group], tag_list: %w(group_runner))
+ control = ActiveRecord::QueryRecorder.new { show }
- # with several tags
- create(:ci_runner, :instance, tag_list: %w(shared_runner tag2 tag3))
- create(:ci_runner, :project, projects: [other_project], tag_list: %w(project_runner tag2 tag3))
- create(:ci_runner, :group, groups: [group], tag_list: %w(group_runner tag2 tag3))
+ # with several tags
+ create(:ci_runner, :instance, tag_list: %w(shared_runner tag2 tag3))
+ create(:ci_runner, :project, projects: [other_project], tag_list: %w(project_runner tag2 tag3))
+ create(:ci_runner, :group, groups: [group], tag_list: %w(group_runner tag2 tag3))
- expect { show }.not_to exceed_query_limit(control)
+ expect { show }.not_to exceed_query_limit(control)
+ end
end
end
- end
- describe '#reset_cache' do
- before do
- sign_in(user)
-
- project.add_maintainer(user)
+ describe '#reset_cache' do
+ before do
+ sign_in(user)
- allow(ResetProjectCacheService).to receive_message_chain(:new, :execute).and_return(true)
- end
+ project.add_maintainer(user)
- subject { post :reset_cache, params: { namespace_id: project.namespace, project_id: project }, format: :json }
+ allow(ResetProjectCacheService).to receive_message_chain(:new, :execute).and_return(true)
+ end
- it 'calls reset project cache service' do
- expect(ResetProjectCacheService).to receive_message_chain(:new, :execute)
+ subject { post :reset_cache, params: { namespace_id: project.namespace, project_id: project }, format: :json }
- subject
- end
+ it 'calls reset project cache service' do
+ expect(ResetProjectCacheService).to receive_message_chain(:new, :execute)
- context 'when service returns successfully' do
- it 'returns a success header' do
subject
-
- expect(response).to have_gitlab_http_status(:ok)
end
- end
- context 'when service does not return successfully' do
- before do
- allow(ResetProjectCacheService).to receive_message_chain(:new, :execute).and_return(false)
+ context 'when service returns successfully' do
+ it 'returns a success header' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
- it 'returns an error header' do
- subject
+ context 'when service does not return successfully' do
+ before do
+ allow(ResetProjectCacheService).to receive_message_chain(:new, :execute).and_return(false)
+ end
- expect(response).to have_gitlab_http_status(:bad_request)
+ it 'returns an error header' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
end
end
- end
- describe 'PUT #reset_registration_token' do
- subject { put :reset_registration_token, params: { namespace_id: project.namespace, project_id: project } }
+ describe 'PUT #reset_registration_token' do
+ subject { put :reset_registration_token, params: { namespace_id: project.namespace, project_id: project } }
- it 'resets runner registration token' do
- expect { subject }.to change { project.reload.runners_token }
- expect(flash[:toast]).to eq('New runners registration token has been generated!')
- end
+ it 'resets runner registration token' do
+ expect { subject }.to change { project.reload.runners_token }
+ expect(flash[:toast]).to eq('New runners registration token has been generated!')
+ end
- it 'redirects the user to admin runners page' do
- subject
+ it 'redirects the user to admin runners page' do
+ subject
- expect(response).to redirect_to(namespace_project_settings_ci_cd_path)
+ expect(response).to redirect_to(namespace_project_settings_ci_cd_path)
+ end
end
- end
- describe 'PATCH update' do
- let(:params) { { ci_config_path: '' } }
-
- subject do
- patch :update,
- params: {
- namespace_id: project.namespace.to_param,
- project_id: project,
- project: params
- }
- end
+ describe 'PATCH update' do
+ let(:params) { { ci_config_path: '' } }
- it 'redirects to the settings page' do
- subject
+ subject do
+ patch :update,
+ params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ project: params
+ }
+ end
- expect(response).to have_gitlab_http_status(:found)
- expect(flash[:toast]).to eq("Pipelines settings for '#{project.name}' were successfully updated.")
- end
+ it 'redirects to the settings page' do
+ subject
- context 'when updating the auto_devops settings' do
- let(:params) { { auto_devops_attributes: { enabled: '' } } }
+ expect(response).to have_gitlab_http_status(:found)
+ expect(flash[:toast]).to eq("Pipelines settings for '#{project.name}' were successfully updated.")
+ end
- context 'following the instance default' do
+ context 'when updating the auto_devops settings' do
let(:params) { { auto_devops_attributes: { enabled: '' } } }
- it 'allows enabled to be set to nil' do
- subject
- project_auto_devops.reload
+ context 'following the instance default' do
+ let(:params) { { auto_devops_attributes: { enabled: '' } } }
- expect(project_auto_devops.enabled).to be_nil
- end
- end
+ it 'allows enabled to be set to nil' do
+ subject
+ project_auto_devops.reload
- context 'when run_auto_devops_pipeline is true' do
- before do
- expect_next_instance_of(Projects::UpdateService) do |instance|
- expect(instance).to receive(:run_auto_devops_pipeline?).and_return(true)
+ expect(project_auto_devops.enabled).to be_nil
end
end
- context 'when the project repository is empty' do
- it 'sets a notice flash' do
- subject
-
- expect(controller).to set_flash[:notice]
+ context 'when run_auto_devops_pipeline is true' do
+ before do
+ expect_next_instance_of(Projects::UpdateService) do |instance|
+ expect(instance).to receive(:run_auto_devops_pipeline?).and_return(true)
+ end
end
- it 'does not queue a CreatePipelineWorker' do
- expect(CreatePipelineWorker).not_to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args)
+ context 'when the project repository is empty' do
+ it 'sets a notice flash' do
+ subject
- subject
+ expect(controller).to set_flash[:notice]
+ end
+
+ it 'does not queue a CreatePipelineWorker' do
+ expect(CreatePipelineWorker).not_to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args)
+
+ subject
+ end
end
- end
- context 'when the project repository is not empty' do
- let(:project) { create(:project, :repository) }
+ context 'when the project repository is not empty' do
+ let(:project) { create(:project, :repository) }
- it 'displays a toast message' do
- allow(CreatePipelineWorker).to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args)
+ it 'displays a toast message' do
+ allow(CreatePipelineWorker).to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args)
- subject
+ subject
- expect(controller).to set_flash[:toast]
- end
+ expect(controller).to set_flash[:toast]
+ end
- it 'queues a CreatePipelineWorker' do
- expect(CreatePipelineWorker).to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args)
+ it 'queues a CreatePipelineWorker' do
+ expect(CreatePipelineWorker).to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args)
- subject
- end
+ subject
+ end
- it 'creates a pipeline', :sidekiq_inline do
- project.repository.create_file(user, 'Gemfile', 'Gemfile contents',
- message: 'Add Gemfile',
- branch_name: 'master')
+ it 'creates a pipeline', :sidekiq_inline do
+ project.repository.create_file(user, 'Gemfile', 'Gemfile contents',
+ message: 'Add Gemfile',
+ branch_name: 'master')
- expect { subject }.to change { Ci::Pipeline.count }.by(1)
+ expect { subject }.to change { Ci::Pipeline.count }.by(1)
+ end
end
end
- end
- context 'when run_auto_devops_pipeline is not true' do
- before do
- expect_next_instance_of(Projects::UpdateService) do |instance|
- expect(instance).to receive(:run_auto_devops_pipeline?).and_return(false)
+ context 'when run_auto_devops_pipeline is not true' do
+ before do
+ expect_next_instance_of(Projects::UpdateService) do |instance|
+ expect(instance).to receive(:run_auto_devops_pipeline?).and_return(false)
+ end
end
- end
- it 'does not queue a CreatePipelineWorker' do
- expect(CreatePipelineWorker).not_to receive(:perform_async).with(project.id, user.id, :web, any_args)
+ it 'does not queue a CreatePipelineWorker' do
+ expect(CreatePipelineWorker).not_to receive(:perform_async).with(project.id, user.id, :web, any_args)
- subject
+ subject
+ end
end
end
- end
- context 'when updating general settings' do
- context 'when build_timeout_human_readable is not specified' do
- let(:params) { { build_timeout_human_readable: '' } }
+ context 'when updating general settings' do
+ context 'when build_timeout_human_readable is not specified' do
+ let(:params) { { build_timeout_human_readable: '' } }
- it 'set default timeout' do
- subject
+ it 'set default timeout' do
+ subject
- project.reload
- expect(project.build_timeout).to eq(3600)
+ project.reload
+ expect(project.build_timeout).to eq(3600)
+ end
end
- end
- context 'when build_timeout_human_readable is specified' do
- let(:params) { { build_timeout_human_readable: '1h 30m' } }
+ context 'when build_timeout_human_readable is specified' do
+ let(:params) { { build_timeout_human_readable: '1h 30m' } }
- it 'set specified timeout' do
- subject
+ it 'set specified timeout' do
+ subject
- project.reload
- expect(project.build_timeout).to eq(5400)
+ project.reload
+ expect(project.build_timeout).to eq(5400)
+ end
end
- end
-
- context 'when build_timeout_human_readable is invalid' do
- let(:params) { { build_timeout_human_readable: '5m' } }
- it 'set specified timeout' do
- subject
-
- expect(controller).to set_flash[:alert]
- expect(response).to redirect_to(namespace_project_settings_ci_cd_path)
- end
- end
+ context 'when build_timeout_human_readable is invalid' do
+ let(:params) { { build_timeout_human_readable: '5m' } }
- context 'when default_git_depth is not specified' do
- let(:params) { { ci_cd_settings_attributes: { default_git_depth: 10 } } }
+ it 'set specified timeout' do
+ subject
- before do
- project.ci_cd_settings.update!(default_git_depth: nil)
+ expect(controller).to set_flash[:alert]
+ expect(response).to redirect_to(namespace_project_settings_ci_cd_path)
+ end
end
- it 'set specified git depth' do
- subject
+ context 'when default_git_depth is not specified' do
+ let(:params) { { ci_cd_settings_attributes: { default_git_depth: 10 } } }
- project.reload
- expect(project.ci_default_git_depth).to eq(10)
- end
- end
+ before do
+ project.ci_cd_settings.update!(default_git_depth: nil)
+ end
- context 'when forward_deployment_enabled is not specified' do
- let(:params) { { ci_cd_settings_attributes: { forward_deployment_enabled: false } } }
+ it 'set specified git depth' do
+ subject
- before do
- project.ci_cd_settings.update!(forward_deployment_enabled: nil)
+ project.reload
+ expect(project.ci_default_git_depth).to eq(10)
+ end
end
- it 'sets forward deployment enabled' do
- subject
-
- project.reload
- expect(project.ci_forward_deployment_enabled).to eq(false)
- end
- end
+ context 'when forward_deployment_enabled is not specified' do
+ let(:params) { { ci_cd_settings_attributes: { forward_deployment_enabled: false } } }
- context 'when max_artifacts_size is specified' do
- let(:params) { { max_artifacts_size: 10 } }
+ before do
+ project.ci_cd_settings.update!(forward_deployment_enabled: nil)
+ end
- context 'and user is not an admin' do
- it 'does not set max_artifacts_size' do
+ it 'sets forward deployment enabled' do
subject
project.reload
- expect(project.max_artifacts_size).to be_nil
+ expect(project.ci_forward_deployment_enabled).to eq(false)
end
end
- context 'and user is an admin' do
- let(:user) { create(:admin) }
+ context 'when max_artifacts_size is specified' do
+ let(:params) { { max_artifacts_size: 10 } }
- context 'with admin mode disabled' do
+ context 'and user is not an admin' do
it 'does not set max_artifacts_size' do
subject
@@ -315,33 +304,81 @@ RSpec.describe Projects::Settings::CiCdController do
end
end
- context 'with admin mode enabled', :enable_admin_mode do
- it 'sets max_artifacts_size' do
- subject
+ context 'and user is an admin' do
+ let(:user) { create(:admin) }
- project.reload
- expect(project.max_artifacts_size).to eq(10)
+ context 'with admin mode disabled' do
+ it 'does not set max_artifacts_size' do
+ subject
+
+ project.reload
+ expect(project.max_artifacts_size).to be_nil
+ end
+ end
+
+ context 'with admin mode enabled', :enable_admin_mode do
+ it 'sets max_artifacts_size' do
+ subject
+
+ project.reload
+ expect(project.max_artifacts_size).to eq(10)
+ end
end
end
end
end
end
+
+ describe 'GET #runner_setup_scripts' do
+ it 'renders the setup scripts' do
+ get :runner_setup_scripts, params: { os: 'linux', arch: 'amd64', namespace_id: project.namespace, project_id: project }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key("install")
+ expect(json_response).to have_key("register")
+ end
+
+ it 'renders errors if they occur' do
+ get :runner_setup_scripts, params: { os: 'foo', arch: 'bar', namespace_id: project.namespace, project_id: project }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to have_key("errors")
+ end
+ end
end
- describe 'GET #runner_setup_scripts' do
- it 'renders the setup scripts' do
- get :runner_setup_scripts, params: { os: 'linux', arch: 'amd64', namespace_id: project.namespace, project_id: project }
+ context 'as a developer' do
+ before do
+ sign_in(user)
+ project.add_developer(user)
+ get :show, params: { namespace_id: project.namespace, project_id: project }
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to have_key("install")
- expect(json_response).to have_key("register")
+ it 'responds with 404' do
+ expect(response).to have_gitlab_http_status(:not_found)
end
+ end
- it 'renders errors if they occur' do
- get :runner_setup_scripts, params: { os: 'foo', arch: 'bar', namespace_id: project.namespace, project_id: project }
+ context 'as a reporter' do
+ before do
+ sign_in(user)
+ project.add_reporter(user)
+ get :show, params: { namespace_id: project.namespace, project_id: project }
+ end
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response).to have_key("errors")
+ it 'responds with 404' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'as an unauthenticated user' do
+ before do
+ get :show, params: { namespace_id: project.namespace, project_id: project }
+ end
+
+ it 'redirects to sign in' do
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to('/users/sign_in')
end
end
end
diff --git a/spec/controllers/projects/settings/integrations_controller_spec.rb b/spec/controllers/projects/settings/integrations_controller_spec.rb
index e6ca088a533..8ee9f22aa7f 100644
--- a/spec/controllers/projects/settings/integrations_controller_spec.rb
+++ b/spec/controllers/projects/settings/integrations_controller_spec.rb
@@ -138,7 +138,7 @@ RSpec.describe Projects::Settings::IntegrationsController do
end
end
- context 'when unsuccessful' do
+ context 'when unsuccessful', :clean_gitlab_redis_rate_limiting do
it 'returns an error response when the integration test fails' do
stub_request(:get, 'http://example.com/rest/api/2/serverInfo')
.to_return(status: 404)
@@ -148,7 +148,7 @@ RSpec.describe Projects::Settings::IntegrationsController do
expect(response).to be_successful
expect(json_response).to eq(
'error' => true,
- 'message' => 'Connection failed. Please check your settings.',
+ 'message' => 'Connection failed. Check your integration settings.',
'service_response' => '',
'test_failed' => true
)
@@ -163,7 +163,7 @@ RSpec.describe Projects::Settings::IntegrationsController do
expect(response).to be_successful
expect(json_response).to eq(
'error' => true,
- 'message' => 'Connection failed. Please check your settings.',
+ 'message' => 'Connection failed. Check your integration settings.',
'service_response' => "URL 'http://127.0.0.1' is blocked: Requests to localhost are not allowed",
'test_failed' => true
)
@@ -177,13 +177,33 @@ RSpec.describe Projects::Settings::IntegrationsController do
expect(response).to be_successful
expect(json_response).to eq(
'error' => true,
- 'message' => 'Connection failed. Please check your settings.',
+ 'message' => 'Connection failed. Check your integration settings.',
'service_response' => 'Connection refused',
'test_failed' => true
)
end
end
end
+
+ context 'when the endpoint receives requests above the limit', :freeze_time, :clean_gitlab_redis_rate_limiting do
+ before do
+ allow(Gitlab::ApplicationRateLimiter).to receive(:rate_limits)
+ .and_return(project_testing_integration: { threshold: 1, interval: 1.minute })
+ end
+
+ it 'prevents making test requests' do
+ stub_jira_integration_test
+
+ expect_next_instance_of(::Integrations::Test::ProjectService) do |service|
+ expect(service).to receive(:execute).and_return(http_status: 200)
+ end
+
+ 2.times { post :test, params: project_params(service: integration_params) }
+
+ expect(response.body).to eq(_('This endpoint has been requested too many times. Try again later.'))
+ expect(response).to have_gitlab_http_status(:too_many_requests)
+ end
+ end
end
describe 'PUT #update' do
diff --git a/spec/controllers/projects/settings/operations_controller_spec.rb b/spec/controllers/projects/settings/operations_controller_spec.rb
index c1fa91e9f8b..76d8191e342 100644
--- a/spec/controllers/projects/settings/operations_controller_spec.rb
+++ b/spec/controllers/projects/settings/operations_controller_spec.rb
@@ -437,108 +437,6 @@ RSpec.describe Projects::Settings::OperationsController do
end
end
- context 'tracing integration' do
- describe 'GET #show' do
- context 'with existing setting' do
- let_it_be(:setting) do
- create(:project_tracing_setting, project: project)
- end
-
- it 'loads existing setting' do
- get :show, params: project_params(project)
-
- expect(controller.helpers.tracing_setting).to eq(setting)
- end
- end
-
- context 'without an existing setting' do
- it 'builds a new setting' do
- get :show, params: project_params(project)
-
- expect(controller.helpers.tracing_setting).to be_new_record
- end
- end
- end
-
- describe 'PATCH #update' do
- let_it_be(:external_url) { 'https://gitlab.com' }
-
- let(:params) do
- {
- tracing_setting_attributes: {
- external_url: external_url
- }
- }
- end
-
- it_behaves_like 'PATCHable'
-
- describe 'gitlab tracking', :snowplow do
- shared_examples 'event tracking' do
- it 'tracks an event' do
- expect_snowplow_event(
- category: 'project:operations:tracing',
- action: 'external_url_populated',
- user: user,
- project: project,
- namespace: project.namespace
- )
- end
- end
-
- shared_examples 'no event tracking' do
- it 'does not track an event' do
- expect_no_snowplow_event
- end
- end
-
- before do
- make_request
- end
-
- subject(:make_request) do
- patch :update, params: project_params(project, params), format: :json
- end
-
- context 'without existing setting' do
- context 'when creating a new setting' do
- it_behaves_like 'event tracking'
- end
-
- context 'with invalid external_url' do
- let_it_be(:external_url) { nil }
-
- it_behaves_like 'no event tracking'
- end
- end
-
- context 'with existing setting' do
- let_it_be(:existing_setting) do
- create(:project_tracing_setting,
- project: project,
- external_url: external_url)
- end
-
- context 'when changing external_url' do
- let_it_be(:external_url) { 'https://example.com' }
-
- it_behaves_like 'no event tracking'
- end
-
- context 'with unchanged external_url' do
- it_behaves_like 'no event tracking'
- end
-
- context 'with invalid external_url' do
- let_it_be(:external_url) { nil }
-
- it_behaves_like 'no event tracking'
- end
- end
- end
- end
- end
-
private
def project_params(project, params = {})
diff --git a/spec/controllers/projects/tracings_controller_spec.rb b/spec/controllers/projects/tracings_controller_spec.rb
deleted file mode 100644
index 80e21349e20..00000000000
--- a/spec/controllers/projects/tracings_controller_spec.rb
+++ /dev/null
@@ -1,72 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::TracingsController do
- let_it_be(:user) { create(:user) }
-
- describe 'GET show' do
- shared_examples 'user with read access' do |visibility_level|
- let(:project) { create(:project, visibility_level) }
-
- %w[developer maintainer].each do |role|
- context "with a #{visibility_level} project and #{role} role" do
- before do
- project.add_role(user, role)
- end
-
- it 'renders OK' do
- get :show, params: { namespace_id: project.namespace, project_id: project }
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template(:show)
- end
- end
- end
- end
-
- shared_examples 'user without read access' do |visibility_level|
- let(:project) { create(:project, visibility_level) }
-
- %w[guest reporter].each do |role|
- context "with a #{visibility_level} project and #{role} role" do
- before do
- project.add_role(user, role)
- end
-
- it 'returns 404' do
- get :show, params: { namespace_id: project.namespace, project_id: project }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
- end
-
- before do
- sign_in(user)
- end
-
- context 'with maintainer role' do
- it_behaves_like 'user with read access', :public
- it_behaves_like 'user with read access', :internal
- it_behaves_like 'user with read access', :private
-
- context 'feature flag disabled' do
- before do
- stub_feature_flags(monitor_tracing: false)
- end
-
- it_behaves_like 'user without read access', :public
- it_behaves_like 'user without read access', :internal
- it_behaves_like 'user without read access', :private
- end
- end
-
- context 'without maintainer role' do
- it_behaves_like 'user without read access', :public
- it_behaves_like 'user without read access', :internal
- it_behaves_like 'user without read access', :private
- end
- end
-end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 537f7aa5fee..34477a7bb68 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -409,7 +409,7 @@ RSpec.describe ProjectsController do
before do
project.update!(visibility: project_visibility.to_s)
- project.team.add_user(user, :guest) if user_type == :member
+ project.team.add_member(user, :guest) if user_type == :member
sign_in(user) unless user_type == :anonymous
end
@@ -1432,9 +1432,11 @@ RSpec.describe ProjectsController do
shared_examples 'rate limits project export endpoint' do
before do
- allow(Gitlab::ApplicationRateLimiter)
- .to receive(:increment)
- .and_return(Gitlab::ApplicationRateLimiter.rate_limits["project_#{action}".to_sym][:threshold].call + 1)
+ allow_next_instance_of(Gitlab::ApplicationRateLimiter::BaseStrategy) do |strategy|
+ allow(strategy)
+ .to receive(:increment)
+ .and_return(Gitlab::ApplicationRateLimiter.rate_limits["project_#{action}".to_sym][:threshold].call + 1)
+ end
end
it 'prevents requesting project export' do
@@ -1546,9 +1548,11 @@ RSpec.describe ProjectsController do
context 'when the endpoint receives requests above the limit', :clean_gitlab_redis_rate_limiting do
before do
- allow(Gitlab::ApplicationRateLimiter)
- .to receive(:increment)
- .and_return(Gitlab::ApplicationRateLimiter.rate_limits[:project_download_export][:threshold].call + 1)
+ allow_next_instance_of(Gitlab::ApplicationRateLimiter::BaseStrategy) do |strategy|
+ allow(strategy)
+ .to receive(:increment)
+ .and_return(Gitlab::ApplicationRateLimiter.rate_limits[:project_download_export][:threshold].call + 1)
+ end
end
it 'prevents requesting project export' do
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index 36b230103db..c5a97812d1f 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -178,7 +178,8 @@ RSpec.describe RegistrationsController do
category: 'RegistrationsController',
action: 'accepted',
label: 'invite_email',
- property: member.id.to_s
+ property: member.id.to_s,
+ user: member.reload.user
)
end
end
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index 4abcd414e51..b4d4e01e972 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -67,7 +67,7 @@ RSpec.describe SearchController do
end
end
- describe 'GET #show' do
+ describe 'GET #show', :snowplow do
it_behaves_like 'when the user cannot read cross project', :show, { search: 'hello' } do
it 'still allows accessing the search page' do
get :show
@@ -257,6 +257,16 @@ RSpec.describe SearchController do
end
end
+ it_behaves_like 'Snowplow event tracking' do
+ subject { get :show, params: { group_id: namespace.id, scope: 'blobs', search: 'term' } }
+
+ let(:project) { nil }
+ let(:category) { described_class.to_s }
+ let(:action) { 'i_search_total' }
+ let(:namespace) { create(:group) }
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ end
+
context 'on restricted projects' do
context 'when signed out' do
before do
@@ -398,10 +408,11 @@ RSpec.describe SearchController do
expect(payload[:metadata]['meta.search.filters.confidential']).to eq('true')
expect(payload[:metadata]['meta.search.filters.state']).to eq('true')
expect(payload[:metadata]['meta.search.project_ids']).to eq(%w(456 789))
- expect(payload[:metadata]['meta.search.search_level']).to eq('multi-project')
+ expect(payload[:metadata]['meta.search.type']).to eq('basic')
+ expect(payload[:metadata]['meta.search.level']).to eq('global')
end
- get :show, params: { scope: 'issues', search: 'hello world', group_id: '123', project_id: '456', project_ids: %w(456 789), search_level: 'multi-project', confidential: true, state: true, force_search_results: true }
+ get :show, params: { scope: 'issues', search: 'hello world', group_id: '123', project_id: '456', project_ids: %w(456 789), confidential: true, state: true, force_search_results: true }
end
it 'appends the default scope in meta.search.scope' do
@@ -413,6 +424,16 @@ RSpec.describe SearchController do
get :show, params: { search: 'hello world', group_id: '123', project_id: '456' }
end
+
+ it 'appends the search time based on the search' do
+ expect(controller).to receive(:append_info_to_payload).and_wrap_original do |method, payload|
+ method.call(payload)
+
+ expect(payload[:metadata][:global_search_duration_s]).to be_a_kind_of(Numeric)
+ end
+
+ get :show, params: { search: 'hello world', group_id: '123', project_id: '456' }
+ end
end
context 'abusive searches', :aggregate_failures do
@@ -430,18 +451,6 @@ RSpec.describe SearchController do
make_abusive_request
expect(response).to have_gitlab_http_status(:ok)
end
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(prevent_abusive_searches: false)
- end
-
- it 'returns a regular search result' do
- expect(Gitlab::EmptySearchResults).not_to receive(:new)
- make_abusive_request
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
end
end
diff --git a/spec/db/docs_spec.rb b/spec/db/docs_spec.rb
index 20746e107fb..ad3705c3dbe 100644
--- a/spec/db/docs_spec.rb
+++ b/spec/db/docs_spec.rb
@@ -4,8 +4,11 @@ require 'spec_helper'
RSpec.describe 'Database Documentation' do
context 'for each table' do
+ # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/366834
+ let(:database_base_models) { Gitlab::Database.database_base_models.select { |k, _| k != 'geo' } }
+
let(:all_tables) do
- Gitlab::Database.database_base_models.flat_map { |_, m| m.connection.tables }.sort.uniq
+ database_base_models.flat_map { |_, m| m.connection.tables }.sort.uniq
end
let(:metadata_required_fields) do
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 8070e17b7af..bd13f86034a 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -114,11 +114,17 @@ RSpec.describe 'Database schema' do
context 'all foreign keys' do
# for index to be effective, the FK constraint has to be at first place
it 'are indexed' do
- first_indexed_column = indexes.map(&:columns).map do |columns|
+ first_indexed_column = indexes.filter_map do |index|
+ columns = index.columns
+
# In cases of complex composite indexes, a string is returned eg:
# "lower((extern_uid)::text), group_id"
columns = columns.split(',') if columns.is_a?(String)
- columns.first.chomp
+ column = columns.first.chomp
+
+ # A partial index is not suitable for a foreign key column, unless
+ # the only condition is for the presence of the foreign key itself
+ column if index.where.nil? || index.where == "(#{column} IS NOT NULL)"
end
foreign_keys_columns = all_foreign_keys.map(&:column)
required_indexed_columns = foreign_keys_columns - ignored_index_columns(table)
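
The reworked check above keeps an index's first column only when the index is not partial, or when its only condition is the presence of that column. A standalone sketch of that filter_map logic, using made-up index hashes in place of ActiveRecord index objects:

# Hypothetical data standing in for connection.indexes(table) results.
indexes = [
  { columns: 'lower((extern_uid)::text), group_id', where: nil },
  { columns: %w[project_id user_id],                where: '(project_id IS NOT NULL)' },
  { columns: %w[user_id],                           where: '(state = 1)' }
]

first_indexed_column = indexes.filter_map do |index|
  columns = index[:columns]
  # Complex composite indexes come back as a single string,
  # e.g. "lower((extern_uid)::text), group_id".
  columns = columns.split(',') if columns.is_a?(String)
  column = columns.first.chomp

  # Keep the column unless the index is partial on some other condition.
  column if index[:where].nil? || index[:where] == "(#{column} IS NOT NULL)"
end

first_indexed_column # => ["lower((extern_uid)::text)", "project_id"]
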
diff --git a/spec/events/pages/page_deleted_event_spec.rb b/spec/events/pages/page_deleted_event_spec.rb
index ee05b770c48..8fcd807eeb4 100644
--- a/spec/events/pages/page_deleted_event_spec.rb
+++ b/spec/events/pages/page_deleted_event_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Pages::PageDeletedEvent do
where(:data, :valid) do
[
[{ project_id: 1, namespace_id: 2 }, true],
+ [{ project_id: 1, namespace_id: 2, root_namespace_id: 3 }, true],
[{ project_id: 1 }, false],
[{ namespace_id: 1 }, false],
[{ project_id: 'foo', namespace_id: 2 }, false],
diff --git a/spec/events/pages/page_deployed_event_spec.rb b/spec/events/pages/page_deployed_event_spec.rb
new file mode 100644
index 00000000000..0c33a95b281
--- /dev/null
+++ b/spec/events/pages/page_deployed_event_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Pages::PageDeployedEvent do
+ where(:data, :valid) do
+ [
+ [{ project_id: 1, namespace_id: 2, root_namespace_id: 3 }, true],
+ [{ project_id: 1 }, false],
+ [{ namespace_id: 1 }, false],
+ [{ project_id: 'foo', namespace_id: 2 }, false],
+ [{ project_id: 1, namespace_id: 'foo' }, false],
+ [{ project_id: [], namespace_id: 2 }, false],
+ [{ project_id: 1, namespace_id: [] }, false],
+ [{ project_id: {}, namespace_id: 2 }, false],
+ [{ project_id: 1, namespace_id: {} }, false],
+ ['foo', false],
+ [123, false],
+ [[], false]
+ ]
+ end
+
+ with_them do
+ it 'validates data' do
+ constructor = -> { described_class.new(data: data) }
+
+ if valid
+ expect { constructor.call }.not_to raise_error
+ else
+ expect { constructor.call }.to raise_error(Gitlab::EventStore::InvalidEvent)
+ end
+ end
+ end
+end
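
The new event specs above wrap construction in a lambda and assert whether it raises. The same pattern in plain Ruby, with a hypothetical StrictEvent class and InvalidEvent error standing in for a Gitlab::EventStore event and its error:

InvalidEvent = Class.new(StandardError)

# Hypothetical event that validates its payload on construction.
class StrictEvent
  def initialize(data:)
    raise InvalidEvent, 'data must be a Hash' unless data.is_a?(Hash)
    raise InvalidEvent, 'project_id must be an Integer' unless data[:project_id].is_a?(Integer)

    @data = data
  end
end

valid   = -> { StrictEvent.new(data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 }) }
invalid = -> { StrictEvent.new(data: 'foo') }

valid.call     # builds a StrictEvent without raising
begin
  invalid.call
rescue InvalidEvent => e
  e.message    # => "data must be a Hash"
end
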
diff --git a/spec/events/projects/project_created_event_spec.rb b/spec/events/projects/project_created_event_spec.rb
new file mode 100644
index 00000000000..d70c737afb0
--- /dev/null
+++ b/spec/events/projects/project_created_event_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ProjectCreatedEvent do
+ where(:data, :valid) do
+ [
+ [{ project_id: 1, namespace_id: 2, root_namespace_id: 3 }, true],
+ [{ project_id: 1 }, false],
+ [{ namespace_id: 1 }, false],
+ [{ project_id: 'foo', namespace_id: 2 }, false],
+ [{ project_id: 1, namespace_id: 'foo' }, false],
+ [{ project_id: [], namespace_id: 2 }, false],
+ [{ project_id: 1, namespace_id: [] }, false],
+ [{ project_id: {}, namespace_id: 2 }, false],
+ [{ project_id: 1, namespace_id: {} }, false],
+ ['foo', false],
+ [123, false],
+ [[], false]
+ ]
+ end
+
+ with_them do
+ it 'validates data' do
+ constructor = -> { described_class.new(data: data) }
+
+ if valid
+ expect { constructor.call }.not_to raise_error
+ else
+ expect { constructor.call }.to raise_error(Gitlab::EventStore::InvalidEvent)
+ end
+ end
+ end
+end
diff --git a/spec/events/projects/project_deleted_event_spec.rb b/spec/events/projects/project_deleted_event_spec.rb
index fd8cec7271b..c3de2b22224 100644
--- a/spec/events/projects/project_deleted_event_spec.rb
+++ b/spec/events/projects/project_deleted_event_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Projects::ProjectDeletedEvent do
where(:data, :valid) do
[
[{ project_id: 1, namespace_id: 2 }, true],
+ [{ project_id: 1, namespace_id: 2, root_namespace_id: 3 }, true],
[{ project_id: 1 }, false],
[{ namespace_id: 1 }, false],
[{ project_id: 'foo', namespace_id: 2 }, false],
diff --git a/spec/events/projects/project_path_changed_event_spec.rb b/spec/events/projects/project_path_changed_event_spec.rb
new file mode 100644
index 00000000000..a157428de04
--- /dev/null
+++ b/spec/events/projects/project_path_changed_event_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ProjectPathChangedEvent do
+ where(:data, :valid) do
+ valid_event = {
+ project_id: 1,
+ namespace_id: 2,
+ root_namespace_id: 3,
+ old_path: 'old',
+ new_path: 'new'
+ }
+
+ # All combinations of missing keys
+ with_missing_keys = 0.upto(valid_event.size - 1)
+ .flat_map { |size| valid_event.keys.combination(size).to_a }
+ .map { |keys| [valid_event.slice(*keys), false] }
+
+ [
+ [valid_event, true],
+ *with_missing_keys,
+ [{ project_id: 'foo', namespace_id: 2 }, false],
+ [{ project_id: 1, namespace_id: 'foo' }, false],
+ [{ project_id: [], namespace_id: 2 }, false],
+ [{ project_id: 1, namespace_id: [] }, false],
+ [{ project_id: {}, namespace_id: 2 }, false],
+ [{ project_id: 1, namespace_id: {} }, false],
+ ['foo', false],
+ [123, false],
+ [[], false]
+ ]
+ end
+
+ with_them do
+ it 'validates data' do
+ constructor = -> { described_class.new(data: data) }
+
+ if valid
+ expect { constructor.call }.not_to raise_error
+ else
+ expect { constructor.call }.to raise_error(Gitlab::EventStore::InvalidEvent)
+ end
+ end
+ end
+end
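
The where block above generates every proper subset of the required keys and marks each one invalid. Run on its own, that combination logic looks like this (plain Ruby, same keys as the spec):

valid_event = {
  project_id: 1,
  namespace_id: 2,
  root_namespace_id: 3,
  old_path: 'old',
  new_path: 'new'
}

# Every subset that is missing at least one key, each paired with `false`.
with_missing_keys = 0.upto(valid_event.size - 1)
  .flat_map { |size| valid_event.keys.combination(size).to_a }
  .map { |keys| [valid_event.slice(*keys), false] }

with_missing_keys.size  # => 31 proper subsets of the 5 keys (2**5 - 1)
with_missing_keys.first # => [{}, false]
with_missing_keys.last  # => [{ namespace_id: 2, root_namespace_id: 3, old_path: 'old', new_path: 'new' }, false]
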
diff --git a/spec/factories/ci/runner_versions.rb b/spec/factories/ci/runner_versions.rb
new file mode 100644
index 00000000000..69127aa6e54
--- /dev/null
+++ b/spec/factories/ci/runner_versions.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_runner_version, class: 'Ci::RunnerVersion' do
+ sequence(:version) { |n| "1.0.#{n}" }
+ end
+end
diff --git a/spec/factories/ci/stages.rb b/spec/factories/ci/stages.rb
index 4751c04584e..41297b01f92 100644
--- a/spec/factories/ci/stages.rb
+++ b/spec/factories/ci/stages.rb
@@ -1,24 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_stage, class: 'Ci::LegacyStage' do
- skip_create
-
- transient do
- name { 'test' }
- status { nil }
- warnings { nil }
- pipeline factory: :ci_empty_pipeline
- end
-
- initialize_with do
- Ci::LegacyStage.new(pipeline, name: name,
- status: status,
- warnings: warnings)
- end
- end
-
- factory :ci_stage_entity, class: 'Ci::Stage' do
+ factory :ci_stage, class: 'Ci::Stage' do
project factory: :project
pipeline factory: :ci_empty_pipeline
diff --git a/spec/factories/clusters/applications/helm.rb b/spec/factories/clusters/applications/helm.rb
index 919b45e57e2..6a21df943f5 100644
--- a/spec/factories/clusters/applications/helm.rb
+++ b/spec/factories/clusters/applications/helm.rb
@@ -103,10 +103,6 @@ FactoryBot.define do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
- factory :clusters_applications_elastic_stack, class: 'Clusters::Applications::ElasticStack' do
- cluster factory: %i(cluster with_installed_helm provided_by_gcp)
- end
-
factory :clusters_applications_crossplane, class: 'Clusters::Applications::Crossplane' do
stack { 'gcp' }
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
diff --git a/spec/factories/clusters/clusters.rb b/spec/factories/clusters/clusters.rb
index 7666533691e..72424a3c321 100644
--- a/spec/factories/clusters/clusters.rb
+++ b/spec/factories/clusters/clusters.rb
@@ -100,7 +100,6 @@ FactoryBot.define do
application_runner factory: %i(clusters_applications_runner installed)
application_jupyter factory: %i(clusters_applications_jupyter installed)
application_knative factory: %i(clusters_applications_knative installed)
- application_elastic_stack factory: %i(clusters_applications_elastic_stack installed)
application_cilium factory: %i(clusters_applications_cilium installed)
end
diff --git a/spec/factories/clusters/integrations/elastic_stack.rb b/spec/factories/clusters/integrations/elastic_stack.rb
deleted file mode 100644
index 1ab3256845b..00000000000
--- a/spec/factories/clusters/integrations/elastic_stack.rb
+++ /dev/null
@@ -1,12 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :clusters_integrations_elastic_stack, class: 'Clusters::Integrations::ElasticStack' do
- cluster factory: %i(cluster provided_by_gcp)
- enabled { true }
-
- trait :disabled do
- enabled { false }
- end
- end
-end
diff --git a/spec/factories/error_tracking/open_api.rb b/spec/factories/error_tracking/open_api.rb
new file mode 100644
index 00000000000..ad134701fd0
--- /dev/null
+++ b/spec/factories/error_tracking/open_api.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :error_tracking_open_api_error, class: 'ErrorTrackingOpenAPI::Error' do
+ fingerprint { 1 }
+ project_id { 2 }
+ name { 'ActionView::MissingTemplate' }
+ description { 'Missing template posts/edit' }
+ actor { 'PostsController#edit' }
+ event_count { 3 }
+ approximated_user_count { 4 }
+ first_seen_at { Time.now.iso8601 }
+ last_seen_at { Time.now.iso8601 }
+ status { 'unresolved' }
+
+ skip_create
+ end
+
+ factory :error_tracking_open_api_error_event, class: 'ErrorTrackingOpenAPI::ErrorEvent' do
+ fingerprint { 1 }
+ project_id { 2 }
+ payload { File.read(Rails.root.join('spec/fixtures/error_tracking/parsed_event.json')) }
+ name { 'ActionView::MissingTemplate' }
+ description { 'Missing template posts/edit' }
+ actor { 'PostsController#edit' }
+ environment { 'development' }
+ platform { 'ruby' }
+
+ trait :golang do
+ payload { File.read(Rails.root.join('spec/fixtures/error_tracking/go_parsed_event.json')) }
+ platform { 'go' }
+ end
+
+ trait :browser do
+ payload { File.read(Rails.root.join('spec/fixtures/error_tracking/browser_event.json')) }
+ platform { 'javascript' }
+ end
+
+ skip_create
+ end
+end
diff --git a/spec/factories/gitlab/database/postgres_autovacuum_activity.rb b/spec/factories/gitlab/database/postgres_autovacuum_activity.rb
new file mode 100644
index 00000000000..a770da19a43
--- /dev/null
+++ b/spec/factories/gitlab/database/postgres_autovacuum_activity.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :postgres_autovacuum_activity, class: 'Gitlab::Database::PostgresAutovacuumActivity' do
+ table_identifier { "#{schema}.#{table}" }
+ schema { 'public' }
+ table { 'projects' }
+ vacuum_start { Time.zone.now - 3.minutes }
+ end
+end
diff --git a/spec/factories/import_states.rb b/spec/factories/import_states.rb
index 4dca78b1059..0c73082be57 100644
--- a/spec/factories/import_states.rb
+++ b/spec/factories/import_states.rb
@@ -34,6 +34,10 @@ FactoryBot.define do
status { :failed }
end
+ trait :canceled do
+ status { :canceled }
+ end
+
after(:create) do |import_state, evaluator|
columns = {}
columns[:import_url] = evaluator.import_url unless evaluator.import_url.blank?
diff --git a/spec/factories/integrations.rb b/spec/factories/integrations.rb
index 3945637c2c3..5ac26b7a260 100644
--- a/spec/factories/integrations.rb
+++ b/spec/factories/integrations.rb
@@ -233,7 +233,7 @@ FactoryBot.define do
factory :harbor_integration, class: 'Integrations::Harbor' do
project
active { true }
- type { 'HarborService' }
+ type { 'Integrations::Harbor' }
url { 'https://demo.goharbor.io' }
project_name { 'testproject' }
diff --git a/spec/factories/keys.rb b/spec/factories/keys.rb
index a7478ce2657..f6f06a99494 100644
--- a/spec/factories/keys.rb
+++ b/spec/factories/keys.rb
@@ -3,8 +3,13 @@
FactoryBot.define do
factory :key do
title
- key { SSHData::PrivateKey::RSA.generate(1024, unsafe_allow_small_key: true).public_key.openssh(comment: 'dummy@gitlab.com') }
-
+ key do
+ # Larger keys take longer to generate, and since this factory gets called frequently,
+ # let's only create the smallest one we need.
+ SSHData::PrivateKey::RSA.generate(
+ ::Gitlab::SSHPublicKey.supported_sizes(:rsa).min, unsafe_allow_small_key: true
+ ).public_key.openssh(comment: 'dummy@gitlab.com')
+ end
trait :expired do
to_create { |key| key.save!(validate: false) }
expires_at { 2.days.ago }
@@ -15,8 +20,14 @@ FactoryBot.define do
expires_at { Date.today.beginning_of_day + 3.hours }
end
+ trait :without_md5_fingerprint do
+ after(:create) do |key|
+ key.update_column(:fingerprint, nil)
+ end
+ end
+
factory :key_without_comment do
- key { SSHData::PrivateKey::RSA.generate(1024, unsafe_allow_small_key: true).public_key.openssh }
+ key { SSHData::PrivateKey::RSA.generate(3072, unsafe_allow_small_key: true).public_key.openssh }
end
factory :deploy_key, class: 'DeployKey'
diff --git a/spec/factories/oauth_access_tokens.rb b/spec/factories/oauth_access_tokens.rb
index 8d1075dacbb..8fd8aef9b49 100644
--- a/spec/factories/oauth_access_tokens.rb
+++ b/spec/factories/oauth_access_tokens.rb
@@ -5,6 +5,7 @@ FactoryBot.define do
resource_owner
application
token { Doorkeeper::OAuth::Helpers::UniqueToken.generate }
+ refresh_token { Doorkeeper::OAuth::Helpers::UniqueToken.generate }
scopes { application.scopes }
end
end
diff --git a/spec/factories/project_group_links.rb b/spec/factories/project_group_links.rb
index b1b0f04d84c..84c590e3ea1 100644
--- a/spec/factories/project_group_links.rb
+++ b/spec/factories/project_group_links.rb
@@ -13,7 +13,7 @@ FactoryBot.define do
trait(:maintainer) { group_access { Gitlab::Access::MAINTAINER } }
after(:create) do |project_group_link, evaluator|
- project_group_link.group.refresh_members_authorized_projects
+ AuthorizedProjectUpdate::ProjectRecalculateService.new(project_group_link.project).execute
end
end
end
diff --git a/spec/factories/project_hooks.rb b/spec/factories/project_hooks.rb
index e0b61526ba0..dbb5c357acb 100644
--- a/spec/factories/project_hooks.rb
+++ b/spec/factories/project_hooks.rb
@@ -7,7 +7,7 @@ FactoryBot.define do
project
trait :token do
- token { SecureRandom.hex(10) }
+ token { generate(:token) }
end
trait :all_events_enabled do
@@ -29,5 +29,9 @@ FactoryBot.define do
trait :with_push_branch_filter do
push_events_branch_filter { 'my-branch-*' }
end
+
+ trait :permanently_disabled do
+ recent_failures { WebHook::FAILURE_THRESHOLD + 1 }
+ end
end
end
diff --git a/spec/factories/project_tracing_settings.rb b/spec/factories/project_tracing_settings.rb
deleted file mode 100644
index 05c1529c18e..00000000000
--- a/spec/factories/project_tracing_settings.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :project_tracing_setting do
- project
- external_url { 'https://example.com' }
- end
-end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 86321350962..d60512e2b2a 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -98,7 +98,9 @@ FactoryBot.define do
project.add_owner(project.first_owner)
end
- project.group&.refresh_members_authorized_projects
+ if project.group
+ AuthorizedProjectUpdate::ProjectRecalculateService.new(project).execute
+ end
# assign the delegated `#ci_cd_settings` attributes after create
project.group_runners_enabled = evaluator.group_runners_enabled unless evaluator.group_runners_enabled.nil?
@@ -151,6 +153,10 @@ FactoryBot.define do
import_status { :failed }
end
+ trait :import_canceled do
+ import_status { :canceled }
+ end
+
trait :jira_dvcs_cloud do
before(:create) do |project|
create(:project_feature_usage, :dvcs_cloud, project: project)
@@ -328,9 +334,10 @@ FactoryBot.define do
trait :test_repo do
after :create do |project|
- TestEnv.copy_repo(project,
- bare_repo: TestEnv.factory_repo_path_bare,
- refs: TestEnv::BRANCH_SHA)
+ # There are various tests that rely on there being no repository cache.
+ # Using raw avoids caching.
+ repo = Gitlab::GlRepository::PROJECT.repository_for(project).raw
+ repo.create_from_bundle(TestEnv.factory_repo_bundle_path)
end
end
@@ -428,9 +435,10 @@ FactoryBot.define do
path { 'forked-gitlabhq' }
after :create do |project|
- TestEnv.copy_repo(project,
- bare_repo: TestEnv.forked_repo_path_bare,
- refs: TestEnv::FORKED_BRANCH_SHA)
+ # There are various tests that rely on there being no repository cache.
+ # Using raw avoids caching.
+ repo = Gitlab::GlRepository::PROJECT.repository_for(project).raw
+ repo.create_from_bundle(TestEnv.forked_repo_bundle_path)
end
end
diff --git a/spec/factories/projects/import_export/export_relation.rb b/spec/factories/projects/import_export/export_relation.rb
new file mode 100644
index 00000000000..2b6419dcecb
--- /dev/null
+++ b/spec/factories/projects/import_export/export_relation.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :project_relation_export, class: 'Projects::ImportExport::RelationExport' do
+ project_export_job factory: :project_export_job
+
+ relation { 'labels' }
+ status { 0 }
+ sequence(:jid) { |n| "project_relation_export_#{n}" }
+ end
+end
diff --git a/spec/factories/sequences.rb b/spec/factories/sequences.rb
index c10fab8588d..fd7f9223965 100644
--- a/spec/factories/sequences.rb
+++ b/spec/factories/sequences.rb
@@ -22,4 +22,5 @@ FactoryBot.define do
sequence(:job_name) { |n| "job #{n}" }
sequence(:work_item_type_name) { |n| "bug#{n}" }
sequence(:short_text) { |n| "someText#{n}" }
+ sequence(:token) { SecureRandom.hex(10) }
end
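
The new :token sequence above backs the project hook factory's token { generate(:token) } trait. A self-contained FactoryBot sketch of that sequence-plus-generate pattern; WebhookStub, :hook_token, and :webhook_stub are hypothetical names used instead of GitLab models:

require 'factory_bot'
require 'securerandom'

WebhookStub = Struct.new(:token, keyword_init: true)

FactoryBot.define do
  sequence(:hook_token) { SecureRandom.hex(10) }

  factory :webhook_stub, class: 'WebhookStub' do
    token { generate(:hook_token) }

    skip_create                           # plain Ruby object, nothing to persist
    initialize_with { new(token: token) }
  end
end

FactoryBot.build(:webhook_stub).token     # => a fresh 20-character hex token each time
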
diff --git a/spec/factories/snippets.rb b/spec/factories/snippets.rb
index 21e1d911f96..75e2ef3db02 100644
--- a/spec/factories/snippets.rb
+++ b/spec/factories/snippets.rb
@@ -9,6 +9,10 @@ FactoryBot.define do
file_name { generate(:filename) }
secret { false }
+ transient do
+ repository_storage { 'default' }
+ end
+
trait :public do
visibility_level { Snippet::PUBLIC }
end
@@ -23,12 +27,13 @@ FactoryBot.define do
# Test repository - https://gitlab.com/gitlab-org/gitlab-test
trait :repository do
- after :create do |snippet|
- TestEnv.copy_repo(snippet,
- bare_repo: TestEnv.factory_repo_path_bare,
- refs: TestEnv::BRANCH_SHA)
+ after :create do |snippet, evaluator|
+ snippet.track_snippet_repository(evaluator.repository_storage)
- snippet.track_snippet_repository(snippet.repository.storage)
+ # There are various tests that rely on there being no repository cache.
+ # Using raw avoids caching.
+ repo = Gitlab::GlRepository::SNIPPET.repository_for(snippet).raw
+ repo.create_from_bundle(TestEnv.factory_repo_bundle_path)
end
end
diff --git a/spec/factories/usage_data.rb b/spec/factories/usage_data.rb
index 316e0c2b8d6..0e944b90d0c 100644
--- a/spec/factories/usage_data.rb
+++ b/spec/factories/usage_data.rb
@@ -59,9 +59,6 @@ FactoryBot.define do
create(:alert_management_http_integration, project: projects[0], name: 'DataCat')
create(:alert_management_http_integration, :inactive, project: projects[1], name: 'DataFox')
- # Tracing
- create(:project_tracing_setting, project: projects[0])
-
# Alert Issues
create(:alert_management_alert, issue: issues[0], project: projects[0])
create(:alert_management_alert, issue: alert_bot_issues[0], project: projects[0])
@@ -86,7 +83,6 @@ FactoryBot.define do
# Cluster Integrations
create(:clusters_integrations_prometheus, cluster: gcp_cluster)
- create(:clusters_integrations_elastic_stack, cluster: gcp_cluster)
create(:grafana_integration, project: projects[0], enabled: true)
create(:grafana_integration, project: projects[1], enabled: true)
diff --git a/spec/factories/users/namespace_user_callouts.rb b/spec/factories/users/namespace_user_callouts.rb
new file mode 100644
index 00000000000..fded63d0cce
--- /dev/null
+++ b/spec/factories/users/namespace_user_callouts.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :namespace_callout, class: 'Users::NamespaceCallout' do
+ feature_name { :invite_members_banner }
+
+ user
+ namespace
+ end
+end
diff --git a/spec/factories/work_items.rb b/spec/factories/work_items.rb
index e80aa9cc008..81c9fb6ed87 100644
--- a/spec/factories/work_items.rb
+++ b/spec/factories/work_items.rb
@@ -14,5 +14,10 @@ FactoryBot.define do
issue_type { :task }
association :work_item_type, :default, :task
end
+
+ trait :incident do
+ issue_type { :incident }
+ association :work_item_type, :default, :incident
+ end
end
end
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index 2d541a34f62..040c6a65b7c 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Admin Groups' do
let_it_be(:user) { create :user }
let_it_be(:group) { create :group }
- let_it_be(:current_user) { create(:admin) }
+ let_it_be_with_reload(:current_user) { create(:admin) }
before do
sign_in(current_user)
@@ -231,11 +231,33 @@ RSpec.describe 'Admin Groups' do
it_behaves_like 'adds user into a group' do
let(:user_selector) { user.email }
end
+
+ context 'when membership is set to expire' do
+ it 'renders relative time' do
+ expire_time = Time.current + 2.days
+ current_user.update!(time_display_relative: true)
+ group.add_member(user, Gitlab::Access::REPORTER, expires_at: expire_time)
+
+ visit admin_group_path(group)
+
+ expect(page).to have_content(/Expires in \d day/)
+ end
+
+ it 'renders absolute time' do
+ expire_time = Time.current.tomorrow.middle_of_day
+ current_user.update!(time_display_relative: false)
+ group.add_member(user, Gitlab::Access::REPORTER, expires_at: expire_time)
+
+ visit admin_group_path(group)
+
+ expect(page).to have_content("Expires on #{expire_time.strftime('%b %-d')}")
+ end
+ end
end
describe 'add admin himself to a group' do
before do
- group.add_user(:user, Gitlab::Access::OWNER)
+ group.add_member(:user, Gitlab::Access::OWNER)
end
it 'adds admin a to a group as developer', :js do
@@ -252,7 +274,7 @@ RSpec.describe 'Admin Groups' do
describe 'admin removes themself from a group', :js do
it 'removes admin from the group' do
- group.add_user(current_user, Gitlab::Access::DEVELOPER)
+ group.add_member(current_user, Gitlab::Access::DEVELOPER)
visit group_group_members_path(group)
diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb
index 2166edf65ff..6b147b01991 100644
--- a/spec/features/admin/admin_projects_spec.rb
+++ b/spec/features/admin/admin_projects_spec.rb
@@ -7,15 +7,37 @@ RSpec.describe "Admin::Projects" do
include Spec::Support::Helpers::Features::InviteMembersModalHelper
include Spec::Support::Helpers::ModalHelpers
- let(:user) { create :user }
- let(:project) { create(:project, :with_namespace_settings) }
- let(:current_user) { create(:admin) }
+ let_it_be_with_reload(:user) { create :user }
+ let_it_be_with_reload(:project) { create(:project, :with_namespace_settings) }
+ let_it_be_with_reload(:current_user) { create(:admin) }
before do
sign_in(current_user)
gitlab_enable_admin_mode_sign_in(current_user)
end
+ describe 'when membership is set to expire', :js do
+ it 'renders relative time' do
+ expire_time = Time.current + 2.days
+ current_user.update!(time_display_relative: true)
+ project.add_member(user, Gitlab::Access::REPORTER, expires_at: expire_time)
+
+ visit admin_project_path(project)
+
+ expect(page).to have_content(/Expires in \d day/)
+ end
+
+ it 'renders absolute time' do
+ expire_time = Time.current.tomorrow.middle_of_day
+ current_user.update!(time_display_relative: false)
+ project.add_member(user, Gitlab::Access::REPORTER, expires_at: expire_time)
+
+ visit admin_project_path(project)
+
+ expect(page).to have_content("Expires on #{expire_time.strftime('%b %-d')}")
+ end
+ end
+
describe "GET /admin/projects" do
let!(:archived_project) { create :project, :public, :archived }
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index d312965f6cf..44fd21e510a 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe "Admin Runners" do
let_it_be(:namespace) { create(:namespace) }
let_it_be(:project) { create(:project, namespace: namespace, creator: user) }
- context "runners registration" do
+ describe "runners registration" do
before do
visit admin_runners_path
end
@@ -164,7 +164,9 @@ RSpec.describe "Admin Runners" do
end
describe 'filter by status' do
- let!(:never_contacted) { create(:ci_runner, :instance, description: 'runner-never-contacted', contacted_at: nil) }
+ let!(:never_contacted) do
+ create(:ci_runner, :instance, description: 'runner-never-contacted', contacted_at: nil)
+ end
before do
create(:ci_runner, :instance, description: 'runner-1', contacted_at: Time.zone.now)
@@ -326,13 +328,15 @@ RSpec.describe "Admin Runners" do
visit admin_runners_path
page.within('[data-testid="runner-type-tabs"]') do
click_on 'Instance'
-
- expect(page).to have_link('Instance', class: 'active')
end
end
it_behaves_like 'shows no runners found'
+ it 'shows active tab' do
+ expect(page).to have_link('Instance', class: 'active')
+ end
+
it 'shows no runner' do
expect(page).not_to have_content 'runner-project'
expect(page).not_to have_content 'runner-group'
@@ -402,8 +406,8 @@ RSpec.describe "Admin Runners" do
end
it 'sorts by last contact date' do
- create(:ci_runner, :instance, description: 'runner-1', created_at: '2018-07-12 15:37', contacted_at: '2018-07-12 15:37')
- create(:ci_runner, :instance, description: 'runner-2', created_at: '2018-07-12 16:37', contacted_at: '2018-07-12 16:37')
+ create(:ci_runner, :instance, description: 'runner-1', contacted_at: '2018-07-12')
+ create(:ci_runner, :instance, description: 'runner-2', contacted_at: '2018-07-13')
visit admin_runners_path
@@ -448,13 +452,13 @@ RSpec.describe "Admin Runners" do
it 'updates ACTIVE runner status to paused=false' do
visit admin_runners_path('status[]': 'ACTIVE')
- expect(page).to have_current_path(admin_runners_path('paused[]': 'false') )
+ expect(page).to have_current_path(admin_runners_path('paused[]': 'false'))
end
it 'updates PAUSED runner status to paused=true' do
visit admin_runners_path('status[]': 'PAUSED')
- expect(page).to have_current_path(admin_runners_path('paused[]': 'true') )
+ expect(page).to have_current_path(admin_runners_path('paused[]': 'true'))
end
end
end
@@ -477,7 +481,9 @@ RSpec.describe "Admin Runners" do
describe 'runner show page breadcrumbs' do
it 'contains the current runner id and token' do
page.within '[data-testid="breadcrumb-links"]' do
- expect(page.find('[data-testid="breadcrumb-current-link"]')).to have_link("##{runner.id} (#{runner.short_sha})")
+ expect(page.find('[data-testid="breadcrumb-current-link"]')).to have_link(
+ "##{runner.id} (#{runner.short_sha})"
+ )
end
end
end
@@ -515,16 +521,16 @@ RSpec.describe "Admin Runners" do
describe "Runner edit page" do
let(:runner) { create(:ci_runner, :project) }
+ let!(:project1) { create(:project) }
+ let!(:project2) { create(:project) }
before do
- @project1 = create(:project)
- @project2 = create(:project)
visit edit_admin_runner_path(runner)
wait_for_requests
end
- describe 'runner edit page breadcrumbs' do
+ describe 'breadcrumbs' do
it 'contains the current runner id and token' do
page.within '[data-testid="breadcrumb-links"]' do
expect(page).to have_link("##{runner.id} (#{runner.short_sha})")
@@ -539,7 +545,7 @@ RSpec.describe "Admin Runners" do
end
end
- describe 'when a runner is updated', :js do
+ context 'when a runner is updated', :js do
before do
click_on _('Save changes')
wait_for_requests
@@ -556,21 +562,21 @@ RSpec.describe "Admin Runners" do
describe 'projects' do
it 'contains project names' do
- expect(page).to have_content(@project1.full_name)
- expect(page).to have_content(@project2.full_name)
+ expect(page).to have_content(project1.full_name)
+ expect(page).to have_content(project2.full_name)
end
end
describe 'search' do
before do
search_form = find('#runner-projects-search')
- search_form.fill_in 'search', with: @project1.name
+ search_form.fill_in 'search', with: project1.name
search_form.click_button 'Search'
end
it 'contains name of correct project' do
- expect(page).to have_content(@project1.full_name)
- expect(page).not_to have_content(@project2.full_name)
+ expect(page).to have_content(project1.full_name)
+ expect(page).not_to have_content(project2.full_name)
end
end
@@ -584,12 +590,12 @@ RSpec.describe "Admin Runners" do
assigned_project = page.find('[data-testid="assigned-projects"]')
expect(page).to have_content('Runner assigned to project.')
- expect(assigned_project).to have_content(@project2.path)
+ expect(assigned_project).to have_content(project2.path)
end
end
context 'with specific runner' do
- let(:runner) { create(:ci_runner, :project, projects: [@project1]) }
+ let(:runner) { create(:ci_runner, :project, projects: [project1]) }
before do
visit edit_admin_runner_path(runner)
@@ -599,7 +605,7 @@ RSpec.describe "Admin Runners" do
end
context 'with locked runner' do
- let(:runner) { create(:ci_runner, :project, projects: [@project1], locked: true) }
+ let(:runner) { create(:ci_runner, :project, projects: [project1], locked: true) }
before do
visit edit_admin_runner_path(runner)
@@ -610,7 +616,7 @@ RSpec.describe "Admin Runners" do
end
describe 'disable/destroy' do
- let(:runner) { create(:ci_runner, :project, projects: [@project1]) }
+ let(:runner) { create(:ci_runner, :project, projects: [project1]) }
before do
visit edit_admin_runner_path(runner)
@@ -624,7 +630,7 @@ RSpec.describe "Admin Runners" do
new_runner_project = page.find('[data-testid="unassigned-projects"]')
expect(page).to have_content('Runner unassigned from project.')
- expect(new_runner_project).to have_content(@project1.path)
+ expect(new_runner_project).to have_content(project1.path)
end
end
end
diff --git a/spec/features/admin/admin_sees_background_migrations_spec.rb b/spec/features/admin/admin_sees_background_migrations_spec.rb
index 8edddcf9a9b..faf13374719 100644
--- a/spec/features/admin/admin_sees_background_migrations_spec.rb
+++ b/spec/features/admin/admin_sees_background_migrations_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe "Admin > Admin sees background migrations" do
let_it_be(:admin) { create(:admin) }
+ let(:job_class) { Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob }
let_it_be(:active_migration) { create(:batched_background_migration, :active, table_name: 'active') }
let_it_be(:failed_migration) { create(:batched_background_migration, :failed, table_name: 'failed', total_tuple_count: 100) }
@@ -107,7 +108,8 @@ RSpec.describe "Admin > Admin sees background migrations" do
anything,
batch_min_value: 6,
batch_size: 5,
- job_arguments: failed_migration.job_arguments
+ job_arguments: failed_migration.job_arguments,
+ job_class: job_class
).and_return([6, 10])
end
end
diff --git a/spec/features/admin/admin_system_info_spec.rb b/spec/features/admin/admin_system_info_spec.rb
index 2225f25aa1e..8ff31dfded7 100644
--- a/spec/features/admin/admin_system_info_spec.rb
+++ b/spec/features/admin/admin_system_info_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'Admin System Info' do
expect(page).to have_content 'CPU 2 cores'
expect(page).to have_content 'Memory Usage 4 GB / 16 GB'
expect(page).to have_content 'Disk Usage'
- expect(page).to have_content 'Uptime'
+ expect(page).to have_content 'System started'
end
end
@@ -39,7 +39,7 @@ RSpec.describe 'Admin System Info' do
expect(page).to have_content 'CPU Unable to collect CPU info'
expect(page).to have_content 'Memory Usage 4 GB / 16 GB'
expect(page).to have_content 'Disk Usage'
- expect(page).to have_content 'Uptime'
+ expect(page).to have_content 'System started'
end
end
@@ -54,7 +54,7 @@ RSpec.describe 'Admin System Info' do
expect(page).to have_content 'CPU 2 cores'
expect(page).to have_content 'Memory Usage Unable to collect memory info'
expect(page).to have_content 'Disk Usage'
- expect(page).to have_content 'Uptime'
+ expect(page).to have_content 'System started'
end
end
end
diff --git a/spec/features/admin/users/user_spec.rb b/spec/features/admin/users/user_spec.rb
index 18bb03f4617..bc88b90a2dd 100644
--- a/spec/features/admin/users/user_spec.rb
+++ b/spec/features/admin/users/user_spec.rb
@@ -372,8 +372,8 @@ RSpec.describe 'Admin::Users::User' do
describe 'show user keys', :js do
it do
- key1 = create(:key, user: user, title: 'ssh-rsa Key1', key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC4FIEBXGi4bPU8kzxMefudPIJ08/gNprdNTaO9BR/ndy3+58s2HCTw2xCHcsuBmq+TsAqgEidVq4skpqoTMB+Uot5Uzp9z4764rc48dZiI661izoREoKnuRQSsRqUTHg5wrLzwxlQbl1MVfRWQpqiz/5KjBC7yLEb9AbusjnWBk8wvC1bQPQ1uLAauEA7d836tgaIsym9BrLsMVnR4P1boWD3Xp1B1T/ImJwAGHvRmP/ycIqmKdSpMdJXwxcb40efWVj0Ibbe7ii9eeoLdHACqevUZi6fwfbymdow+FeqlkPoHyGg3Cu4vD/D8+8cRc7mE/zGCWcQ15Var83Tczour Key1')
- key2 = create(:key, user: user, title: 'ssh-rsa Key2', key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDQSTWXhJAX/He+nG78MiRRRn7m0Pb0XbcgTxE0etArgoFoh9WtvDf36HG6tOSg/0UUNcp0dICsNAmhBKdncp6cIyPaXJTURPRAGvhI0/VDk4bi27bRnccGbJ/hDaUxZMLhhrzY0r22mjVf8PF6dvv5QUIQVm1/LeaWYsHHvLgiIjwrXirUZPnFrZw6VLREoBKG8uWvfSXw1L5eapmstqfsME8099oi+vWLR8MgEysZQmD28M73fgW4zek6LDQzKQyJx9nB+hJkKUDvcuziZjGmRFlNgSA2mguERwL1OXonD8WYUrBDGKroIvBT39zS5d9tQDnidEJZ9Y8gv5ViYP7x Key2')
+ key1 = create(:key, user: user, title: 'ssh-rsa Key1')
+ key2 = create(:key, user: user, title: 'ssh-rsa Key2')
visit admin_user_path(user)
diff --git a/spec/features/dashboard/todos/todos_spec.rb b/spec/features/dashboard/todos/todos_spec.rb
index adb43d60306..e02cd182b2c 100644
--- a/spec/features/dashboard/todos/todos_spec.rb
+++ b/spec/features/dashboard/todos/todos_spec.rb
@@ -60,6 +60,21 @@ RSpec.describe 'Dashboard Todos' do
end
end
+ context 'when todo references an issue of type task' do
+ let(:task) { create(:issue, :task, project: project) }
+ let!(:task_todo) { create(:todo, :mentioned, user: user, project: project, target: task, author: author) }
+
+ before do
+ sign_in(user)
+
+ visit dashboard_todos_path
+ end
+
+ it 'displays the correct issue type name' do
+ expect(page).to have_content('mentioned you on task')
+ end
+ end
+
context 'user has an unauthorized todo' do
before do
sign_in(user)
@@ -85,6 +100,10 @@ RSpec.describe 'Dashboard Todos' do
visit dashboard_todos_path
end
+ it 'displays the correct issue type name' do
+ expect(page).to have_content('mentioned you on issue')
+ end
+
it 'has todo present' do
expect(page).to have_selector('.todos-list .todo', count: 1)
end
diff --git a/spec/features/file_uploads/multipart_invalid_uploads_spec.rb b/spec/features/file_uploads/multipart_invalid_uploads_spec.rb
index 91c8e100e6a..cff8b4e61a5 100644
--- a/spec/features/file_uploads/multipart_invalid_uploads_spec.rb
+++ b/spec/features/file_uploads/multipart_invalid_uploads_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe 'Invalid uploads that must be rejected', :api, :js do
# These keys are rejected directly by rack itself.
# The request will not be received by multipart.rb (can't use the 'handling file uploads' shared example)
- it_behaves_like 'rejecting invalid keys', key_name: 'x' * 11000, message: 'Puma caught this error: exceeded available parameter key space (RangeError)'
+ it_behaves_like 'rejecting invalid keys', key_name: 'x' * 11000, message: 'Puma caught this error: exceeded available parameter key space (Rack::QueryParser::ParamsTooDeepError)'
it_behaves_like 'rejecting invalid keys', key_name: 'package[]test', status: 400, message: 'Bad Request'
it_behaves_like 'handling file uploads', 'by rejecting uploads with an invalid key'
diff --git a/spec/features/groups/group_runners_spec.rb b/spec/features/groups/group_runners_spec.rb
index a60b8a60da0..a129db6cb6f 100644
--- a/spec/features/groups/group_runners_spec.rb
+++ b/spec/features/groups/group_runners_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe "Group Runners" do
describe "Group runners page", :js do
let!(:group_registration_token) { group.runners_token }
- context "runners registration" do
+ describe "runners registration" do
before do
visit group_runners_path(group)
end
@@ -128,7 +128,7 @@ RSpec.describe "Group Runners" do
end
end
- context 'filtered search' do
+ describe 'filtered search' do
before do
visit group_runners_path(group)
end
@@ -182,5 +182,45 @@ RSpec.describe "Group Runners" do
end
end
end
+
+ context 'when group_runner_view_ui is enabled' do
+ before do
+ stub_feature_flags(group_runner_view_ui: true)
+ end
+
+ it 'user views runner details' do
+ visit group_runner_path(group, runner)
+
+ expect(page).to have_content "#{s_('Runners|Description')} runner-foo"
+ end
+
+ it 'user edits the runner to be protected' do
+ visit edit_group_runner_path(group, runner)
+
+ expect(page.find_field('runner[access_level]')).not_to be_checked
+
+ check 'runner_access_level'
+ click_button _('Save changes')
+
+ expect(page).to have_content "#{s_('Runners|Configuration')} #{s_('Runners|Protected')}"
+ end
+
+ context 'when a runner has a tag' do
+ before do
+ runner.update!(tag_list: ['tag'])
+ end
+
+ it 'user edits runner not to run untagged jobs' do
+ visit edit_group_runner_path(group, runner)
+
+ page.find_field('runner[tag_list]').set('tag, tag2')
+
+ uncheck 'runner_run_untagged'
+ click_button _('Save changes')
+
+ expect(page).to have_content "#{s_('Runners|Tags')} tag tag2"
+ end
+ end
+ end
end
end
diff --git a/spec/features/groups/group_settings_spec.rb b/spec/features/groups/group_settings_spec.rb
index 019b094ccb5..2f599d24b01 100644
--- a/spec/features/groups/group_settings_spec.rb
+++ b/spec/features/groups/group_settings_spec.rb
@@ -89,7 +89,7 @@ RSpec.describe 'Edit group settings' do
it 'shows the selection menu' do
visit edit_group_path(group)
- expect(page).to have_content('Allowed to create projects')
+ expect(page).to have_content('Roles allowed to create projects')
end
end
@@ -97,7 +97,7 @@ RSpec.describe 'Edit group settings' do
it 'shows the selection menu' do
visit edit_group_path(group)
- expect(page).to have_content('Allowed to create subgroups')
+ expect(page).to have_content('Roles allowed to create subgroups')
end
end
diff --git a/spec/features/groups/import_export/import_file_spec.rb b/spec/features/groups/import_export/import_file_spec.rb
index 3d23451feef..b69b8bf2c19 100644
--- a/spec/features/groups/import_export/import_file_spec.rb
+++ b/spec/features/groups/import_export/import_file_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe 'Import/Export - Group Import', :js do
visit new_group_path
click_link 'Import group'
- fill_in :import_group_name, with: group_name
+ fill_in s_('Groups|Group name'), with: group_name
expect(page).to have_content 'Import group from file'
attach_file(file) do
@@ -41,10 +41,12 @@ RSpec.describe 'Import/Export - Group Import', :js do
group = Group.find_by(name: group_name)
- expect(group).not_to be_nil
- expect(group.description).to eq 'A voluptate non sequi temporibus quam at.'
- expect(group.path).to eq 'test-group-import'
- expect(group.import_state.status).to eq GroupImportState.state_machine.states[:finished].value
+ aggregate_failures do
+ expect(group).not_to be_nil
+ expect(group.description).to eq 'A voluptate non sequi temporibus quam at.'
+ expect(group.path).to eq 'test-group-import'
+ expect(group.import_state.status).to eq GroupImportState.state_machine.states[:finished].value
+ end
end
end
@@ -53,9 +55,9 @@ RSpec.describe 'Import/Export - Group Import', :js do
visit new_group_path
click_link 'Import group'
- fill_in :import_group_name, with: 'Test Group Import'
+ fill_in s_('Groups|Group name'), with: 'Test Group Import'
- fill_in :import_group_path, with: 'custom-path'
+ fill_in s_('Groups|Group URL'), with: 'custom-path'
attach_file(file) do
find('.js-filepicker-button').click
end
@@ -76,8 +78,10 @@ RSpec.describe 'Import/Export - Group Import', :js do
visit new_group_path
click_link 'Import group'
- fill_in :import_group_path, with: 'test-group-import'
- expect(page).to have_content "Group path is already taken. We've suggested one that is available."
+ fill_in s_('Groups|Group URL'), with: 'test-group-import'
+ expect(page).to have_content s_(
+ 'Groups|Group path is unavailable. Path has been replaced with a suggested available path.'
+ )
end
end
end
@@ -89,7 +93,7 @@ RSpec.describe 'Import/Export - Group Import', :js do
visit new_group_path
click_link 'Import group'
- fill_in :import_group_name, with: 'Test Group Import'
+ fill_in s_('Groups|Group name'), with: 'Test Group Import'
attach_file(file) do
find('.js-filepicker-button').click
end
diff --git a/spec/features/groups/merge_requests_spec.rb b/spec/features/groups/merge_requests_spec.rb
index 7541e54f014..be1db970e9d 100644
--- a/spec/features/groups/merge_requests_spec.rb
+++ b/spec/features/groups/merge_requests_spec.rb
@@ -86,7 +86,7 @@ RSpec.describe 'Group merge requests page' do
expect(page).to have_selector('.empty-state')
expect(page).to have_link('Select project to create merge request')
- expect(page).not_to have_selector('.issues-filters')
+ expect(page).to have_selector('.issues-filters')
end
context 'with no open merge requests' do
diff --git a/spec/features/groups/settings/packages_and_registries_spec.rb b/spec/features/groups/settings/packages_and_registries_spec.rb
index d3141da9160..98dc534f54e 100644
--- a/spec/features/groups/settings/packages_and_registries_spec.rb
+++ b/spec/features/groups/settings/packages_and_registries_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Group Packages & Registries settings' do
sign_in(user)
end
- context 'when packges feature is disabled on the group' do
+ context 'when packages feature is disabled on the group' do
before do
stub_packages_setting(enabled: false)
end
@@ -56,26 +56,26 @@ RSpec.describe 'Group Packages & Registries settings' do
expect(sidebar).to have_link _('Packages & Registries')
end
- it 'has a Package Registry section', :js do
+ it 'has a Duplicate packages section', :js do
visit_settings_page
- expect(page).to have_content('Package Registry')
- expect(page).to have_button('Collapse')
+ expect(page).to have_content('Duplicate packages')
end
it 'automatically saves changes to the server', :js do
visit_settings_page
within '[data-testid="maven-settings"]' do
- expect(page).to have_content('Allow duplicates')
+ expect(page).to have_content('Reject packages with the same name and version')
+ expect(page).not_to have_content('Exceptions')
find('.gl-toggle').click
- expect(page).to have_content('Do not allow duplicates')
+ expect(page).to have_content('Exceptions')
visit_settings_page
- expect(page).to have_content('Do not allow duplicates')
+ expect(page).to have_content('Exceptions')
end
end
@@ -83,12 +83,10 @@ RSpec.describe 'Group Packages & Registries settings' do
visit_settings_page
within '[data-testid="maven-settings"]' do
- expect(page).to have_content('Allow duplicates')
+ expect(page).to have_content('Reject packages with the same name and version')
find('.gl-toggle').click
- expect(page).to have_content('Do not allow duplicates')
-
fill_in 'Exceptions', with: ')'
# simulate blur event
@@ -103,11 +101,11 @@ RSpec.describe 'Group Packages & Registries settings' do
visit_sub_group_settings_page
within '[data-testid="maven-settings"]' do
- expect(page).to have_content('Allow duplicates')
+ expect(page).to have_content('Reject packages with the same name and version')
find('.gl-toggle').click
- expect(page).to have_content('Do not allow duplicates')
+ expect(page).to have_content('Exceptions')
end
end
end
diff --git a/spec/features/groups/settings/user_searches_in_settings_spec.rb b/spec/features/groups/settings/user_searches_in_settings_spec.rb
index c7b7b25caa7..998c3d2ca3f 100644
--- a/spec/features/groups/settings/user_searches_in_settings_spec.rb
+++ b/spec/features/groups/settings/user_searches_in_settings_spec.rb
@@ -48,6 +48,6 @@ RSpec.describe 'User searches group settings', :js do
visit group_settings_packages_and_registries_path(group)
end
- it_behaves_like 'can highlight results', 'Use GitLab as a private registry'
+ it_behaves_like 'can highlight results', 'Allow packages with the same name and version'
end
end
diff --git a/spec/features/groups/show_spec.rb b/spec/features/groups/show_spec.rb
index fa8db1befb5..9a1e216c6d2 100644
--- a/spec/features/groups/show_spec.rb
+++ b/spec/features/groups/show_spec.rb
@@ -97,6 +97,31 @@ RSpec.describe 'Group show page' do
end
end
+ context 'when a public project is shared with a private group' do
+ let_it_be(:private_group) { create(:group, :private) }
+ let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:project_group_link) { create(:project_group_link, group: private_group, project: public_project) }
+
+ before do
+ private_group.add_owner(user)
+ sign_in(user)
+ end
+
+ it 'shows warning popover', :js do
+ visit group_path(private_group)
+
+ click_link _('Shared projects')
+
+ wait_for_requests
+
+ page.within("[data-testid=\"group-overview-item-#{public_project.id}\"]") do
+ click_button _('Less restrictive visibility')
+ end
+
+ expect(page).to have_content _('Project visibility level is less restrictive than the group settings.')
+ end
+ end
+
context 'when user does not have permissions to create new subgroups or projects', :js do
before do
group.add_reporter(user)
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index 31390b110e7..ece6167b193 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'Group' do
end
describe 'as a non-admin' do
- it 'creates a group and persists visibility radio selection', :js, :saas do
+ it 'creates a group and persists visibility radio selection', :js do
stub_application_setting(default_group_visibility: :private)
fill_in 'Group name', with: 'test-group'
@@ -499,8 +499,6 @@ RSpec.describe 'Group' do
let_it_be_with_refind(:user) { create(:user) }
before do
- stub_feature_flags(namespace_storage_limit_bypass_date_check: false)
-
group.add_owner(user)
sign_in(user)
end
@@ -509,8 +507,8 @@ RSpec.describe 'Group' do
let_it_be(:storage_enforcement_date) { Date.today + 30 }
before do
- allow_next_found_instance_of(Group) do |g|
- allow(g).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ allow_next_found_instance_of(Group) do |grp|
+ allow(grp).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
end
end
@@ -520,8 +518,8 @@ RSpec.describe 'Group' do
end
it 'does not display the banner in a paid group page' do
- allow_next_found_instance_of(Group) do |g|
- allow(g).to receive(:paid?).and_return(true)
+ allow_next_found_instance_of(Group) do |grp|
+ allow(grp).to receive(:paid?).and_return(true)
end
visit group_path(group)
expect_page_not_to_have_storage_enforcement_banner
@@ -531,12 +529,13 @@ RSpec.describe 'Group' do
visit group_path(group)
expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
find('.js-storage-enforcement-banner [data-testid="close-icon"]').click
+ wait_for_requests
page.refresh
expect_page_not_to_have_storage_enforcement_banner
storage_enforcement_date = Date.today + 13
- allow_next_found_instance_of(Group) do |g|
- allow(g).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ allow_next_found_instance_of(Group) do |grp|
+ allow(grp).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
end
page.refresh
expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
@@ -547,6 +546,7 @@ RSpec.describe 'Group' do
# This test should break and be rewritten after the implementation of the storage_enforcement_date
# TBD: https://gitlab.com/gitlab-org/gitlab/-/issues/350632
it 'does not display the banner in the group page' do
+ stub_feature_flags(namespace_storage_limit_bypass_date_check: false)
visit group_path(group)
expect_page_not_to_have_storage_enforcement_banner
end
diff --git a/spec/features/incidents/incident_details_spec.rb b/spec/features/incidents/incident_details_spec.rb
index dad3dfd3440..7c24943eb6f 100644
--- a/spec/features/incidents/incident_details_spec.rb
+++ b/spec/features/incidents/incident_details_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Incident details', :js do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
let_it_be(:incident) { create(:incident, project: project, author: developer, description: 'description') }
+ let_it_be(:issue) { create(:issue, project: project, author: developer, description: 'Issue description') }
let_it_be(:escalation_status) { create(:incident_management_issuable_escalation_status, issue: incident) }
before_all do
@@ -14,23 +15,24 @@ RSpec.describe 'Incident details', :js do
before do
sign_in(developer)
-
- visit project_issues_incident_path(project, incident)
- wait_for_requests
end
context 'when a developer+ displays the incident' do
- it 'shows the incident' do
+ before do
+ visit project_issues_incident_path(project, incident)
+ wait_for_requests
+ end
+
+ it 'shows correct elements on the page', :aggregate_failures do
+ # shows the incident
page.within('.issuable-details') do
expect(find('h1')).to have_content(incident.title)
end
- end
- it 'does not show design management' do
+ # does not show design management
expect(page).not_to have_selector('.js-design-management')
- end
- it 'shows the incident tabs' do
+ # shows the incident tabs
page.within('.issuable-details') do
incident_tabs = find('[data-testid="incident-tabs"]')
@@ -38,9 +40,8 @@ RSpec.describe 'Incident details', :js do
expect(incident_tabs).to have_content('Summary')
expect(incident_tabs).to have_content(incident.description)
end
- end
- it 'shows the right sidebar mounted with type issue' do
+ # shows the right sidebar mounted with type issue
page.within('.layout-page') do
sidebar = find('.right-sidebar')
@@ -51,12 +52,12 @@ RSpec.describe 'Incident details', :js do
end
end
- context 'escalation status' do
+ describe 'escalation status' do
let(:sidebar) { page.find('.right-sidebar') }
let(:widget) { sidebar.find('[data-testid="escalation_status_container"]') }
let(:expected_dropdown_options) { escalation_status.class::STATUSES.keys.take(3).map { |key| key.to_s.titleize } }
- it 'has an interactable escalation status widget' do
+ it 'has an interactable escalation status widget', :aggregate_failures do
expect(current_status).to have_text(escalation_status.status_name.to_s.titleize)
# list the available statuses
@@ -87,41 +88,41 @@ RSpec.describe 'Incident details', :js do
end
end
- context 'when an incident `issue_type` is edited by a signed in user' do
- it 'routes the user to the incident details page when the `issue_type` is set to incident' do
- wait_for_requests
- project_path = "/#{project.full_path}"
- click_button 'Edit title and description'
- wait_for_requests
+ it 'routes the user to the incident details page when the `issue_type` is set to incident' do
+ visit project_issue_path(project, issue)
+ wait_for_requests
+
+ project_path = "/#{project.full_path}"
+ click_button 'Edit title and description'
+ wait_for_requests
- page.within('[data-testid="issuable-form"]') do
- click_button 'Incident'
- click_button 'Issue'
- click_button 'Save changes'
+ page.within('[data-testid="issuable-form"]') do
+ click_button 'Issue'
+ click_button 'Incident'
+ click_button 'Save changes'
- wait_for_requests
+ wait_for_requests
- expect(page).to have_current_path("#{project_path}/-/issues/#{incident.iid}")
- end
+ expect(page).to have_current_path("#{project_path}/-/issues/incident/#{issue.iid}")
end
end
- context 'when incident details are edited by a signed in user' do
- it 'routes the user to the incident details page when the `issue_type` is set to incident' do
- wait_for_requests
- project_path = "/#{project.full_path}"
- click_button 'Edit title and description'
- wait_for_requests
+ it 'routes the user to the issue details page when the `issue_type` is set to issue' do
+ visit project_issues_incident_path(project, incident)
+ wait_for_requests
- page.within('[data-testid="issuable-form"]') do
- click_button 'Incident'
- click_button 'Issue'
- click_button 'Save changes'
+ project_path = "/#{project.full_path}"
+ click_button 'Edit title and description'
+ wait_for_requests
- wait_for_requests
+ page.within('[data-testid="issuable-form"]') do
+ click_button 'Incident'
+ click_button 'Issue'
+ click_button 'Save changes'
- expect(page).to have_current_path("#{project_path}/-/issues/#{incident.iid}")
- end
+ wait_for_requests
+
+ expect(page).to have_current_path("#{project_path}/-/issues/#{incident.iid}")
end
end
end
diff --git a/spec/features/incidents/incident_timeline_events_spec.rb b/spec/features/incidents/incident_timeline_events_spec.rb
new file mode 100644
index 00000000000..e39f348013c
--- /dev/null
+++ b/spec/features/incidents/incident_timeline_events_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Incident timeline events', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:incident) { create(:incident, project: project) }
+
+ before_all do
+ project.add_developer(developer)
+ end
+
+ before do
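+ # The Timeline tab is behind the incident_timeline feature flag, so enable it explicitly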
+ stub_feature_flags(incident_timeline: true)
+ sign_in(developer)
+
+ visit project_issues_incident_path(project, incident)
+ wait_for_requests
+ click_link 'Timeline'
+ end
+
+ context 'when add event is clicked' do
+ it 'submits event data when save is clicked' do
+ click_button 'Add new timeline event'
+
+ expect(page).to have_selector('.common-note-form')
+
+ fill_in 'Description', with: 'Event note goes here'
+ fill_in 'timeline-input-hours', with: '07'
+ fill_in 'timeline-input-minutes', with: '25'
+
+ click_button 'Save'
+
+ expect(page).to have_selector('.incident-timeline-events')
+
+ page.within '.timeline-event-note' do
+ expect(page).to have_content('Event note goes here')
+ expect(page).to have_content('07:25')
+ end
+ end
+ end
+
+ context 'when delete event is clicked' do
+ before do
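+ # Create a timeline event up front so there is something to delete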
+ click_button 'Add new timeline event'
+ fill_in 'Description', with: 'Event note to delete'
+ click_button 'Save'
+ end
+
+ it 'shows the confirmation modal and deletes the event' do
+ click_button 'More actions'
+
+ page.within '.gl-new-dropdown-item-text-wrapper' do
+ expect(page).to have_content('Delete')
+ page.find('.gl-new-dropdown-item-text-primary', text: 'Delete').click
+ end
+
+ page.within '.modal' do
+ expect(page).to have_content('Delete event')
+ end
+
+ click_button 'Delete event'
+
+ wait_for_requests
+
+ expect(page).to have_content('No timeline items have been added yet.')
+ end
+ end
+end
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index 965e97baadd..fe804dc52d7 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -72,7 +72,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
end
end
- context 'when invite is sent before account is created - ldap or social sign in for manual acceptance edge case' do
+ context 'when invite is sent before account is created - ldap or service sign in for manual acceptance edge case' do
let(:user) { create(:user, email: 'user@example.com') }
context 'when invite clicked and not signed in' do
@@ -221,7 +221,8 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
category: 'RegistrationsController',
action: 'accepted',
label: 'invite_email',
- property: group_invite.id.to_s
+ property: group_invite.id.to_s,
+ user: group_invite.reload.user
)
end
end
diff --git a/spec/features/issuables/issuable_list_spec.rb b/spec/features/issuables/issuable_list_spec.rb
index 0fa2d238b0a..a1e80586c05 100644
--- a/spec/features/issuables/issuable_list_spec.rb
+++ b/spec/features/issuables/issuable_list_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'issuable list', :js do
issuable_types = [:issue, :merge_request]
before do
- project.add_user(user, :developer)
+ project.add_member(user, :developer)
sign_in(user)
issuable_types.each { |type| create_issuables(type) }
end
diff --git a/spec/features/issues/filtered_search/visual_tokens_spec.rb b/spec/features/issues/filtered_search/visual_tokens_spec.rb
index 7a367723609..c44181a60e4 100644
--- a/spec/features/issues/filtered_search/visual_tokens_spec.rb
+++ b/spec/features/issues/filtered_search/visual_tokens_spec.rb
@@ -15,8 +15,8 @@ RSpec.describe 'Visual tokens', :js do
let_it_be(:issue) { create(:issue, project: project) }
before do
- project.add_user(user, :maintainer)
- project.add_user(user_rock, :maintainer)
+ project.add_member(user, :maintainer)
+ project.add_member(user_rock, :maintainer)
sign_in(user)
visit project_issues_path(project)
diff --git a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
index 1c707466b51..5ba09703852 100644
--- a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
+++ b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
@@ -20,25 +20,11 @@ RSpec.describe 'User creates branch and merge request on issue page', :js do
context 'when signed in' do
before do
- project.add_user(user, membership_level)
+ project.add_member(user, membership_level)
sign_in(user)
end
- context 'when ’Create merge request’ button is clicked' do
- before do
- visit project_issue_path(project, issue)
-
- wait_for_requests
-
- click_button('Create merge request')
-
- wait_for_requests
- end
-
- it_behaves_like 'merge request author auto assign'
- end
-
context 'when interacting with the dropdown' do
before do
visit project_issue_path(project, issue)
diff --git a/spec/features/merge_request/batch_comments_spec.rb b/spec/features/merge_request/batch_comments_spec.rb
index f03c812ebb5..fafaea8ac68 100644
--- a/spec/features/merge_request/batch_comments_spec.rb
+++ b/spec/features/merge_request/batch_comments_spec.rb
@@ -45,25 +45,13 @@ RSpec.describe 'Merge request > Batch comments', :js do
expect(page).to have_selector('.note:not(.draft-note)', text: 'Line is wrong')
end
- it 'publishes single comment' do
- write_diff_comment
-
- click_button 'Add comment now'
-
- wait_for_requests
-
- expect(page).not_to have_selector('.draft-note-component', text: 'Line is wrong')
-
- expect(page).to have_selector('.note:not(.draft-note)', text: 'Line is wrong')
- end
-
it 'deletes draft note' do
write_diff_comment
find('.js-note-delete').click
page.within('.modal') do
- click_button('Delete Comment', match: :first)
+ click_button('Delete comment', match: :first)
end
wait_for_requests
diff --git a/spec/features/merge_request/user_comments_on_diff_spec.rb b/spec/features/merge_request/user_comments_on_diff_spec.rb
index 06b29969775..f21929e5275 100644
--- a/spec/features/merge_request/user_comments_on_diff_spec.rb
+++ b/spec/features/merge_request/user_comments_on_diff_spec.rb
@@ -253,7 +253,7 @@ RSpec.describe 'User comments on a diff', :js do
end
page.within('.modal') do
- click_button('Delete Comment', match: :first)
+ click_button('Delete comment', match: :first)
end
page.within('.merge-request-tabs') do
diff --git a/spec/features/merge_request/user_creates_merge_request_spec.rb b/spec/features/merge_request/user_creates_merge_request_spec.rb
index 2bf8e9ba6a4..c8b22bb3125 100644
--- a/spec/features/merge_request/user_creates_merge_request_spec.rb
+++ b/spec/features/merge_request/user_creates_merge_request_spec.rb
@@ -15,39 +15,27 @@ RSpec.describe "User creates a merge request", :js do
sign_in(user)
end
- context 'when completed the compare branches form' do
- before do
- visit(project_new_merge_request_path(project))
+ it "creates a merge request" do
+ visit(project_new_merge_request_path(project))
- find(".js-source-branch").click
- click_link("fix")
+ find(".js-source-branch").click
+ click_link("fix")
- find(".js-target-branch").click
- click_link("feature")
+ find(".js-target-branch").click
+ click_link("feature")
- click_button("Compare branches")
- end
+ click_button("Compare branches")
- it "shows merge request form" do
- page.within('.merge-request-form') do
- expect(page.find('#merge_request_description')['placeholder']).to eq 'Describe the goal of the changes and what reviewers should be aware of.'
- end
+ page.within('.merge-request-form') do
+ expect(page.find('#merge_request_description')['placeholder']).to eq 'Describe the goal of the changes and what reviewers should be aware of.'
end
- context "when completed the merge request form" do
- before do
- fill_in("Title", with: title)
- click_button("Create merge request")
- end
+ fill_in("Title", with: title)
+ click_button("Create merge request")
- it "creates a merge request" do
- page.within(".merge-request") do
- expect(page).to have_content(title)
- end
- end
+ page.within(".merge-request") do
+ expect(page).to have_content(title)
end
-
- it_behaves_like 'merge request author auto assign'
end
context "XSS branch name exists" do
diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb
index 8a310aba77b..d461170c990 100644
--- a/spec/features/merge_request/user_posts_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb
@@ -103,7 +103,7 @@ RSpec.describe 'Merge request > User posts diff notes', :js do
it 'allows commenting' do
should_allow_commenting(find_by_scrolling('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"]'))
- accept_gl_confirm(button_text: 'Delete Comment') do
+ accept_gl_confirm(button_text: 'Delete comment') do
first('button.more-actions-toggle').click
first('.js-note-delete').click
end
diff --git a/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb b/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb
index 56517a97716..60ea168940a 100644
--- a/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb
+++ b/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe 'Merge request > User scrolls to note on load', :js do
note_element = find(collapsed_fragment_id)
expect(note_element.visible?).to eq(true)
- expect(note_element.sibling('.replies-toggle')[:class]).to include('expanded')
+ expect(note_element.sibling('li:nth-child(2)')).to have_button s_('Notes|Collapse replies')
end
end
end
diff --git a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
index 0e9ff98c3e1..8225fcbfd89 100644
--- a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
@@ -124,7 +124,7 @@ RSpec.describe 'Merge request > User sees avatars on diff notes', :js do
it 'removes avatar when note is deleted' do
open_more_actions_dropdown(note)
- accept_gl_confirm(button_text: 'Delete Comment') do
+ accept_gl_confirm(button_text: 'Delete comment') do
find(".note-row-#{note.id} .js-note-delete").click
end
diff --git a/spec/features/merge_request/user_sees_deployment_widget_spec.rb b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
index 81034caaee2..e045f11c0d8 100644
--- a/spec/features/merge_request/user_sees_deployment_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe 'Merge request > User sees deployment widget', :js do
before do
merge_request.update!(merge_commit_sha: sha)
- project.add_user(user, role)
+ project.add_member(user, role)
sign_in(user)
end
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index 2dafd66b406..1d3effd4a2a 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -591,14 +591,14 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'when a new failures exists' do
let(:base_reports) do
- Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ Gitlab::Ci::Reports::TestReport.new.tap do |reports|
reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
reports.get_suite('junit').add_test_case(create_test_case_java_success)
end
end
let(:head_reports) do
- Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ Gitlab::Ci::Reports::TestReport.new.tap do |reports|
reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
reports.get_suite('junit').add_test_case(create_test_case_java_failed)
end
@@ -639,14 +639,14 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'when an existing failure exists' do
let(:base_reports) do
- Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ Gitlab::Ci::Reports::TestReport.new.tap do |reports|
reports.get_suite('rspec').add_test_case(create_test_case_rspec_failed)
reports.get_suite('junit').add_test_case(create_test_case_java_success)
end
end
let(:head_reports) do
- Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ Gitlab::Ci::Reports::TestReport.new.tap do |reports|
reports.get_suite('rspec').add_test_case(create_test_case_rspec_failed)
reports.get_suite('junit').add_test_case(create_test_case_java_success)
end
@@ -686,14 +686,14 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'when a resolved failure exists' do
let(:base_reports) do
- Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ Gitlab::Ci::Reports::TestReport.new.tap do |reports|
reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
reports.get_suite('junit').add_test_case(create_test_case_java_failed)
end
end
let(:head_reports) do
- Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ Gitlab::Ci::Reports::TestReport.new.tap do |reports|
reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
reports.get_suite('junit').add_test_case(create_test_case_java_success)
end
@@ -732,14 +732,14 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'when a new error exists' do
let(:base_reports) do
- Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ Gitlab::Ci::Reports::TestReport.new.tap do |reports|
reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
reports.get_suite('junit').add_test_case(create_test_case_java_success)
end
end
let(:head_reports) do
- Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ Gitlab::Ci::Reports::TestReport.new.tap do |reports|
reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
reports.get_suite('junit').add_test_case(create_test_case_java_error)
end
@@ -779,14 +779,14 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'when an existing error exists' do
let(:base_reports) do
- Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ Gitlab::Ci::Reports::TestReport.new.tap do |reports|
reports.get_suite('rspec').add_test_case(create_test_case_rspec_error)
reports.get_suite('junit').add_test_case(create_test_case_java_success)
end
end
let(:head_reports) do
- Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ Gitlab::Ci::Reports::TestReport.new.tap do |reports|
reports.get_suite('rspec').add_test_case(create_test_case_rspec_error)
reports.get_suite('junit').add_test_case(create_test_case_java_success)
end
@@ -825,14 +825,14 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'when a resolved error exists' do
let(:base_reports) do
- Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ Gitlab::Ci::Reports::TestReport.new.tap do |reports|
reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
reports.get_suite('junit').add_test_case(create_test_case_java_error)
end
end
let(:head_reports) do
- Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ Gitlab::Ci::Reports::TestReport.new.tap do |reports|
reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
reports.get_suite('junit').add_test_case(create_test_case_java_success)
end
@@ -871,7 +871,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'properly truncates the report' do
let(:base_reports) do
- Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ Gitlab::Ci::Reports::TestReport.new.tap do |reports|
10.times do |index|
reports.get_suite('rspec').add_test_case(
create_test_case_rspec_failed(index))
@@ -882,7 +882,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
end
let(:head_reports) do
- Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ Gitlab::Ci::Reports::TestReport.new.tap do |reports|
10.times do |index|
reports.get_suite('rspec').add_test_case(
create_test_case_rspec_failed(index))
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index 9696b1ff551..16b1de0393f 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -123,10 +123,6 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
context 'when actor is a developer in parent project' do
let(:actor) { developer_in_parent }
- before do
- stub_feature_flags(ci_disallow_to_create_merge_request_pipelines_in_target_project: false)
- end
-
it 'creates a pipeline in the parent project when user proceeds with the warning' do
visit project_merge_request_path(parent_project, merge_request)
diff --git a/spec/features/merge_request/user_sees_versions_spec.rb b/spec/features/merge_request/user_sees_versions_spec.rb
index 4465d7e29be..2c2a2dfd4a8 100644
--- a/spec/features/merge_request/user_sees_versions_spec.rb
+++ b/spec/features/merge_request/user_sees_versions_spec.rb
@@ -24,23 +24,21 @@ RSpec.describe 'Merge request > User sees versions', :js do
visit diffs_project_merge_request_path(project, merge_request, params)
end
- shared_examples 'allows commenting' do |file_id:, line_code:, comment:|
+ shared_examples 'allows commenting' do |file_name:, line_text:, comment:|
it do
- diff_file_selector = ".diff-file[id='#{file_id}']"
- line_code = "#{file_id}_#{line_code}"
+ page.within find_by_scrolling('.diff-file', text: file_name) do
+ line_code_element = page.find('.diff-grid-row', text: line_text)
- page.within find_by_scrolling(diff_file_selector) do
- line_code_element = first("[id='#{line_code}']")
# scrolling to element's bottom is required in order for .hover action to work
# otherwise, the element could be hidden underneath a sticky header
scroll_to_elements_bottom(line_code_element)
line_code_element.hover
- first("[id='#{line_code}'] [role='button']").click
+ page.find("[data-testid='left-comment-button']", visible: true).click
- page.within("form[data-line-code='#{line_code}']") do
- fill_in "note[note]", with: comment
- click_button('Add comment now')
- end
+ expect(page).to have_selector("form", count: 1)
+
+ fill_in("note[note]", with: comment)
+ click_button('Add comment now')
wait_for_requests
@@ -59,8 +57,8 @@ RSpec.describe 'Merge request > User sees versions', :js do
end
it_behaves_like 'allows commenting',
- file_id: '7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44',
- line_code: '1_1',
+ file_name: '.gitmodules',
+ line_text: '[submodule "six"]',
comment: 'Typo, please fix.'
end
@@ -107,8 +105,8 @@ RSpec.describe 'Merge request > User sees versions', :js do
end
it_behaves_like 'allows commenting',
- file_id: '7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44',
- line_code: '2_2',
+ file_name: '.gitmodules',
+ line_text: 'path = six',
comment: 'Typo, please fix.'
end
@@ -174,9 +172,9 @@ RSpec.describe 'Merge request > User sees versions', :js do
end
it_behaves_like 'allows commenting',
- file_id: '7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44',
- line_code: '4_4',
- comment: 'Typo, please fix.'
+ file_name: '.gitmodules',
+ line_text: '[submodule "gitlab-shell"]',
+ comment: 'Typo, please fix.'
end
describe 'compare with same version' do
@@ -241,8 +239,8 @@ RSpec.describe 'Merge request > User sees versions', :js do
end
it_behaves_like 'allows commenting',
- file_id: '2f6fcd96b88b36ce98c38da085c795a27d92a3dd',
- line_code: '6_6',
+ file_name: 'files/ruby/popen.rb',
+ line_text: 'RuntimeError',
comment: 'Typo, please fix.'
end
end
diff --git a/spec/features/milestone_spec.rb b/spec/features/milestone_spec.rb
index 5bbd89f1b88..2a1ea1a4e73 100644
--- a/spec/features/milestone_spec.rb
+++ b/spec/features/milestone_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe 'Milestone' do
end
find('input[name="commit"]').click
- expect(find('.alert-danger')).to have_content('already being used for another group or project milestone.')
+ expect(find('.gl-alert-danger')).to have_content('already being used for another group or project milestone.')
end
it 'displays validation message when there is a group milestone with same title' do
diff --git a/spec/features/monitor_sidebar_link_spec.rb b/spec/features/monitor_sidebar_link_spec.rb
index 3c59cd65cdb..b888e2f4171 100644
--- a/spec/features/monitor_sidebar_link_spec.rb
+++ b/spec/features/monitor_sidebar_link_spec.rb
@@ -45,7 +45,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures do
expect(page).not_to have_link('Alerts', href: project_alert_management_index_path(project))
expect(page).not_to have_link('Error Tracking', href: project_error_tracking_index_path(project))
expect(page).not_to have_link('Product Analytics', href: project_product_analytics_path(project))
- expect(page).not_to have_link('Logs', href: project_logs_path(project))
expect(page).not_to have_link('Kubernetes', href: project_clusters_path(project))
end
@@ -78,7 +77,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures do
expect(page).not_to have_link('Alerts', href: project_alert_management_index_path(project))
expect(page).not_to have_link('Error Tracking', href: project_error_tracking_index_path(project))
expect(page).not_to have_link('Product Analytics', href: project_product_analytics_path(project))
- expect(page).not_to have_link('Logs', href: project_logs_path(project))
expect(page).not_to have_link('Kubernetes', href: project_clusters_path(project))
end
@@ -96,7 +94,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures do
expect(page).to have_link('Product Analytics', href: project_product_analytics_path(project))
expect(page).not_to have_link('Alerts', href: project_alert_management_index_path(project))
- expect(page).not_to have_link('Logs', href: project_logs_path(project))
expect(page).not_to have_link('Kubernetes', href: project_clusters_path(project))
end
@@ -113,7 +110,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures do
expect(page).to have_link('Environments', href: project_environments_path(project))
expect(page).to have_link('Error Tracking', href: project_error_tracking_index_path(project))
expect(page).to have_link('Product Analytics', href: project_product_analytics_path(project))
- expect(page).to have_link('Logs', href: project_logs_path(project))
expect(page).to have_link('Kubernetes', href: project_clusters_path(project))
end
@@ -130,7 +126,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures do
expect(page).to have_link('Environments', href: project_environments_path(project))
expect(page).to have_link('Error Tracking', href: project_error_tracking_index_path(project))
expect(page).to have_link('Product Analytics', href: project_product_analytics_path(project))
- expect(page).to have_link('Logs', href: project_logs_path(project))
expect(page).to have_link('Kubernetes', href: project_clusters_path(project))
end
diff --git a/spec/features/nav/top_nav_responsive_spec.rb b/spec/features/nav/top_nav_responsive_spec.rb
index d571327e4b5..4f8e47b5068 100644
--- a/spec/features/nav/top_nav_responsive_spec.rb
+++ b/spec/features/nav/top_nav_responsive_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe 'top nav responsive', :js do
end
it 'has new dropdown', :aggregate_failures do
- click_button('Create new')
+ click_button('Create new...')
expect(page).to have_link('New project', href: new_project_path)
expect(page).to have_link('New group', href: new_group_path)
diff --git a/spec/features/nav/top_nav_tooltip_spec.rb b/spec/features/nav/top_nav_tooltip_spec.rb
index 58bfe1caf65..73e4571e7a2 100644
--- a/spec/features/nav/top_nav_tooltip_spec.rb
+++ b/spec/features/nav/top_nav_tooltip_spec.rb
@@ -15,10 +15,10 @@ RSpec.describe 'top nav tooltips', :js do
page.find(btn).hover
- expect(page).to have_content('Create new')
+ expect(page).to have_content('Create new...')
page.find(btn).click
- expect(page).not_to have_content('Create new')
+ expect(page).not_to have_content('Create new...')
end
end
diff --git a/spec/features/profiles/account_spec.rb b/spec/features/profiles/account_spec.rb
index 6a4a1fca008..4fe0c3d035e 100644
--- a/spec/features/profiles/account_spec.rb
+++ b/spec/features/profiles/account_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'Profile > Account', :js do
sign_in(user)
end
- describe 'Social sign-in' do
+ describe 'Service sign-in' do
context 'when an identity does not exist' do
before do
allow(Devise).to receive_messages(omniauth_configs: { google_oauth2: {} })
diff --git a/spec/features/profiles/oauth_applications_spec.rb b/spec/features/profiles/oauth_applications_spec.rb
index ee1daf69f62..7d8cd2dc6ca 100644
--- a/spec/features/profiles/oauth_applications_spec.rb
+++ b/spec/features/profiles/oauth_applications_spec.rb
@@ -35,9 +35,61 @@ RSpec.describe 'Profile > Applications' do
expect(page).to have_content('Your applications (0)')
expect(page).to have_content('Authorized applications (0)')
end
+ end
+
+ describe 'Authorized applications', :js do
+ let(:other_user) { create(:user) }
+ let(:application) { create(:oauth_application, owner: user) }
+ let(:created_at) { 2.days.ago }
+ let(:token) { create(:oauth_access_token, application: application, resource_owner: user) }
+ let(:anonymous_token) { create(:oauth_access_token, resource_owner: user) }
+
+ context 'with multiple access token types and multiple owners' do
+ let!(:token2) { create(:oauth_access_token, application: application, resource_owner: user) }
+ let!(:other_user_token) { create(:oauth_access_token, application: application, resource_owner: other_user) }
+
+ before do
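+ # Adjust timestamps and detach the anonymous token's application directly; update_column(s) bypasses callbacks and validations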
+ token.update_column(:created_at, created_at)
+ token2.update_column(:created_at, created_at - 1.day)
+ anonymous_token.update_columns(application_id: nil, created_at: 1.day.ago)
+ end
+
+ it 'displays the correct authorized applications' do
+ visit oauth_applications_path
+
+ expect(page).to have_content('Authorized applications (2)')
+
+ page.within('div.oauth-authorized-applications') do
+ # Ensure the correct user's token details are displayed
+ # when the application has more than one token
+ page.within("tr#application_#{application.id}") do
+ expect(page).to have_content(created_at)
+ end
+
+ expect(page).to have_content('Anonymous')
+ expect(page).not_to have_content(other_user_token.created_at)
+ end
+ end
+ end
it 'deletes an authorized application' do
- create(:oauth_access_token, resource_owner: user)
+ token
+ visit oauth_applications_path
+
+ page.within('div.oauth-authorized-applications') do
+ page.within("tr#application_#{application.id}") do
+ click_button 'Revoke'
+ end
+ end
+
+ accept_gl_confirm(button_text: 'Revoke application')
+
+ expect(page).to have_content('The application was revoked access.')
+ expect(page).to have_content('Authorized applications (0)')
+ end
+
+ it 'deletes an anonymous authorized application' do
+ anonymous_token
visit oauth_applications_path
page.within('.oauth-authorized-applications') do
@@ -48,7 +100,6 @@ RSpec.describe 'Profile > Applications' do
accept_gl_confirm(button_text: 'Revoke application')
expect(page).to have_content('The application was revoked access.')
- expect(page).to have_content('Your applications (0)')
expect(page).to have_content('Authorized applications (0)')
end
end
diff --git a/spec/features/profiles/password_spec.rb b/spec/features/profiles/password_spec.rb
index 7eadb74d2d4..07dfbca8cbd 100644
--- a/spec/features/profiles/password_spec.rb
+++ b/spec/features/profiles/password_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'Profile > Password' do
it 'shows an error message' do
fill_passwords('mypassword', 'mypassword2')
- page.within('.alert-danger') do
+ page.within('.gl-alert-danger') do
expect(page).to have_content("Password confirmation doesn't match Password")
end
end
diff --git a/spec/features/projects/blobs/blame_spec.rb b/spec/features/projects/blobs/blame_spec.rb
index bb3b5cd931c..3b2b74b469e 100644
--- a/spec/features/projects/blobs/blame_spec.rb
+++ b/spec/features/projects/blobs/blame_spec.rb
@@ -49,6 +49,12 @@ RSpec.describe 'File blame', :js do
expect(page).to have_css('#L3')
expect(find('.page-link.active')).to have_text('2')
end
+
+ it 'correctly redirects to the prior blame page' do
+ find('.version-link').click
+
+ expect(find('.page-link.active')).to have_text('2')
+ end
end
context 'when feature flag disabled' do
@@ -64,4 +70,37 @@ RSpec.describe 'File blame', :js do
end
end
end
+
+ context 'when blob length is over global max page limit' do
+ before do
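+ # Stub the blame page size to 200 lines so this fixture spans more than one page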
+ stub_const('Projects::BlameService::PER_PAGE', 200)
+ end
+
+ let(:path) { 'files/markdown/ruby-style-guide.md' }
+
+ it 'displays two hundred lines of the file with pagination' do
+ visit_blob_blame(path)
+
+ expect(page).to have_css('.blame-commit')
+ expect(page).to have_css('.gl-pagination')
+
+ expect(page).to have_css('#L1')
+ expect(page).not_to have_css('#L201')
+ expect(find('.page-link.active')).to have_text('1')
+ end
+
+ context 'when user clicks on the next button' do
+ before do
+ visit_blob_blame(path)
+
+ find('.js-next-button').click
+ end
+
+ it 'displays next two hundred lines of the file with pagination' do
+ expect(page).not_to have_css('#L1')
+ expect(page).to have_css('#L201')
+ expect(find('.page-link.active')).to have_text('2')
+ end
+ end
+ end
end
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
index 76baad63cc2..f5cafa2b2ec 100644
--- a/spec/features/projects/blobs/blob_show_spec.rb
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -29,176 +29,162 @@ RSpec.describe 'File blob', :js do
).execute
end
- context 'with refactor_blob_viewer feature flag enabled' do
- context 'Ruby file' do
+ context 'Ruby file' do
+ before do
+ visit_blob('files/ruby/popen.rb')
+
+ wait_for_requests
+ end
+
+ it 'displays the blob' do
+ aggregate_failures do
+ # shows highlighted Ruby code
+ expect(page).to have_css(".js-syntax-highlight")
+ expect(page).to have_content("require 'fileutils'")
+
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
+
+ # shows an enabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+
+ # shows a raw button
+ expect(page).to have_link('Open raw')
+ end
+ end
+
+ it 'displays file actions on all screen sizes' do
+ file_actions_selector = '.file-actions'
+
+ resize_screen_sm
+ expect(page).to have_selector(file_actions_selector, visible: true)
+
+ resize_screen_xs
+ expect(page).to have_selector(file_actions_selector, visible: true)
+ end
+ end
+
+ context 'Markdown file' do
+ context 'visiting directly' do
before do
- visit_blob('files/ruby/popen.rb')
+ visit_blob('files/markdown/ruby-style-guide.md')
wait_for_requests
end
- it 'displays the blob' do
+ it 'displays the blob using the rich viewer' do
aggregate_failures do
- # shows highlighted Ruby code
- expect(page).to have_css(".js-syntax-highlight")
- expect(page).to have_content("require 'fileutils'")
+ # hides the simple viewer
+ expect(page).not_to have_selector('.blob-viewer[data-type="simple"]')
+ expect(page).to have_selector('.blob-viewer[data-type="rich"]')
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
+ # shows rendered Markdown
+ expect(page).to have_link("PEP-8")
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+ # shows a viewer switcher
+ expect(page).to have_selector('.js-blob-viewer-switcher')
+
+ # shows a disabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
# shows a raw button
expect(page).to have_link('Open raw')
end
end
- it 'displays file actions on all screen sizes' do
- file_actions_selector = '.file-actions'
-
- resize_screen_sm
- expect(page).to have_selector(file_actions_selector, visible: true)
-
- resize_screen_xs
- expect(page).to have_selector(file_actions_selector, visible: true)
- end
- end
-
- context 'Markdown file' do
- context 'visiting directly' do
+ context 'switching to the simple viewer' do
before do
- visit_blob('files/markdown/ruby-style-guide.md')
+ find('.js-blob-viewer-switch-btn[data-viewer=simple]').click
wait_for_requests
end
- it 'displays the blob using the rich viewer' do
+ it 'displays the blob using the simple viewer' do
aggregate_failures do
- # hides the simple viewer
- expect(page).not_to have_selector('.blob-viewer[data-type="simple"]')
- expect(page).to have_selector('.blob-viewer[data-type="rich"]')
-
- # shows rendered Markdown
- expect(page).to have_link("PEP-8")
-
- # shows a viewer switcher
- expect(page).to have_selector('.js-blob-viewer-switcher')
+ # hides the rich viewer
+ expect(page).to have_selector('.blob-viewer[data-type="simple"]')
+ expect(page).not_to have_selector('.blob-viewer[data-type="rich"]')
- # shows a disabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
+ # shows highlighted Markdown code
+ expect(page).to have_css(".js-syntax-highlight")
+ expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
- # shows a raw button
- expect(page).to have_link('Open raw')
+ # shows an enabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
end
end
- context 'switching to the simple viewer' do
+ context 'switching to the rich viewer again' do
before do
- find('.js-blob-viewer-switch-btn[data-viewer=simple]').click
+ find('.js-blob-viewer-switch-btn[data-viewer=rich]').click
wait_for_requests
end
- it 'displays the blob using the simple viewer' do
+ it 'displays the blob using the rich viewer' do
aggregate_failures do
- # hides the rich viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]')
- expect(page).not_to have_selector('.blob-viewer[data-type="rich"]')
+ # hides the simple viewer
+ expect(page).not_to have_selector('.blob-viewer[data-type="simple"]')
+ expect(page).to have_selector('.blob-viewer[data-type="rich"]')
- # shows highlighted Markdown code
- expect(page).to have_css(".js-syntax-highlight")
- expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
-
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
- end
- end
-
- context 'switching to the rich viewer again' do
- before do
- find('.js-blob-viewer-switch-btn[data-viewer=rich]').click
-
- wait_for_requests
- end
-
- it 'displays the blob using the rich viewer' do
- aggregate_failures do
- # hides the simple viewer
- expect(page).not_to have_selector('.blob-viewer[data-type="simple"]')
- expect(page).to have_selector('.blob-viewer[data-type="rich"]')
-
- # shows a disabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
- end
+ # shows a disabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
end
end
end
end
+ end
- context 'when ref switch' do
- def switch_ref_to(ref_name)
- first('.qa-branches-select').click # rubocop:disable QA/SelectorUsage
+ context 'when ref switch' do
+ def switch_ref_to(ref_name)
+ first('.qa-branches-select').click # rubocop:disable QA/SelectorUsage
- page.within '.project-refs-form' do
- click_link ref_name
- wait_for_requests
- end
+ page.within '.project-refs-form' do
+ click_link ref_name
+ wait_for_requests
end
+ end
- it 'displays no highlighted number of different ref' do
- Files::UpdateService.new(
- project,
- project.first_owner,
- commit_message: 'Update',
- start_branch: 'feature',
- branch_name: 'feature',
- file_path: 'files/js/application.js',
- file_content: 'new content'
- ).execute
+ it 'displays no highlighted number of different ref' do
+ Files::UpdateService.new(
+ project,
+ project.first_owner,
+ commit_message: 'Update',
+ start_branch: 'feature',
+ branch_name: 'feature',
+ file_path: 'files/js/application.js',
+ file_content: 'new content'
+ ).execute
- project.commit('feature').diffs.diff_files.first
+ project.commit('feature').diffs.diff_files.first
- visit_blob('files/js/application.js', anchor: 'L3')
- switch_ref_to('feature')
+ visit_blob('files/js/application.js', anchor: 'L3')
+ switch_ref_to('feature')
- page.within '.blob-content' do
- expect(page).not_to have_css('.hll')
- end
+ page.within '.blob-content' do
+ expect(page).not_to have_css('.hll')
end
+ end
- context 'successfully change ref of similar name' do
- before do
- project.repository.create_branch('dev')
- project.repository.create_branch('development')
- end
-
- it 'switch ref from longer to shorter ref name' do
- visit_blob('files/js/application.js', ref: 'development')
- switch_ref_to('dev')
-
- aggregate_failures do
- expect(page.find('.file-title-name').text).to eq('application.js')
- expect(page).not_to have_css('flash-container')
- end
- end
+ context 'successfully change ref of similar name' do
+ before do
+ project.repository.create_branch('dev')
+ project.repository.create_branch('development')
+ end
- it 'switch ref from shorter to longer ref name' do
- visit_blob('files/js/application.js', ref: 'dev')
- switch_ref_to('development')
+ it 'switch ref from longer to shorter ref name' do
+ visit_blob('files/js/application.js', ref: 'development')
+ switch_ref_to('dev')
- aggregate_failures do
- expect(page.find('.file-title-name').text).to eq('application.js')
- expect(page).not_to have_css('flash-container')
- end
+ aggregate_failures do
+ expect(page.find('.file-title-name').text).to eq('application.js')
+ expect(page).not_to have_css('flash-container')
end
end
- it 'successfully changes ref when the ref name matches the project name' do
- project.repository.create_branch(project.name)
-
- visit_blob('files/js/application.js', ref: project.name)
- switch_ref_to('master')
+ it 'switch ref from shorter to longer ref name' do
+ visit_blob('files/js/application.js', ref: 'dev')
+ switch_ref_to('development')
aggregate_failures do
expect(page.find('.file-title-name').text).to eq('application.js')
@@ -206,133 +192,216 @@ RSpec.describe 'File blob', :js do
end
end
end
+
+ it 'successfully changes ref when the ref name matches the project name' do
+ project.repository.create_branch(project.name)
+
+ visit_blob('files/js/application.js', ref: project.name)
+ switch_ref_to('master')
+
+ aggregate_failures do
+ expect(page.find('.file-title-name').text).to eq('application.js')
+ expect(page).not_to have_css('flash-container')
+ end
+ end
+ end
+ end
+
+ context 'Markdown rendering' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add RedCarpet and CommonMark Markdown ",
+ file_path: 'files/commonmark/file.md',
+ file_content: "1. one\n - sublist\n"
+ ).execute
end
- context 'Markdown rendering' do
+ context 'when rendering default markdown' do
before do
- project.add_maintainer(project.creator)
+ visit_blob('files/commonmark/file.md')
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add RedCarpet and CommonMark Markdown ",
- file_path: 'files/commonmark/file.md',
- file_content: "1. one\n - sublist\n"
- ).execute
+ wait_for_requests
end
- context 'when rendering default markdown' do
- before do
- visit_blob('files/commonmark/file.md')
-
- wait_for_requests
+ it 'renders using CommonMark' do
+ aggregate_failures do
+ expect(page).to have_content("sublist")
+ expect(page).not_to have_xpath("//ol//li//ul")
end
+ end
+ end
+ end
- it 'renders using CommonMark' do
- aggregate_failures do
- expect(page).to have_content("sublist")
- expect(page).not_to have_xpath("//ol//li//ul")
- end
+ context 'Markdown file (stored in LFS)' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add Markdown in LFS",
+ file_path: 'files/lfs/file.md',
+ file_content: project.repository.blob_at('master', 'files/lfs/lfs_object.iso').data
+ ).execute
+ end
+
+ context 'when LFS is enabled on the project' do
+ before do
+ allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
+ project.update_attribute(:lfs_enabled, true)
+
+ visit_blob('files/lfs/file.md')
+
+ wait_for_requests
+ end
+
+ it 'displays an error' do
+ aggregate_failures do
+ # hides the simple viewer
+ expect(page).not_to have_selector('.blob-viewer[data-type="simple"]')
+ expect(page).not_to have_selector('.blob-viewer[data-type="rich"]')
+
+ # shows an error message
+ expect(page).to have_content('This content could not be displayed because it is stored in LFS. You can download it instead.')
+
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
+
+ # does not show a copy button
+ expect(page).not_to have_selector('.js-copy-blob-source-btn')
+
+ # shows a download button
+ expect(page).to have_link('Download')
end
end
end
- context 'Markdown file (stored in LFS)' do
+ context 'when LFS is disabled on the project' do
before do
- project.add_maintainer(project.creator)
+ visit_blob('files/lfs/file.md')
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add Markdown in LFS",
- file_path: 'files/lfs/file.md',
- file_content: project.repository.blob_at('master', 'files/lfs/lfs_object.iso').data
- ).execute
+ wait_for_requests
end
- context 'when LFS is enabled on the project' do
- before do
- allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
- project.update_attribute(:lfs_enabled, true)
+ it 'displays the blob' do
+ aggregate_failures do
+ # shows text
+ expect(page).to have_content('size 1575078')
+
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
- visit_blob('files/lfs/file.md')
+ # shows an enabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
- wait_for_requests
+ # shows a raw button
+ expect(page).to have_link('Open raw')
end
+ end
+ end
+ end
- it 'displays an error' do
- aggregate_failures do
- # hides the simple viewer
- expect(page).not_to have_selector('.blob-viewer[data-type="simple"]')
- expect(page).not_to have_selector('.blob-viewer[data-type="rich"]')
+ context 'PDF file' do
+ before do
+ project.add_maintainer(project.creator)
- # shows an error message
- expect(page).to have_content('This content could not be displayed because it is stored in LFS. You can download it instead.')
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add PDF",
+ file_path: 'files/test.pdf',
+ file_content: project.repository.blob_at('add-pdf-file', 'files/pdf/test.pdf').data
+ ).execute
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
+ visit_blob('files/test.pdf')
- # does not show a copy button
- expect(page).not_to have_selector('.js-copy-blob-source-btn')
+ wait_for_requests
+ end
- # shows a download button
- expect(page).to have_link('Download')
- end
- end
+ it 'displays the blob' do
+ aggregate_failures do
+ # shows rendered PDF
+ expect(page).to have_selector('.js-pdf-viewer')
+
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
+
+ # does not show a copy button
+ expect(page).not_to have_selector('.js-copy-blob-source-btn')
+
+ # shows a download button
+ expect(page).to have_link('Download')
end
+ end
+ end
- context 'when LFS is disabled on the project' do
- before do
- visit_blob('files/lfs/file.md')
+ context 'Jupyter Notebook file' do
+ before do
+ project.add_maintainer(project.creator)
- wait_for_requests
- end
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add Jupiter Notebook",
+ file_path: 'files/basic.ipynb',
+ file_content: project.repository.blob_at('add-ipython-files', 'files/ipython/basic.ipynb').data
+ ).execute
- it 'displays the blob' do
- aggregate_failures do
- # shows text
- expect(page).to have_content('size 1575078')
+ visit_blob('files/basic.ipynb')
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
+ wait_for_requests
+ end
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+ it 'displays the blob' do
+ aggregate_failures do
+ # shows rendered notebook
+ expect(page).to have_selector('.js-notebook-viewer-mounted')
- # shows a raw button
- expect(page).to have_link('Open raw')
- end
- end
+ # does show a viewer switcher
+ expect(page).to have_selector('.js-blob-viewer-switcher')
+
+ # show a disabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
+
+ # shows a raw button
+ expect(page).to have_link('Open raw')
+
+ # shows a download button
+ expect(page).to have_link('Download')
+
+ # shows the rendered notebook
+ expect(page).to have_content('test')
end
end
+ end
- context 'PDF file' do
+ context 'ISO file (stored in LFS)' do
+ context 'when LFS is enabled on the project' do
before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add PDF",
- file_path: 'files/test.pdf',
- file_content: project.repository.blob_at('add-pdf-file', 'files/pdf/test.pdf').data
- ).execute
+ allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
+ project.update_attribute(:lfs_enabled, true)
- visit_blob('files/test.pdf')
+ visit_blob('files/lfs/lfs_object.iso')
wait_for_requests
end
it 'displays the blob' do
aggregate_failures do
- # shows rendered PDF
- expect(page).to have_selector('.js-pdf-viewer')
+ # shows a download link
+ expect(page).to have_link('Download (1.50 MiB)')
# does not show a viewer switcher
expect(page).not_to have_selector('.js-blob-viewer-switcher')
@@ -346,126 +415,108 @@ RSpec.describe 'File blob', :js do
end
end
- context 'Jupyter Notebook file' do
+ context 'when LFS is disabled on the project' do
before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add Jupiter Notebook",
- file_path: 'files/basic.ipynb',
- file_content: project.repository.blob_at('add-ipython-files', 'files/ipython/basic.ipynb').data
- ).execute
-
- visit_blob('files/basic.ipynb')
+ visit_blob('files/lfs/lfs_object.iso')
wait_for_requests
end
it 'displays the blob' do
aggregate_failures do
- # shows rendered notebook
- expect(page).to have_selector('.js-notebook-viewer-mounted')
+ # shows text
+ expect(page).to have_content('size 1575078')
- # does show a viewer switcher
- expect(page).to have_selector('.js-blob-viewer-switcher')
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
- # show a disabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
+ # shows an enabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
# shows a raw button
expect(page).to have_link('Open raw')
-
- # shows a download button
- expect(page).to have_link('Download')
-
- # shows the rendered notebook
- expect(page).to have_content('test')
end
end
end
+ end
- context 'ISO file (stored in LFS)' do
- context 'when LFS is enabled on the project' do
- before do
- allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
- project.update_attribute(:lfs_enabled, true)
-
- visit_blob('files/lfs/lfs_object.iso')
+ context 'ZIP file' do
+ before do
+ visit_blob('Gemfile.zip')
- wait_for_requests
- end
+ wait_for_requests
+ end
- it 'displays the blob' do
- aggregate_failures do
- # shows a download link
- expect(page).to have_link('Download (1.50 MiB)')
+ it 'displays the blob' do
+ aggregate_failures do
+ # shows a download link
+ expect(page).to have_link('Download (2.11 KiB)')
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
- # does not show a copy button
- expect(page).not_to have_selector('.js-copy-blob-source-btn')
+ # does not show a copy button
+ expect(page).not_to have_selector('.js-copy-blob-source-btn')
- # shows a download button
- expect(page).to have_link('Download')
- end
- end
+ # shows a download button
+ expect(page).to have_link('Download')
end
+ end
+ end
- context 'when LFS is disabled on the project' do
- before do
- visit_blob('files/lfs/lfs_object.iso')
+ context 'binary file that appears to be text in the first 1024 bytes' do
+ before do
+ visit_blob('encoding/binary-1.bin', ref: 'binary-encoding')
+ end
- wait_for_requests
- end
+ it 'displays the blob' do
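+ # shows a download link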
+ expect(page).to have_link('Download (23.81 KiB)')
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
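+ # does not show an enabled copy button or a raw button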
+ expect(page).not_to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+ expect(page).not_to have_link('Open raw')
+ end
+ end
- it 'displays the blob' do
- aggregate_failures do
- # shows text
- expect(page).to have_content('size 1575078')
+ context 'empty file' do
+ before do
+ project.add_maintainer(project.creator)
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add empty file",
+ file_path: 'files/empty.md',
+ file_content: ''
+ ).execute
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+ visit_blob('files/empty.md')
- # shows a raw button
- expect(page).to have_link('Open raw')
- end
- end
- end
+ wait_for_requests
end
- context 'ZIP file' do
- before do
- visit_blob('Gemfile.zip')
+ it 'displays an error' do
+ aggregate_failures do
+ # shows an error message
+ expect(page).to have_content('Empty file')
- wait_for_requests
- end
-
- it 'displays the blob' do
- aggregate_failures do
- # shows a download link
- expect(page).to have_link('Download (2.11 KiB)')
-
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
- # does not show a copy button
- expect(page).not_to have_selector('.js-copy-blob-source-btn')
+ # does not show a copy button
+ expect(page).not_to have_selector('.js-copy-blob-source-btn')
- # shows a download button
- expect(page).to have_link('Download')
- end
+ # does not show a download or raw button
+ expect(page).not_to have_link('Download')
+ expect(page).not_to have_link('Open raw')
end
end
+ end
- context 'empty file' do
+ context 'files with auxiliary viewers' do
+ describe '.gitlab-ci.yml' do
before do
project.add_maintainer(project.creator)
@@ -474,660 +525,586 @@ RSpec.describe 'File blob', :js do
project.creator,
start_branch: 'master',
branch_name: 'master',
- commit_message: "Add empty file",
- file_path: 'files/empty.md',
- file_content: ''
+ commit_message: "Add .gitlab-ci.yml",
+ file_path: '.gitlab-ci.yml',
+ file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
).execute
- visit_blob('files/empty.md')
-
- wait_for_requests
+ visit_blob('.gitlab-ci.yml')
end
- it 'displays an error' do
+ it 'displays an auxiliary viewer' do
aggregate_failures do
- # shows an error message
- expect(page).to have_content('Empty file')
+ # shows that configuration is valid
+ expect(page).to have_content('This GitLab CI configuration is valid.')
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
-
- # does not show a copy button
- expect(page).not_to have_selector('.js-copy-blob-source-btn')
-
- # does not show a download or raw button
- expect(page).not_to have_link('Download')
- expect(page).not_to have_link('Open raw')
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
end
end
end
- context 'files with auxiliary viewers' do
- describe '.gitlab-ci.yml' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add .gitlab-ci.yml",
- file_path: '.gitlab-ci.yml',
- file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
- ).execute
-
- visit_blob('.gitlab-ci.yml')
- end
+ describe '.gitlab/route-map.yml' do
+ before do
+ project.add_maintainer(project.creator)
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that configuration is valid
- expect(page).to have_content('This GitLab CI configuration is valid.')
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add .gitlab/route-map.yml",
+ file_path: '.gitlab/route-map.yml',
+ file_content: <<-MAP.strip_heredoc
+ # Team data
+ - source: 'data/team.yml'
+ public: 'team/'
+ MAP
+ ).execute
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
+ visit_blob('.gitlab/route-map.yml')
end
- describe '.gitlab/route-map.yml' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add .gitlab/route-map.yml",
- file_path: '.gitlab/route-map.yml',
- file_content: <<-MAP.strip_heredoc
- # Team data
- - source: 'data/team.yml'
- public: 'team/'
- MAP
- ).execute
-
- visit_blob('.gitlab/route-map.yml')
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that map is valid
+ expect(page).to have_content('This Route Map is valid.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
end
+ end
+ end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that map is valid
- expect(page).to have_content('This Route Map is valid.')
+ describe '.gitlab/dashboards/custom-dashboard.yml' do
+ before do
+ project.add_maintainer(project.creator)
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add .gitlab/dashboards/custom-dashboard.yml",
+ file_path: '.gitlab/dashboards/custom-dashboard.yml',
+ file_content: file_content
+ ).execute
end
- describe '.gitlab/dashboards/custom-dashboard.yml' do
+ context 'with metrics_dashboard_exhaustive_validations feature flag off' do
before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add .gitlab/dashboards/custom-dashboard.yml",
- file_path: '.gitlab/dashboards/custom-dashboard.yml',
- file_content: file_content
- ).execute
+ stub_feature_flags(metrics_dashboard_exhaustive_validations: false)
+ visit_blob('.gitlab/dashboards/custom-dashboard.yml')
end
- context 'with metrics_dashboard_exhaustive_validations feature flag off' do
- before do
- stub_feature_flags(metrics_dashboard_exhaustive_validations: false)
- visit_blob('.gitlab/dashboards/custom-dashboard.yml')
- end
+ context 'valid dashboard file' do
+ let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
- context 'valid dashboard file' do
- let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that dashboard yaml is valid
- expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is valid
+ expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
end
end
+ end
- context 'invalid dashboard file' do
- let(:file_content) { "dashboard: 'invalid'" }
+ context 'invalid dashboard file' do
+ let(:file_content) { "dashboard: 'invalid'" }
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that dashboard yaml is invalid
- expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
- expect(page).to have_content("panel_groups: should be an array of panel_groups objects")
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is invalid
+ expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
+ expect(page).to have_content("panel_groups: should be an array of panel_groups objects")
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
end
end
end
+ end
- context 'with metrics_dashboard_exhaustive_validations feature flag on' do
- before do
- stub_feature_flags(metrics_dashboard_exhaustive_validations: true)
- visit_blob('.gitlab/dashboards/custom-dashboard.yml')
- end
+ context 'with metrics_dashboard_exhaustive_validations feature flag on' do
+ before do
+ stub_feature_flags(metrics_dashboard_exhaustive_validations: true)
+ visit_blob('.gitlab/dashboards/custom-dashboard.yml')
+ end
- context 'valid dashboard file' do
- let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
+ context 'valid dashboard file' do
+ let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that dashboard yaml is valid
- expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is valid
+ expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
end
end
+ end
- context 'invalid dashboard file' do
- let(:file_content) { "dashboard: 'invalid'" }
+ context 'invalid dashboard file' do
+ let(:file_content) { "dashboard: 'invalid'" }
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that dashboard yaml is invalid
- expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
- expect(page).to have_content("root is missing required keys: panel_groups")
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is invalid
+ expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
+ expect(page).to have_content("root is missing required keys: panel_groups")
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
end
end
end
end
+ end
- context 'LICENSE' do
- before do
- visit_blob('LICENSE')
- end
+ context 'LICENSE' do
+ before do
+ visit_blob('LICENSE')
+ end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows license
- expect(page).to have_content('This project is licensed under the MIT License.')
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows license
+ expect(page).to have_content('This project is licensed under the MIT License.')
- # shows a learn more link
- expect(page).to have_link('Learn more', href: 'http://choosealicense.com/licenses/mit/')
- end
+ # shows a learn more link
+ expect(page).to have_link('Learn more', href: 'http://choosealicense.com/licenses/mit/')
end
end
+ end
- context '*.gemspec' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add activerecord.gemspec",
- file_path: 'activerecord.gemspec',
- file_content: <<-SPEC.strip_heredoc
- Gem::Specification.new do |s|
- s.platform = Gem::Platform::RUBY
- s.name = "activerecord"
- end
- SPEC
- ).execute
-
- visit_blob('activerecord.gemspec')
- end
+ context '*.gemspec' do
+ before do
+ project.add_maintainer(project.creator)
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows names of dependency manager and package
- expect(page).to have_content('This project manages its dependencies using RubyGems.')
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add activerecord.gemspec",
+ file_path: 'activerecord.gemspec',
+ file_content: <<-SPEC.strip_heredoc
+ Gem::Specification.new do |s|
+ s.platform = Gem::Platform::RUBY
+ s.name = "activerecord"
+ end
+ SPEC
+ ).execute
- # shows a learn more link
- expect(page).to have_link('Learn more', href: 'https://rubygems.org/')
- end
- end
+ visit_blob('activerecord.gemspec')
end
- context 'CONTRIBUTING.md' do
- before do
- file_name = 'CONTRIBUTING.md'
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows names of dependency manager and package
+ expect(page).to have_content('This project manages its dependencies using RubyGems.')
- create_file(file_name, '## Contribution guidelines')
- visit_blob(file_name)
+ # shows a learn more link
+ expect(page).to have_link('Learn more', href: 'https://rubygems.org/')
end
+ end
+ end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("After you've reviewed these contribution guidelines, you'll be all set to contribute to this project.")
- end
+ context 'CONTRIBUTING.md' do
+ before do
+ file_name = 'CONTRIBUTING.md'
+
+ create_file(file_name, '## Contribution guidelines')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("After you've reviewed these contribution guidelines, you'll be all set to contribute to this project.")
end
end
+ end
- context 'CHANGELOG.md' do
- before do
- file_name = 'CHANGELOG.md'
+ context 'CHANGELOG.md' do
+ before do
+ file_name = 'CHANGELOG.md'
- create_file(file_name, '## Changelog for v1.0.0')
- visit_blob(file_name)
- end
+ create_file(file_name, '## Changelog for v1.0.0')
+ visit_blob(file_name)
+ end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("To find the state of this project's repository at the time of any of these versions, check out the tags.")
- end
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("To find the state of this project's repository at the time of any of these versions, check out the tags.")
end
end
+ end
- context 'Cargo.toml' do
- before do
- file_name = 'Cargo.toml'
-
- create_file(file_name, '
- [package]
- name = "hello_world" # the name of the package
- version = "0.1.0" # the current version, obeying semver
- authors = ["Alice <a@example.com>", "Bob <b@example.com>"]
- ')
- visit_blob(file_name)
- end
+ context 'Cargo.toml' do
+ before do
+ file_name = 'Cargo.toml'
+
+ create_file(file_name, '
+ [package]
+ name = "hello_world" # the name of the package
+ version = "0.1.0" # the current version, obeying semver
+ authors = ["Alice <a@example.com>", "Bob <b@example.com>"]
+ ')
+ visit_blob(file_name)
+ end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Cargo.")
- end
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Cargo.")
end
end
+ end
- context 'Cartfile' do
- before do
- file_name = 'Cartfile'
+ context 'Cartfile' do
+ before do
+ file_name = 'Cartfile'
- create_file(file_name, '
- gitlab "Alamofire/Alamofire" == 4.9.0
- gitlab "Alamofire/AlamofireImage" ~> 3.4
- ')
- visit_blob(file_name)
- end
+ create_file(file_name, '
+ gitlab "Alamofire/Alamofire" == 4.9.0
+ gitlab "Alamofire/AlamofireImage" ~> 3.4
+ ')
+ visit_blob(file_name)
+ end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Carthage.")
- end
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Carthage.")
end
end
+ end
- context 'composer.json' do
- before do
- file_name = 'composer.json'
-
- create_file(file_name, '
- {
- "license": "MIT"
- }
- ')
- visit_blob(file_name)
- end
+ context 'composer.json' do
+ before do
+ file_name = 'composer.json'
+
+ create_file(file_name, '
+ {
+ "license": "MIT"
+ }
+ ')
+ visit_blob(file_name)
+ end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Composer.")
- end
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Composer.")
end
end
+ end
- context 'Gemfile' do
- before do
- file_name = 'Gemfile'
+ context 'Gemfile' do
+ before do
+ file_name = 'Gemfile'
- create_file(file_name, '
- source "https://rubygems.org"
+ create_file(file_name, '
+ source "https://rubygems.org"
- # Gems here
- ')
- visit_blob(file_name)
- end
+ # Gems here
+ ')
+ visit_blob(file_name)
+ end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Bundler.")
- end
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Bundler.")
end
end
+ end
- context 'Godeps.json' do
- before do
- file_name = 'Godeps.json'
-
- create_file(file_name, '
- {
- "GoVersion": "go1.6"
- }
- ')
- visit_blob(file_name)
- end
+ context 'Godeps.json' do
+ before do
+ file_name = 'Godeps.json'
+
+ create_file(file_name, '
+ {
+ "GoVersion": "go1.6"
+ }
+ ')
+ visit_blob(file_name)
+ end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using godep.")
- end
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using godep.")
end
end
+ end
- context 'go.mod' do
- before do
- file_name = 'go.mod'
+ context 'go.mod' do
+ before do
+ file_name = 'go.mod'
- create_file(file_name, '
- module example.com/mymodule
+ create_file(file_name, '
+ module example.com/mymodule
- go 1.14
- ')
- visit_blob(file_name)
- end
+ go 1.14
+ ')
+ visit_blob(file_name)
+ end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Go Modules.")
- end
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Go Modules.")
end
end
+ end
- context 'package.json' do
- before do
- file_name = 'package.json'
-
- create_file(file_name, '
- {
- "name": "my-awesome-package",
- "version": "1.0.0"
- }
- ')
- visit_blob(file_name)
- end
+ context 'package.json' do
+ before do
+ file_name = 'package.json'
+
+ create_file(file_name, '
+ {
+ "name": "my-awesome-package",
+ "version": "1.0.0"
+ }
+ ')
+ visit_blob(file_name)
+ end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using npm.")
- end
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using npm.")
end
end
+ end
- context 'podfile' do
- before do
- file_name = 'podfile'
+ context 'podfile' do
+ before do
+ file_name = 'podfile'
- create_file(file_name, 'platform :ios, "8.0"')
- visit_blob(file_name)
- end
+ create_file(file_name, 'platform :ios, "8.0"')
+ visit_blob(file_name)
+ end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using CocoaPods.")
- end
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using CocoaPods.")
end
end
+ end
- context 'test.podspec' do
- before do
- file_name = 'test.podspec'
-
- create_file(file_name, '
- Pod::Spec.new do |s|
- s.name = "TensorFlowLiteC"
- ')
- visit_blob(file_name)
- end
+ context 'test.podspec' do
+ before do
+ file_name = 'test.podspec'
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using CocoaPods.")
- end
- end
+ create_file(file_name, '
+ Pod::Spec.new do |s|
+ s.name = "TensorFlowLiteC"
+ ')
+ visit_blob(file_name)
end
- context 'JSON.podspec.json' do
- before do
- file_name = 'JSON.podspec.json'
-
- create_file(file_name, '
- {
- "name": "JSON"
- }
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using CocoaPods.")
- end
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using CocoaPods.")
end
end
+ end
- context 'requirements.txt' do
- before do
- file_name = 'requirements.txt'
-
- create_file(file_name, 'Project requirements')
- visit_blob(file_name)
- end
+ context 'JSON.podspec.json' do
+ before do
+ file_name = 'JSON.podspec.json'
+
+ create_file(file_name, '
+ {
+ "name": "JSON"
+ }
+ ')
+ visit_blob(file_name)
+ end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using pip.")
- end
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using CocoaPods.")
end
end
+ end
- context 'yarn.lock' do
- before do
- file_name = 'yarn.lock'
+ context 'requirements.txt' do
+ before do
+ file_name = 'requirements.txt'
- create_file(file_name, '
- # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
- # yarn lockfile v1
- ')
- visit_blob(file_name)
- end
+ create_file(file_name, 'Project requirements')
+ visit_blob(file_name)
+ end
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Yarn.")
- end
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using pip.")
end
end
+ end
- context 'openapi.yml' do
- before do
- file_name = 'openapi.yml'
-
- create_file(file_name, '
- swagger: \'2.0\'
- info:
- title: Classic API Resource Documentation
- description: |
- <div class="foo-bar" style="background-color: red;" data-foo-bar="baz">
- <h1>Swagger API documentation</h1>
- </div>
- version: production
- basePath: /JSSResource/
- produces:
- - application/xml
- - application/json
- consumes:
- - application/xml
- - application/json
- security:
- - basicAuth: []
- paths:
- /accounts:
- get:
- responses:
- \'200\':
- description: No response was specified
- tags:
- - accounts
- operationId: findAccounts
- summary: Finds all accounts
- ')
- visit_blob(file_name, useUnsafeMarkdown: '1')
- click_button('Display rendered file')
+ context 'yarn.lock' do
+ before do
+ file_name = 'yarn.lock'
- wait_for_requests
- end
+ create_file(file_name, '
+ # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+ # yarn lockfile v1
+ ')
+ visit_blob(file_name)
+ end
-      it 'removes `style`, `class`, and `data-*` attributes from HTML' do
- expect(page).to have_css('h1', text: 'Swagger API documentation')
- expect(page).not_to have_css('.foo-bar')
- expect(page).not_to have_css('[style="background-color: red;"]')
- expect(page).not_to have_css('[data-foo-bar="baz"]')
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Yarn.")
end
end
end
- context 'realtime pipelines' do
+ context 'openapi.yml' do
before do
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'feature',
- branch_name: 'feature',
- commit_message: "Add ruby file",
- file_path: 'files/ruby/test.rb',
- file_content: "# Awesome content"
- ).execute
+ file_name = 'openapi.yml'
+
+ create_file(file_name, '
+ swagger: \'2.0\'
+ info:
+ title: Classic API Resource Documentation
+ description: |
+ <div class="foo-bar" style="background-color: red;" data-foo-bar="baz">
+ <h1>Swagger API documentation</h1>
+ </div>
+ version: production
+ basePath: /JSSResource/
+ produces:
+ - application/xml
+ - application/json
+ consumes:
+ - application/xml
+ - application/json
+ security:
+ - basicAuth: []
+ paths:
+ /accounts:
+ get:
+ responses:
+ \'200\':
+ description: No response was specified
+ tags:
+ - accounts
+ operationId: findAccounts
+ summary: Finds all accounts
+ ')
+ visit_blob(file_name, useUnsafeMarkdown: '1')
+ click_button('Display rendered file')
- create(:ci_pipeline, status: 'running', project: project, ref: 'feature', sha: project.commit('feature').sha)
- visit_blob('files/ruby/test.rb', ref: 'feature')
+ wait_for_requests
end
- it 'shows the realtime pipeline status' do
- page.within('.commit-actions') do
- expect(page).to have_css('.ci-status-icon')
- expect(page).to have_css('.ci-status-icon-running')
- expect(page).to have_css('.js-ci-status-icon-running')
- end
+      it 'removes `style`, `class`, and `data-*` attributes from HTML' do
+ expect(page).to have_css('h1', text: 'Swagger API documentation')
+ expect(page).not_to have_css('.foo-bar')
+ expect(page).not_to have_css('[style="background-color: red;"]')
+ expect(page).not_to have_css('[data-foo-bar="baz"]')
end
end
+ end
- context 'for subgroups' do
- let(:group) { create(:group) }
- let(:subgroup) { create(:group, parent: group) }
- let(:project) { create(:project, :public, :repository, group: subgroup) }
-
- it 'renders tree table without errors' do
- visit_blob('README.md')
+ context 'realtime pipelines' do
+ before do
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'feature',
+ branch_name: 'feature',
+ commit_message: "Add ruby file",
+ file_path: 'files/ruby/test.rb',
+ file_content: "# Awesome content"
+ ).execute
+
+ create(:ci_pipeline, status: 'running', project: project, ref: 'feature', sha: project.commit('feature').sha)
+ visit_blob('files/ruby/test.rb', ref: 'feature')
+ end
- expect(page).to have_selector('.file-content')
- expect(page).not_to have_selector('[data-testid="alert-danger"]')
+ it 'shows the realtime pipeline status' do
+ page.within('.commit-actions') do
+ expect(page).to have_css('.ci-status-icon')
+ expect(page).to have_css('.ci-status-icon-running')
+ expect(page).to have_css('.js-ci-status-icon-running')
end
+ end
+ end
- it 'displays a GPG badge' do
- visit_blob('CONTRIBUTING.md', ref: '33f3729a45c02fc67d00adb1b8bca394b0e761d9')
+ context 'for subgroups' do
+ let(:group) { create(:group) }
+ let(:subgroup) { create(:group, parent: group) }
+ let(:project) { create(:project, :public, :repository, group: subgroup) }
- expect(page).not_to have_selector '.gpg-status-box.js-loading-gpg-badge'
- expect(page).to have_selector '.gpg-status-box.invalid'
- end
+ it 'renders tree table without errors' do
+ visit_blob('README.md')
+
+ expect(page).to have_selector('.file-content')
+ expect(page).not_to have_selector('[data-testid="alert-danger"]')
end
- context 'on signed merge commit' do
- it 'displays a GPG badge' do
- visit_blob('conflicting-file.md', ref: '6101e87e575de14b38b4e1ce180519a813671e10')
+ it 'displays a GPG badge' do
+ visit_blob('CONTRIBUTING.md', ref: '33f3729a45c02fc67d00adb1b8bca394b0e761d9')
- expect(page).not_to have_selector '.gpg-status-box.js-loading-gpg-badge'
- expect(page).to have_selector '.gpg-status-box.invalid'
- end
+ expect(page).not_to have_selector '.gpg-status-box.js-loading-gpg-badge'
+ expect(page).to have_selector '.gpg-status-box.invalid'
end
+ end
- context 'when static objects external storage is enabled' do
- before do
- stub_application_setting(static_objects_external_storage_url: 'https://cdn.gitlab.com')
- end
+ context 'on signed merge commit' do
+ it 'displays a GPG badge' do
+ visit_blob('conflicting-file.md', ref: '6101e87e575de14b38b4e1ce180519a813671e10')
- context 'private project' do
- let_it_be(:project) { create(:project, :repository, :private) }
- let_it_be(:user) { create(:user, static_object_token: 'ABCD1234') }
+ expect(page).not_to have_selector '.gpg-status-box.js-loading-gpg-badge'
+ expect(page).to have_selector '.gpg-status-box.invalid'
+ end
+ end
- before do
- project.add_developer(user)
+ context 'when static objects external storage is enabled' do
+ before do
+ stub_application_setting(static_objects_external_storage_url: 'https://cdn.gitlab.com')
+ end
- sign_in(user)
- visit_blob('README.md')
- end
+ context 'private project' do
+ let_it_be(:project) { create(:project, :repository, :private) }
+ let_it_be(:user) { create(:user, static_object_token: 'ABCD1234') }
- it 'shows open raw and download buttons with external storage URL prepended and user token appended to their href' do
- path = project_raw_path(project, 'master/README.md')
- raw_uri = "https://cdn.gitlab.com#{path}?token=#{user.static_object_token}"
- download_uri = "https://cdn.gitlab.com#{path}?token=#{user.static_object_token}&inline=false"
+ before do
+ project.add_developer(user)
- aggregate_failures do
- expect(page).to have_link 'Open raw', href: raw_uri
- expect(page).to have_link 'Download', href: download_uri
- end
- end
+ sign_in(user)
+ visit_blob('README.md')
end
- context 'public project' do
- before do
- visit_blob('README.md')
- end
+ it 'shows open raw and download buttons with external storage URL prepended and user token appended to their href' do
+ path = project_raw_path(project, 'master/README.md')
+ raw_uri = "https://cdn.gitlab.com#{path}?token=#{user.static_object_token}"
+ download_uri = "https://cdn.gitlab.com#{path}?token=#{user.static_object_token}&inline=false"
- it 'shows open raw and download buttons with external storage URL prepended to their href' do
- path = project_raw_path(project, 'master/README.md')
- raw_uri = "https://cdn.gitlab.com#{path}"
- download_uri = "https://cdn.gitlab.com#{path}?inline=false"
-
- aggregate_failures do
- expect(page).to have_link 'Open raw', href: raw_uri
- expect(page).to have_link 'Download', href: download_uri
- end
+ aggregate_failures do
+ expect(page).to have_link 'Open raw', href: raw_uri
+ expect(page).to have_link 'Download', href: download_uri
end
end
end
- end
-
- context 'with refactor_blob_viewer feature flag disabled' do
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- end
-
- context 'binary file that appears to be text in the first 1024 bytes' do
-      # We need to ensure that this test runs with the refactor_blob_viewer feature flag enabled
- # This will be addressed in https://gitlab.com/gitlab-org/gitlab/-/issues/351559
+ context 'public project' do
before do
- visit_blob('encoding/binary-1.bin', ref: 'binary-encoding')
+ visit_blob('README.md')
end
- it 'displays the blob' do
+
+ it 'shows open raw and download buttons with external storage URL prepended to their href' do
+ path = project_raw_path(project, 'master/README.md')
+ raw_uri = "https://cdn.gitlab.com#{path}"
+ download_uri = "https://cdn.gitlab.com#{path}?inline=false"
+
aggregate_failures do
- # shows a download link
- expect(page).to have_link('Download (23.8 KB)')
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
- # The specs below verify an arguably incorrect result, but since we only
- # learn that the file is not actually text once the text viewer content
- # is loaded asynchronously, there is no straightforward way to get these
- # synchronously loaded elements to display correctly.
- #
- # Clicking the copy button will result in nothing being copied.
- # Clicking the raw button will result in the binary file being downloaded,
- # as expected.
- # shows an enabled copy button, incorrectly
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
- # shows a raw button, incorrectly
- expect(page).to have_link('Open raw')
+ expect(page).to have_link 'Open raw', href: raw_uri
+ expect(page).to have_link 'Download', href: download_uri
end
end
end
diff --git a/spec/features/projects/branches_spec.rb b/spec/features/projects/branches_spec.rb
index d906bb396be..727f9aa486e 100644
--- a/spec/features/projects/branches_spec.rb
+++ b/spec/features/projects/branches_spec.rb
@@ -233,6 +233,8 @@ RSpec.describe 'Branches' do
end
context 'with one or more pipeline', :js do
+ let(:project) { create(:project, :public, :empty_repo) }
+
before do
sha = create_file(branch_name: "branch")
create(:ci_pipeline,
diff --git a/spec/features/projects/ci/secure_files_spec.rb b/spec/features/projects/ci/secure_files_spec.rb
index a0e9d663d35..412330eb5d6 100644
--- a/spec/features/projects/ci/secure_files_spec.rb
+++ b/spec/features/projects/ci/secure_files_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'Secure Files', :js do
it 'user sees the Secure Files list component' do
visit project_ci_secure_files_path(project)
- expect(page).to have_content('There are no records to show')
+ expect(page).to have_content('There are no secure files yet.')
end
it 'prompts the user to confirm before deleting a file' do
@@ -37,7 +37,7 @@ RSpec.describe 'Secure Files', :js do
it 'displays an uploaded file in the file list' do
visit project_ci_secure_files_path(project)
- expect(page).to have_content('There are no records to show')
+ expect(page).to have_content('There are no secure files yet.')
page.attach_file('spec/fixtures/ci_secure_files/upload-keystore.jks') do
click_button 'Upload File'
diff --git a/spec/features/projects/commits/multi_view_diff_spec.rb b/spec/features/projects/commits/multi_view_diff_spec.rb
index 282112a3767..5af2e367aed 100644
--- a/spec/features/projects/commits/multi_view_diff_spec.rb
+++ b/spec/features/projects/commits/multi_view_diff_spec.rb
@@ -46,28 +46,28 @@ RSpec.describe 'Multiple view Diffs', :js do
end
context 'opening a diff with ipynb' do
- it 'loads the rendered diff as hidden' do
+ it 'loads the raw diff as hidden' do
diff = page.find('.diff-file, .file-holder', match: :first)
- expect(diff).not_to have_selector '[data-diff-toggle-entity="toHide"]'
- expect(diff).to have_selector '[data-diff-toggle-entity="toShow"]'
+ expect(diff).not_to have_selector '[data-diff-toggle-entity="rawViewer"]'
+ expect(diff).to have_selector '[data-diff-toggle-entity="renderedViewer"]'
- expect(classes_for_element(diff, 'toHide', visible: false)).to include('hidden')
- expect(classes_for_element(diff, 'toShow')).not_to include('hidden')
+ expect(classes_for_element(diff, 'rawViewer', visible: false)).to include('hidden')
+ expect(classes_for_element(diff, 'renderedViewer')).not_to include('hidden')
- expect(classes_for_element(diff, 'toShowBtn')).to include('selected')
- expect(classes_for_element(diff, 'toHideBtn')).not_to include('selected')
+ expect(classes_for_element(diff, 'renderedButton')).to include('selected')
+ expect(classes_for_element(diff, 'rawButton')).not_to include('selected')
end
- it 'displays the rendered diff and hides after selection changes' do
+ it 'displays the raw diff and hides after selection changes' do
diff = page.find('.diff-file, .file-holder', match: :first)
- diff.find('[data-diff-toggle-entity="toShowBtn"]').click
+ diff.find('[data-diff-toggle-entity="rawButton"]').click
- expect(diff).to have_selector '[data-diff-toggle-entity="toShow"]'
- expect(diff).not_to have_selector '[data-diff-toggle-entity="toHide"]'
+ expect(diff).to have_selector '[data-diff-toggle-entity="rawViewer"]'
+ expect(diff).not_to have_selector '[data-diff-toggle-entity="renderedViewer"]'
- expect(classes_for_element(diff, 'toHideBtn')).not_to include('selected')
- expect(classes_for_element(diff, 'toShowBtn')).to include('selected')
+ expect(classes_for_element(diff, 'renderedButton')).not_to include('selected')
+ expect(classes_for_element(diff, 'rawButton')).to include('selected')
end
it 'transforms the diff' do
diff --git a/spec/features/projects/deploy_keys_spec.rb b/spec/features/projects/deploy_keys_spec.rb
index bf705cf875b..06462263f5a 100644
--- a/spec/features/projects/deploy_keys_spec.rb
+++ b/spec/features/projects/deploy_keys_spec.rb
@@ -3,22 +3,38 @@
require 'spec_helper'
RSpec.describe 'Project deploy keys', :js do
- let(:user) { create(:user) }
- let(:project) { create(:project_empty_repo) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project_empty_repo) }
+ let_it_be(:deploy_keys_project) { create(:deploy_keys_project, project: project) }
+ let_it_be(:deploy_key) { deploy_keys_project.deploy_key }
before do
project.add_maintainer(user)
sign_in(user)
end
+ context 'editing key' do
+ it 'shows fingerprints' do
+ visit edit_project_deploy_key_path(project, deploy_key)
+
+ expect(page).to have_content('Fingerprint (SHA256)')
+ expect(find('#deploy_key_fingerprint_sha256').value).to eq(deploy_key.fingerprint_sha256)
+
+ if Gitlab::FIPS.enabled?
+ expect(page).not_to have_content('Fingerprint (MD5)')
+ else
+ expect(page).to have_content('Fingerprint (MD5)')
+ expect(find('#deploy_key_fingerprint').value).to eq(deploy_key.fingerprint)
+ end
+ end
+ end
+
describe 'removing key' do
before do
- create(:deploy_keys_project, project: project)
+ visit project_settings_repository_path(project)
end
it 'removes association between project and deploy key' do
- visit project_settings_repository_path(project)
-
page.within(find('.rspec-deploy-keys-settings')) do
expect(page).to have_selector('.deploy-key', count: 1)
diff --git a/spec/features/projects/diffs/diff_show_spec.rb b/spec/features/projects/diffs/diff_show_spec.rb
index 56506ada3ce..dcd6f1239bb 100644
--- a/spec/features/projects/diffs/diff_show_spec.rb
+++ b/spec/features/projects/diffs/diff_show_spec.rb
@@ -169,8 +169,8 @@ RSpec.describe 'Diff file viewer', :js, :with_clean_rails_cache do
wait_for_requests
end
- it 'shows there is no preview' do
- expect(page).to have_content('No preview for this file type')
+ it 'shows that file was added' do
+ expect(page).to have_content('File added')
end
end
end
diff --git a/spec/features/projects/environments_pod_logs_spec.rb b/spec/features/projects/environments_pod_logs_spec.rb
deleted file mode 100644
index 531eae1d638..00000000000
--- a/spec/features/projects/environments_pod_logs_spec.rb
+++ /dev/null
@@ -1,68 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Environment > Pod Logs', :js, :kubeclient do
- include KubernetesHelpers
-
- let(:pod_names) { %w(kube-pod) }
- let(:pod_name) { pod_names.first }
- let(:project) { create(:project, :repository) }
- let(:environment) { create(:environment, project: project) }
- let(:service) { create(:cluster_platform_kubernetes, :configured) }
-
- before do
- cluster = create(:cluster, :provided_by_gcp, environment_scope: '*', projects: [project])
- create(:deployment, :success, environment: environment)
-
- stub_kubeclient_pods(environment.deployment_namespace)
- stub_kubeclient_logs(pod_name, environment.deployment_namespace, container: 'container-0')
- stub_kubeclient_deployments(environment.deployment_namespace)
- stub_kubeclient_ingresses(environment.deployment_namespace)
- stub_kubeclient_nodes_and_nodes_metrics(cluster.platform.api_url)
-
- sign_in(project.first_owner)
- end
-
- it "shows environments in dropdown" do
- create(:environment, project: project)
-
- visit project_logs_path(environment.project, environment_name: environment.name, pod_name: pod_name)
-
- wait_for_requests
-
- page.within('.js-environments-dropdown') do
- toggle = find(".dropdown-toggle:not([disabled])")
-
- expect(toggle).to have_content(environment.name)
-
- toggle.click
-
- dropdown_items = find(".dropdown-menu").all(".dropdown-item")
- expect(dropdown_items.first).to have_content(environment.name)
- expect(dropdown_items.size).to eq(2)
- end
- end
-
- context 'with logs', :use_clean_rails_memory_store_caching do
- it "shows pod logs", :sidekiq_might_not_need_inline do
- visit project_logs_path(environment.project, environment_name: environment.name, pod_name: pod_name)
-
- wait_for_requests
-
- page.within('.qa-pods-dropdown') do # rubocop:disable QA/SelectorUsage
- find(".dropdown-toggle:not([disabled])").click
-
- dropdown_items = find(".dropdown-menu").all(".dropdown-item:not([disabled])")
- expect(dropdown_items.size).to eq(1)
-
- dropdown_items.each_with_index do |item, i|
- expect(item.text).to eq(pod_names[i])
- end
- end
- expect(page).to have_content("kube-pod | Log 1")
- expect(page).to have_content("kube-pod | Log 2")
- expect(page).to have_content("kube-pod | Log 3")
- end
- end
-end
diff --git a/spec/features/projects/files/dockerfile_dropdown_spec.rb b/spec/features/projects/files/dockerfile_dropdown_spec.rb
index 3a0cc61d9c6..dd1635c900e 100644
--- a/spec/features/projects/files/dockerfile_dropdown_spec.rb
+++ b/spec/features/projects/files/dockerfile_dropdown_spec.rb
@@ -26,6 +26,6 @@ RSpec.describe 'Projects > Files > User wants to add a Dockerfile file', :js do
wait_for_requests
expect(page).to have_css('.dockerfile-selector .dropdown-toggle-text', text: 'Apply a template')
- expect(editor_get_value).to have_content('COPY ./ /usr/local/apache2/htdocs/')
+ expect(find('.monaco-editor')).to have_content('COPY ./ /usr/local/apache2/htdocs/')
end
end
diff --git a/spec/features/projects/files/gitignore_dropdown_spec.rb b/spec/features/projects/files/gitignore_dropdown_spec.rb
index 4a92216f46c..a86adf951d8 100644
--- a/spec/features/projects/files/gitignore_dropdown_spec.rb
+++ b/spec/features/projects/files/gitignore_dropdown_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe 'Projects > Files > User wants to add a .gitignore file', :js do
wait_for_requests
expect(page).to have_css('.gitignore-selector .dropdown-toggle-text', text: 'Apply a template')
- expect(editor_get_value).to have_content('/.bundle')
- expect(editor_get_value).to have_content('config/initializers/secret_token.rb')
+ expect(find('.monaco-editor')).to have_content('/.bundle')
+ expect(find('.monaco-editor')).to have_content('config/initializers/secret_token.rb')
end
end
diff --git a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
index cdf6c219ea5..46ac0dee7eb 100644
--- a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
+++ b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
@@ -30,8 +30,8 @@ RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file', :js
wait_for_requests
expect(page).to have_css('.gitlab-ci-yml-selector .dropdown-toggle-text', text: 'Apply a template')
- expect(editor_get_value).to have_content('This file is a template, and might need editing before it works on your project')
- expect(editor_get_value).to have_content('jekyll build -d test')
+ expect(find('.monaco-editor')).to have_content('This file is a template, and might need editing before it works on your project')
+ expect(find('.monaco-editor')).to have_content('jekyll build -d test')
end
context 'when template param is provided' do
@@ -41,8 +41,8 @@ RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file', :js
wait_for_requests
expect(page).to have_css('.gitlab-ci-yml-selector .dropdown-toggle-text', text: 'Apply a template')
- expect(editor_get_value).to have_content('This file is a template, and might need editing before it works on your project')
- expect(editor_get_value).to have_content('jekyll build -d test')
+ expect(find('.monaco-editor')).to have_content('This file is a template, and might need editing before it works on your project')
+ expect(find('.monaco-editor')).to have_content('jekyll build -d test')
end
end
@@ -53,7 +53,7 @@ RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file', :js
wait_for_requests
expect(page).to have_css('.gitlab-ci-yml-selector .dropdown-toggle-text', text: 'Apply a template')
- expect(editor_get_value).to have_content('')
+ expect(find('.monaco-editor')).to have_content('')
end
end
@@ -64,7 +64,7 @@ RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file', :js
it 'leaves the editor empty' do
wait_for_requests
- expect(editor_get_value).to have_content('')
+ expect(find('.monaco-editor')).to have_content('')
end
end
end
diff --git a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
index 0e87622d3c2..6b1e60db5b1 100644
--- a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
+++ b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
@@ -22,8 +22,10 @@ RSpec.describe 'Projects > Files > Project owner sees a link to create a license
select_template('MIT License')
- expect(ide_editor_value).to have_content('MIT License')
- expect(ide_editor_value).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
+ file_content = "Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}"
+
+ expect(find('.monaco-editor')).to have_content('MIT License')
+ expect(find('.monaco-editor')).to have_content(file_content)
ide_commit
@@ -33,7 +35,7 @@ RSpec.describe 'Projects > Files > Project owner sees a link to create a license
license_file = project.repository.blob_at('master', 'LICENSE').data
expect(license_file).to have_content('MIT License')
- expect(license_file).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
+ expect(license_file).to have_content(file_content)
end
def select_template(template)
diff --git a/spec/features/projects/jobs/permissions_spec.rb b/spec/features/projects/jobs/permissions_spec.rb
index a904ba770dd..b6019944071 100644
--- a/spec/features/projects/jobs/permissions_spec.rb
+++ b/spec/features/projects/jobs/permissions_spec.rb
@@ -12,8 +12,6 @@ RSpec.describe 'Project Jobs Permissions' do
let_it_be(:job) { create(:ci_build, :running, :coverage, :trace_artifact, pipeline: pipeline) }
before do
- stub_feature_flags(jobs_table_vue: false)
-
sign_in(user)
project.enable_ci
@@ -96,8 +94,8 @@ RSpec.describe 'Project Jobs Permissions' do
end
it_behaves_like 'project jobs page responds with status', 200 do
- it 'renders job' do
- page.within('.build') do
+ it 'renders job', :js do
+ page.within('[data-testid="jobs-table"]') do
expect(page).to have_content("##{job.id}")
.and have_content(job.sha[0..7])
.and have_content(job.ref)
diff --git a/spec/features/projects/jobs/user_browses_job_spec.rb b/spec/features/projects/jobs/user_browses_job_spec.rb
index 6a2d2c36521..6a0cfcde812 100644
--- a/spec/features/projects/jobs/user_browses_job_spec.rb
+++ b/spec/features/projects/jobs/user_browses_job_spec.rb
@@ -90,4 +90,27 @@ RSpec.describe 'User browses a job', :js do
end
end
end
+
+ context 'job log search' do
+ before do
+ visit(project_job_path(project, build))
+ wait_for_all_requests
+ end
+
+ it 'searches for supplied substring' do
+ find('[data-testid="job-log-search-box"] input').set('GroupsHelper')
+
+ find('[data-testid="search-button"]').click
+
+ expect(page).to have_content('26 results found for GroupsHelper')
+ end
+
+ it 'shows no results for supplied substring' do
+ find('[data-testid="job-log-search-box"] input').set('YouWontFindMe')
+
+ find('[data-testid="search-button"]').click
+
+ expect(page).to have_content('No search results found')
+ end
+ end
end
diff --git a/spec/features/projects/jobs/user_browses_jobs_spec.rb b/spec/features/projects/jobs/user_browses_jobs_spec.rb
index 07b7a54974a..bb44b70bb3a 100644
--- a/spec/features/projects/jobs/user_browses_jobs_spec.rb
+++ b/spec/features/projects/jobs/user_browses_jobs_spec.rb
@@ -9,48 +9,11 @@ def visit_jobs_page
end
RSpec.describe 'User browses jobs' do
- describe 'with jobs_table_vue feature flag turned off' do
- let!(:build) { create(:ci_build, :coverage, pipeline: pipeline) }
- let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.sha, ref: 'master') }
- let(:project) { create(:project, :repository, namespace: user.namespace) }
- let(:user) { create(:user) }
-
- before do
- stub_feature_flags(jobs_table_vue: false)
- project.add_maintainer(user)
- project.enable_ci
- build.update!(coverage_regex: '/Coverage (\d+)%/')
-
- sign_in(user)
-
- visit(project_jobs_path(project))
- end
-
- it 'shows the coverage' do
- page.within('td.coverage') do
- expect(page).to have_content('99.9%')
- end
- end
-
- context 'with a failed job' do
- let!(:build) { create(:ci_build, :coverage, :failed, pipeline: pipeline) }
-
- it 'displays a tooltip with the failure reason' do
- page.within('.ci-table') do
- failed_job_link = page.find('.ci-failed')
- expect(failed_job_link[:title]).to eq('Failed - (unknown failure)')
- end
- end
- end
- end
-
- describe 'with jobs_table_vue feature flag turned on', :js do
+ describe 'Jobs', :js do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
before do
- stub_feature_flags(jobs_table_vue: true)
-
project.add_maintainer(user)
project.enable_ci
@@ -135,6 +98,26 @@ RSpec.describe 'User browses jobs' do
end
end
+ context 'with a coverage job' do
+ let!(:job) do
+ create(:ci_build, :coverage, pipeline: pipeline)
+ end
+
+ before do
+ job.update!(coverage_regex: '/Coverage (\d+)%/')
+
+ visit_jobs_page
+
+ wait_for_requests
+ end
+
+ it 'shows the coverage' do
+ page.within('[data-testid="job-coverage"]') do
+ expect(page).to have_content('99.9%')
+ end
+ end
+ end
+
context 'with a scheduled job' do
let!(:scheduled_job) { create(:ci_build, :scheduled, pipeline: pipeline, name: 'build') }
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index f0d41c1dd11..84c75752bc1 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -20,7 +20,6 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
end
before do
- stub_feature_flags(jobs_table_vue: false)
project.add_role(user, user_access_level)
sign_in(user)
end
@@ -29,9 +28,11 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
context 'with no jobs' do
before do
visit project_jobs_path(project)
+
+ wait_for_requests
end
- it 'shows the empty state page' do
+ it 'shows the empty state page', :js do
expect(page).to have_content('Use jobs to automate your tasks')
expect(page).to have_link('Create CI/CD configuration file', href: project_ci_pipeline_editor_path(project))
end
@@ -40,59 +41,6 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
context 'with a job' do
let!(:job) { create(:ci_build, pipeline: pipeline) }
- context "Pending scope" do
- before do
- visit project_jobs_path(project, scope: :pending)
- end
-
- it "shows Pending tab jobs" do
- expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Pending')
- expect(page).to have_content job.short_sha
- expect(page).to have_content job.ref
- expect(page).to have_content job.name
- end
- end
-
- context "Running scope" do
- before do
- job.run!
- visit project_jobs_path(project, scope: :running)
- end
-
- it "shows Running tab jobs" do
- expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Running')
- expect(page).to have_content job.short_sha
- expect(page).to have_content job.ref
- expect(page).to have_content job.name
- end
- end
-
- context "Finished scope" do
- before do
- job.run!
- visit project_jobs_path(project, scope: :finished)
- end
-
- it "shows Finished tab jobs" do
- expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Finished')
- expect(page).to have_content('Use jobs to automate your tasks')
- end
- end
-
- context "All jobs" do
- before do
- project.builds.running_or_pending.each(&:success)
- visit project_jobs_path(project)
- end
-
- it "shows All tab jobs" do
- expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'All')
- expect(page).to have_content job.short_sha
- expect(page).to have_content job.ref
- expect(page).to have_content job.name
- end
- end
-
context "when visiting old URL" do
let(:jobs_url) do
project_jobs_path(project)
@@ -1207,22 +1155,4 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
it { expect(page.status_code).to eq(404) }
end
end
-
- describe "GET /:project/jobs/:id/status" do
- context "Job from project" do
- before do
- visit status_project_job_path(project, job)
- end
-
- it { expect(page.status_code).to eq(200) }
- end
-
- context "Job from other project" do
- before do
- visit status_project_job_path(project, job2)
- end
-
- it { expect(page.status_code).to eq(404) }
- end
- end
end
diff --git a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
index bd0874316ac..c92e8bc2954 100644
--- a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
+++ b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe 'Projects > Members > Maintainer adds member with expiration date
end
it 'changes expiration date' do
- project.team.add_users([new_member.id], :developer, expires_at: three_days_from_now)
+ project.team.add_members([new_member.id], :developer, expires_at: three_days_from_now)
visit project_project_members_path(project)
page.within find_member_row(new_member) do
@@ -46,7 +46,7 @@ RSpec.describe 'Projects > Members > Maintainer adds member with expiration date
end
it 'clears expiration date' do
- project.team.add_users([new_member.id], :developer, expires_at: five_days_from_now)
+ project.team.add_members([new_member.id], :developer, expires_at: five_days_from_now)
visit project_project_members_path(project)
page.within find_member_row(new_member) do
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index 023601b0b1e..e07a5d09405 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe 'Project navbar' do
stub_config(pages: { enabled: true })
insert_after_sub_nav_item(
- _('CI/CD'),
+ _('Packages & Registries'),
within: _('Settings'),
new_sub_nav_item_name: _('Pages')
)
@@ -60,18 +60,22 @@ RSpec.describe 'Project navbar' do
it_behaves_like 'verified navigation bar'
end
+ context 'when package registry is available' do
+ before do
+ stub_config(packages: { enabled: true })
+
+ visit project_path(project)
+ end
+
+ it_behaves_like 'verified navigation bar'
+ end
+
context 'when container registry is available' do
before do
stub_config(registry: { enabled: true })
insert_container_nav
- insert_after_sub_nav_item(
- _('CI/CD'),
- within: _('Settings'),
- new_sub_nav_item_name: _('Packages & Registries')
- )
-
visit project_path(project)
end
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index a1e92a79516..9d2d1454d77 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -61,15 +61,15 @@ RSpec.describe 'New project', :js do
expect(page).to have_link('GitLab export')
end
- describe 'github import option' do
+ shared_examples 'renders importer link' do |params|
context 'with user namespace' do
before do
visit new_project_path
click_link 'Import project'
end
- it 'renders link to github importer' do
- expect(page).to have_link(href: new_import_github_path)
+ it "renders link to #{params[:name]} importer" do
+ expect(page).to have_link(href: Rails.application.routes.url_helpers.send(params[:route]))
end
end
@@ -82,21 +82,56 @@ RSpec.describe 'New project', :js do
click_link 'Import project'
end
- it 'renders link to github importer including namespace id' do
- expect(page).to have_link(href: new_import_github_path(namespace_id: group.id))
+ it "renders link to #{params[:name]} importer including namespace id" do
+ expect(page).to have_link(href: Rails.application.routes.url_helpers.send(params[:route], namespace_id: group.id))
end
end
end
- describe 'manifest import option' do
- before do
- visit new_project_path
+ describe 'importer links' do
+ shared_examples 'link to importers' do
+ let(:importer_routes) do
+ {
+ 'github': :new_import_github_path,
+ 'bitbucket': :status_import_bitbucket_path,
+ 'bitbucket server': :status_import_bitbucket_server_path,
+ 'gitlab.com': :status_import_gitlab_path,
+ 'fogbugz': :new_import_fogbugz_path,
+ 'gitea': :new_import_gitea_path,
+ 'manifest': :new_import_manifest_path,
+ 'phabricator': :new_import_phabricator_path
+ }
+ end
+
+ it 'renders links to several importers', :aggregate_failures do
+ importer_routes.each_value do |route|
+ expect(page).to have_link(href: Rails.application.routes.url_helpers.send(route, link_params))
+ end
+ end
+ end
- click_link 'Import project'
+ context 'with user namespace' do
+ let(:link_params) { {} }
+
+ before do
+ visit new_project_path
+ click_link 'Import project'
+ end
+
+ include_examples 'link to importers'
end
- it 'has Manifest file' do
- expect(page).to have_link('Manifest file')
+ context 'with group namespace' do
+ let(:group) { create(:group, :private) }
+ let(:link_params) { { namespace_id: group.id } }
+
+ before do
+ group.add_owner(user)
+ visit new_project_path(namespace_id: group.id)
+ click_link 'Import project'
+ end
+
+ include_examples 'link to importers'
end
end
diff --git a/spec/features/projects/pipelines/legacy_pipelines_spec.rb b/spec/features/projects/pipelines/legacy_pipelines_spec.rb
index 3f89e344c51..15d889933bf 100644
--- a/spec/features/projects/pipelines/legacy_pipelines_spec.rb
+++ b/spec/features/projects/pipelines/legacy_pipelines_spec.rb
@@ -357,6 +357,10 @@ RSpec.describe 'Pipelines', :js do
end
it 'enqueues the delayed job', :js do
+ find('[data-testid="mini-pipeline-graph-dropdown"]').click
+
+ within('[data-testid="mini-pipeline-graph-dropdown"]') { find('.ci-status-icon-pending') }
+
expect(delayed_job.reload).to be_pending
end
end
diff --git a/spec/features/projects/releases/user_creates_release_spec.rb b/spec/features/projects/releases/user_creates_release_spec.rb
index 9e428a0623d..10c4395da81 100644
--- a/spec/features/projects/releases/user_creates_release_spec.rb
+++ b/spec/features/projects/releases/user_creates_release_spec.rb
@@ -111,6 +111,27 @@ RSpec.describe 'User creates release', :js do
end
end
+ context 'when tag name supplied in the parameters' do
+ let(:new_page_url) { new_project_release_path(project, tag_name: 'v1.1.0') }
+
+ it 'creates release with preselected tag' do
+ page.within '[data-testid="tag-name-field"]' do
+ expect(page).to have_text('v1.1.0')
+ end
+
+ expect(page).not_to have_selector('[data-testid="create-from-field"]')
+
+ fill_release_title("test release")
+ click_button('Create release')
+
+ wait_for_all_requests
+
+ release = project.releases.last
+
+ expect(release.tag).to eq('v1.1.0')
+ end
+ end
+
def fill_out_form_and_submit
select_new_tag_name(tag_name)
diff --git a/spec/features/projects/settings/registry_settings_spec.rb b/spec/features/projects/settings/registry_settings_spec.rb
index ff28d59ed08..9468540736f 100644
--- a/spec/features/projects/settings/registry_settings_spec.rb
+++ b/spec/features/projects/settings/registry_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration policy', :js do
+RSpec.describe 'Project > Settings > Packages & Registries > Container registry tag expiration policy', :js do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, namespace: user.namespace) }
@@ -23,14 +23,15 @@ RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration p
it 'shows available section' do
subject
- settings_block = find('[data-testid="registry-settings-app"]')
+ settings_block = find('[data-testid="container-expiration-policy-project-settings"]')
expect(settings_block).to have_text 'Clean up image tags'
end
    it 'saves cleanup policy when submitting the form' do
subject
- within '[data-testid="registry-settings-app"]' do
+ within '[data-testid="container-expiration-policy-project-settings"]' do
+ click_button('Expand')
select('Every day', from: 'Run cleanup')
select('50 tags per image name', from: 'Keep the most recent:')
fill_in('Keep tags matching:', with: 'stable')
@@ -48,7 +49,8 @@ RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration p
    it 'does not save cleanup policy when submitting the form with an invalid regex' do
subject
- within '[data-testid="registry-settings-app"]' do
+ within '[data-testid="container-expiration-policy-project-settings"]' do
+ click_button('Expand')
fill_in('Remove tags matching:', with: '*-production')
submit_button = find('[data-testid="save-button"]')
@@ -73,7 +75,8 @@ RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration p
it 'displays the related section' do
subject
- within '[data-testid="registry-settings-app"]' do
+ within '[data-testid="container-expiration-policy-project-settings"]' do
+ click_button('Expand')
expect(find('[data-testid="enable-toggle"]')).to have_content('Disabled - Tags will not be automatically deleted.')
end
end
@@ -87,7 +90,8 @@ RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration p
it 'does not display the related section' do
subject
- within '[data-testid="registry-settings-app"]' do
+ within '[data-testid="container-expiration-policy-project-settings"]' do
+ click_button('Expand')
expect(find('.gl-alert-title')).to have_content('Cleanup policy for tags is disabled')
end
end
@@ -100,7 +104,7 @@ RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration p
it 'does not exists' do
subject
- expect(page).not_to have_selector('[data-testid="registry-settings-app"]')
+ expect(page).not_to have_selector('[data-testid="container-expiration-policy-project-settings"]')
end
end
@@ -110,7 +114,7 @@ RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration p
it 'does not exists' do
subject
- expect(page).not_to have_selector('[data-testid="registry-settings-app"]')
+ expect(page).not_to have_selector('[data-testid="container-expiration-policy-project-settings"]')
end
end
end
diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb
index 72ada356225..ddfed73e2ca 100644
--- a/spec/features/projects/settings/repository_settings_spec.rb
+++ b/spec/features/projects/settings/repository_settings_spec.rb
@@ -179,7 +179,6 @@ RSpec.describe 'Projects > Settings > Repository settings' do
expect(page).to have_css(".js-mirror-url-hidden[value=\"#{ssh_url}\"]", visible: false)
select 'SSH public key', from: 'Authentication method'
-
select_direction
Sidekiq::Testing.fake! do
diff --git a/spec/features/projects/settings/secure_files_settings_spec.rb b/spec/features/projects/settings/secure_files_settings_spec.rb
deleted file mode 100644
index c7c9cafc420..00000000000
--- a/spec/features/projects/settings/secure_files_settings_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Secure Files Settings' do
- let_it_be(:maintainer) { create(:user) }
- let_it_be(:project) { create(:project, creator_id: maintainer.id) }
-
- before_all do
- project.add_maintainer(maintainer)
- end
-
- context 'when the :ci_secure_files feature flag is enabled' do
- before do
- stub_feature_flags(ci_secure_files: true)
-
- sign_in(user)
- visit project_settings_ci_cd_path(project)
- end
-
- context 'authenticated user with admin permissions' do
- let(:user) { maintainer }
-
- it 'shows the secure files settings' do
- expect(page).to have_content('Secure Files')
- end
- end
- end
-
- context 'when the :ci_secure_files feature flag is disabled' do
- before do
- stub_feature_flags(ci_secure_files: false)
-
- sign_in(user)
- visit project_settings_ci_cd_path(project)
- end
-
- context 'authenticated user with admin permissions' do
- let(:user) { maintainer }
-
- it 'does not shows the secure files settings' do
- expect(page).not_to have_content('Secure Files')
- end
- end
- end
-end
diff --git a/spec/features/projects/settings/secure_files_spec.rb b/spec/features/projects/settings/secure_files_spec.rb
new file mode 100644
index 00000000000..ee38acf1953
--- /dev/null
+++ b/spec/features/projects/settings/secure_files_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Secure Files', :js do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ stub_feature_flags(ci_secure_files_read_only: false)
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ context 'when the :ci_secure_files feature flag is enabled' do
+ before do
+ stub_feature_flags(ci_secure_files: true)
+
+ visit project_settings_ci_cd_path(project)
+ end
+
+ context 'authenticated user with admin permissions' do
+ it 'shows the secure files settings' do
+ expect(page).to have_content('Secure Files')
+ end
+ end
+ end
+
+ context 'when the :ci_secure_files feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_secure_files: false)
+
+ visit project_settings_ci_cd_path(project)
+ end
+
+ context 'authenticated user with admin permissions' do
+ it 'does not show the secure files settings' do
+ expect(page).not_to have_content('Secure Files')
+ end
+ end
+ end
+
+ it 'user sees the Secure Files list component' do
+ visit project_settings_ci_cd_path(project)
+
+ within '#js-secure-files' do
+ expect(page).to have_content('There are no secure files yet.')
+ end
+ end
+
+ it 'prompts the user to confirm before deleting a file' do
+ file = create(:ci_secure_file, project: project)
+
+ visit project_settings_ci_cd_path(project)
+
+ within '#js-secure-files' do
+ expect(page).to have_content(file.name)
+
+ find('button.btn-danger-secondary').click
+ end
+
+ expect(page).to have_content("Delete #{file.name}?")
+
+ click_on('Delete secure file')
+
+ visit project_settings_ci_cd_path(project)
+
+ within '#js-secure-files' do
+ expect(page).not_to have_content(file.name)
+ end
+ end
+
+ it 'displays an uploaded file in the file list' do
+ visit project_settings_ci_cd_path(project)
+
+ within '#js-secure-files' do
+ expect(page).to have_content('There are no secure files yet.')
+
+ page.attach_file('spec/fixtures/ci_secure_files/upload-keystore.jks') do
+ click_button 'Upload File'
+ end
+
+ expect(page).to have_content('upload-keystore.jks')
+ end
+ end
+
+ it 'displays an error when a duplicate file upload is attempted' do
+ create(:ci_secure_file, project: project, name: 'upload-keystore.jks')
+ visit project_settings_ci_cd_path(project)
+
+ within '#js-secure-files' do
+ expect(page).to have_content('upload-keystore.jks')
+
+ page.attach_file('spec/fixtures/ci_secure_files/upload-keystore.jks') do
+ click_button 'Upload File'
+ end
+
+ expect(page).to have_content('A file with this name already exists.')
+ end
+ end
+end
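The upload examples in the new spec drive a hidden file input through Capybara's block form of `attach_file`: the block performs the click that would open the native file chooser, and Capybara intercepts it and attaches the given path instead. A rough sketch, assuming a JavaScript-capable driver; the path helper and button label are purely illustrative:

    it 'uploads a fixture through a hidden file input', :js do
      visit upload_page_path # hypothetical path helper for this sketch

      # Block form: no locator needed; the click inside the block is expected
      # to trigger the file chooser, which Capybara intercepts.
      page.attach_file('spec/fixtures/sample.jks') do
        click_button 'Upload File'
      end

      expect(page).to have_content('sample.jks')
    end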
diff --git a/spec/features/projects/settings/visibility_settings_spec.rb b/spec/features/projects/settings/visibility_settings_spec.rb
index becb30c02b7..fc78b5b5769 100644
--- a/spec/features/projects/settings/visibility_settings_spec.rb
+++ b/spec/features/projects/settings/visibility_settings_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe 'Projects > Settings > Visibility settings', :js do
visibility_select_container = find('.project-visibility-setting')
expect(visibility_select_container.find('select').value).to eq project.visibility_level.to_s
- expect(visibility_select_container).to have_content 'The project can be accessed by anyone, regardless of authentication.'
+ expect(visibility_select_container).to have_content 'Accessible by anyone, regardless of authentication.'
end
it 'project visibility description updates on change' do
@@ -25,7 +25,7 @@ RSpec.describe 'Projects > Settings > Visibility settings', :js do
visibility_select.select('Private')
expect(visibility_select.value).to eq '0'
- expect(visibility_select_container).to have_content 'Access must be granted explicitly to each user.'
+ expect(visibility_select_container).to have_content 'Only accessible by project members. Membership must be explicitly granted to each user.'
end
context 'merge requests select' do
@@ -86,7 +86,7 @@ RSpec.describe 'Projects > Settings > Visibility settings', :js do
visibility_select_container = find('.project-visibility-setting')
expect(visibility_select_container).to have_selector 'select[name="project[visibility_level]"]:disabled'
- expect(visibility_select_container).to have_content 'The project can be accessed by anyone, regardless of authentication.'
+ expect(visibility_select_container).to have_content 'Accessible by anyone, regardless of authentication.'
end
context 'disable email notifications' do
diff --git a/spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb b/spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb
index 59f1bc94226..262885e09b3 100644
--- a/spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb
+++ b/spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Project > Show > User interacts with auto devops implicitly enab
let(:user) { create(:user) }
before do
- project.add_user(user, role)
+ project.add_member(user, role)
sign_in(user)
end
diff --git a/spec/features/projects/show/user_sees_collaboration_links_spec.rb b/spec/features/projects/show/user_sees_collaboration_links_spec.rb
index 552f068ecc7..fb2f0539558 100644
--- a/spec/features/projects/show/user_sees_collaboration_links_spec.rb
+++ b/spec/features/projects/show/user_sees_collaboration_links_spec.rb
@@ -90,7 +90,7 @@ RSpec.describe 'Projects > Show > Collaboration links', :js do
with_them do
before do
project.project_feature.update!({ merge_requests_access_level: merge_requests_access_level })
- project.add_user(user, user_level)
+ project.add_member(user, user_level)
visit project_path(project)
end
diff --git a/spec/features/projects/tags/user_edits_tags_spec.rb b/spec/features/projects/tags/user_edits_tags_spec.rb
index 17080043b6d..c8438b73dc3 100644
--- a/spec/features/projects/tags/user_edits_tags_spec.rb
+++ b/spec/features/projects/tags/user_edits_tags_spec.rb
@@ -5,17 +5,58 @@ require 'spec_helper'
RSpec.describe 'Project > Tags', :js do
include DropzoneHelper
- let(:user) { create(:user) }
- let(:role) { :developer }
- let(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:role) { :developer }
+ let_it_be(:project) { create(:project, :repository) }
before do
sign_in(user)
project.add_role(user, role)
end
+ shared_examples "can create and update release" do
+ it 'can create new release' do
+ visit page_url
+ page.find("a[href=\"#{new_project_release_path(project, tag_name: 'v1.1.0')}\"]").click
+
+ fill_in "Release notes", with: "new release from tag"
+ expect(page).not_to have_field("Create from")
+ click_button "Create release"
+
+ expect(page).to have_current_path(project_release_path(project, 'v1.1.0'))
+ expect(Release.last.description).to eq("new release from tag")
+ end
+
+ it 'can edit existing release' do
+ release = create(:release, project: project, tag: 'v1.1.0')
+
+ visit page_url
+ page.find("a[href=\"#{edit_project_release_path(project, release)}\"]").click
+
+ fill_in "Release notes", with: "updated release desc"
+ click_button "Save changes"
+
+ expect(page).to have_current_path(project_release_path(project, 'v1.1.0'))
+ expect(release.reload.description).to eq("updated release desc")
+ end
+ end
+
+ context 'when visiting tags index page' do
+ let(:page_url) { project_tags_path(project) }
+
+ include_examples "can create and update release"
+ end
+
+ context 'when visiting individual tag page' do
+ let(:page_url) { project_tag_path(project, 'v1.1.0') }
+
+ include_examples "can create and update release"
+ end
+
+ # TODO: remove most of these together with FF https://gitlab.com/gitlab-org/gitlab/-/issues/366244
describe 'when opening project tags' do
before do
+ stub_feature_flags(edit_tag_release_notes_via_release_page: false)
visit project_tags_path(project)
end
diff --git a/spec/features/projects/tracings_spec.rb b/spec/features/projects/tracings_spec.rb
deleted file mode 100644
index b79a0427ef6..00000000000
--- a/spec/features/projects/tracings_spec.rb
+++ /dev/null
@@ -1,60 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Tracings Content Security Policy' do
- include ContentSecurityPolicyHelpers
-
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user) }
-
- subject { response_headers['Content-Security-Policy'] }
-
- before_all do
- project.add_maintainer(user)
- end
-
- before do
- sign_in(user)
- end
-
- context 'when there is no global config' do
- before do
- setup_csp_for_controller(Projects::TracingsController)
- end
-
- it 'does not add CSP directives' do
- visit project_tracing_path(project)
-
- is_expected.to be_blank
- end
- end
-
- context 'when a global CSP config exists' do
- before do
- csp = ActionDispatch::ContentSecurityPolicy.new do |p|
- p.frame_src 'https://global-policy.com'
- end
-
- setup_existing_csp_for_controller(Projects::TracingsController, csp)
- end
-
- context 'when external_url is set' do
- let!(:project_tracing_setting) { create(:project_tracing_setting, project: project) }
-
- it 'overwrites frame-src' do
- visit project_tracing_path(project)
-
- is_expected.to eq("frame-src https://example.com")
- end
- end
-
- context 'when external_url is not set' do
- it 'uses global policy' do
- visit project_tracing_path(project)
-
- is_expected.to eq("frame-src https://global-policy.com")
- end
- end
- end
-end
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index db64f84aa76..f6f9c7f0d3c 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -440,6 +440,99 @@ RSpec.describe 'Project' do
end
end
+ describe 'storage_enforcement_banner', :js do
+ let_it_be(:group) { create(:group) }
+ let_it_be_with_refind(:user) { create(:user) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ before do
+ group.add_maintainer(user)
+ sign_in(user)
+ end
+
+ context 'with storage_enforcement_date set' do
+ let_it_be(:storage_enforcement_date) { Date.today + 30 }
+
+ before do
+ allow_next_found_instance_of(Group) do |grp|
+ allow(grp).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ end
+ end
+
+ it 'displays the banner in the project page' do
+ visit project_path(project)
+ expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
+ end
+
+ context 'when in a subgroup project page' do
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:project) { create(:project, namespace: subgroup) }
+
+ it 'displays the banner' do
+ visit project_path(project)
+ expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
+ end
+ end
+
+ context 'when in a user namespace project page' do
+ let_it_be(:project) { create(:project, namespace: user.namespace) }
+
+ before do
+ allow_next_found_instance_of(Namespaces::UserNamespace) do |namespace|
+ allow(namespace).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ end
+ end
+
+ it 'displays the banner' do
+ visit project_path(project)
+ expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
+ end
+ end
+
+ it 'does not display the banner in a paid group project page' do
+ allow_next_found_instance_of(Group) do |grp|
+ allow(grp).to receive(:paid?).and_return(true)
+ end
+ visit project_path(project)
+ expect_page_not_to_have_storage_enforcement_banner
+ end
+
+ it 'does not display the banner if the user has previously closed it, unless the threshold has changed' do
+ visit project_path(project)
+ expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
+ find('.js-storage-enforcement-banner [data-testid="close-icon"]').click
+ wait_for_requests
+ page.refresh
+ expect_page_not_to_have_storage_enforcement_banner
+
+ storage_enforcement_date = Date.today + 13
+ allow_next_found_instance_of(Group) do |grp|
+ allow(grp).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ end
+ page.refresh
+ expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
+ end
+ end
+
+ context 'with storage_enforcement_date not set' do
+ # This test should break and be rewritten after the implementation of the storage_enforcement_date
+ # TBD: https://gitlab.com/gitlab-org/gitlab/-/issues/350632
+ it 'does not display the banner in the group page' do
+ stub_feature_flags(namespace_storage_limit_bypass_date_check: false)
+ visit project_path(project)
+ expect_page_not_to_have_storage_enforcement_banner
+ end
+ end
+ end
+
+ def expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
+ expect(page).to have_text "From #{storage_enforcement_date} storage limits will apply to this namespace"
+ end
+
+ def expect_page_not_to_have_storage_enforcement_banner
+ expect(page).not_to have_text "storage limits will apply to this namespace"
+ end
+
def remove_with_confirm(button_text, confirm_with, confirm_button_text = 'Confirm')
click_button button_text
fill_in 'confirm_name_input', with: confirm_with
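The storage-enforcement examples above stub the enforcement date with `allow_next_found_instance_of`, a GitLab spec helper that, as used here, appears to apply the stub to the next record of that class loaded from the database (in contrast to `allow_next_instance_of`, which hooks object instantiation). A rough sketch of the pattern under that assumption, with the date value illustrative:

    enforcement_date = Date.today + 30

    # Stub the method on whichever Group record the page load finds next,
    # without needing a handle on the instance beforehand.
    allow_next_found_instance_of(Group) do |group|
      allow(group).to receive(:storage_enforcement_date).and_return(enforcement_date)
    end

    visit project_path(project)
    expect(page).to have_text("From #{enforcement_date} storage limits will apply to this namespace")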
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_line_permalink_updater_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_line_permalink_updater_spec.rb
deleted file mode 100644
index e8c026a254e..00000000000
--- a/spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_line_permalink_updater_spec.rb
+++ /dev/null
@@ -1,103 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Blob button line permalinks (BlobLinePermalinkUpdater)', :js do
- include TreeHelper
-
- let(:project) { create(:project, :public, :repository) }
- let(:path) { 'CHANGELOG' }
- let(:sha) { project.repository.commit.sha }
-
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- end
-
- describe 'On a file(blob)' do
- def get_absolute_url(path = "")
- "http://#{page.server.host}:#{page.server.port}#{path}"
- end
-
- def visit_blob(fragment = nil)
- visit project_blob_path(project, tree_join('master', path), anchor: fragment)
- end
-
- describe 'Click "Permalink" button' do
- it 'works with no initial line number fragment hash' do
- visit_blob
-
- expect(find('.js-data-file-blob-permalink-url')['href']).to eq(get_absolute_url(project_blob_path(project, tree_join(sha, path))))
- end
-
- it 'maintains intitial fragment hash' do
- fragment = "L3"
-
- visit_blob(fragment)
-
- expect(find('.js-data-file-blob-permalink-url')['href']).to eq(get_absolute_url(project_blob_path(project, tree_join(sha, path), anchor: fragment)))
- end
-
- it 'changes fragment hash if line number clicked' do
- ending_fragment = "L5"
-
- visit_blob
-
- find('#L3').click
- find("##{ending_fragment}").click
-
- expect(find('.js-data-file-blob-permalink-url')['href']).to eq(get_absolute_url(project_blob_path(project, tree_join(sha, path), anchor: ending_fragment)))
- end
-
- it 'with initial fragment hash, changes fragment hash if line number clicked' do
- fragment = "L1"
- ending_fragment = "L5"
-
- visit_blob(fragment)
-
- find('#L3').click
- find("##{ending_fragment}").click
-
- expect(find('.js-data-file-blob-permalink-url')['href']).to eq(get_absolute_url(project_blob_path(project, tree_join(sha, path), anchor: ending_fragment)))
- end
- end
-
- describe 'Click "Blame" button' do
- it 'works with no initial line number fragment hash' do
- visit_blob
-
- expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path))))
- end
-
- it 'maintains intitial fragment hash' do
- fragment = "L3"
-
- visit_blob(fragment)
-
- expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: fragment)))
- end
-
- it 'changes fragment hash if line number clicked' do
- ending_fragment = "L5"
-
- visit_blob
-
- find('#L3').click
- find("##{ending_fragment}").click
-
- expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: ending_fragment)))
- end
-
- it 'with initial fragment hash, changes fragment hash if line number clicked' do
- fragment = "L1"
- ending_fragment = "L5"
-
- visit_blob(fragment)
-
- find('#L3').click
- find("##{ending_fragment}").click
-
- expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: ending_fragment)))
- end
- end
- end
-end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_show_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_show_spec.rb
deleted file mode 100644
index 5574b4da383..00000000000
--- a/spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_show_spec.rb
+++ /dev/null
@@ -1,1201 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'File blob', :js do
- include MobileHelpers
-
- let(:project) { create(:project, :public, :repository) }
-
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- end
-
- def visit_blob(path, anchor: nil, ref: 'master', **additional_args)
- visit project_blob_path(project, File.join(ref, path), anchor: anchor, **additional_args)
-
- wait_for_requests
- end
-
- def create_file(file_name, content)
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add #{file_name}",
- file_path: file_name,
- file_content: <<-SPEC.strip_heredoc
- #{content}
- SPEC
- ).execute
- end
-
- context 'Ruby file' do
- before do
- visit_blob('files/ruby/popen.rb')
-
- wait_for_requests
- end
-
- it 'displays the blob' do
- aggregate_failures do
- # shows highlighted Ruby code
- expect(page).to have_css(".js-syntax-highlight")
- expect(page).to have_content("require 'fileutils'")
-
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
-
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
-
- # shows a raw button
- expect(page).to have_link('Open raw')
- end
- end
-
- it 'displays file actions on all screen sizes' do
- file_actions_selector = '.file-actions'
-
- resize_screen_sm
- expect(page).to have_selector(file_actions_selector, visible: true)
-
- resize_screen_xs
- expect(page).to have_selector(file_actions_selector, visible: true)
- end
- end
-
- context 'Markdown file' do
- context 'visiting directly' do
- before do
- visit_blob('files/markdown/ruby-style-guide.md')
-
- wait_for_requests
- end
-
- it 'displays the blob using the rich viewer' do
- aggregate_failures do
- # hides the simple viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
- expect(page).to have_selector('.blob-viewer[data-type="rich"]')
-
- # shows rendered Markdown
- expect(page).to have_link("PEP-8")
-
- # shows a viewer switcher
- expect(page).to have_selector('.js-blob-viewer-switcher')
-
- # shows a disabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
-
- # shows a raw button
- expect(page).to have_link('Open raw')
- end
- end
-
- context 'switching to the simple viewer' do
- before do
- find('.js-blob-viewer-switch-btn[data-viewer=simple]').click
-
- wait_for_requests
- end
-
- it 'displays the blob using the simple viewer' do
- aggregate_failures do
- # hides the rich viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]')
- expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
-
- # shows highlighted Markdown code
- expect(page).to have_css(".js-syntax-highlight")
- expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
-
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
- end
- end
-
- context 'switching to the rich viewer again' do
- before do
- find('.js-blob-viewer-switch-btn[data-viewer=rich]').click
-
- wait_for_requests
- end
-
- it 'displays the blob using the rich viewer' do
- aggregate_failures do
- # hides the simple viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
- expect(page).to have_selector('.blob-viewer[data-type="rich"]')
-
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
- end
- end
- end
- end
- end
-
- context 'when ref switch' do
- def switch_ref_to(ref_name)
- first('.qa-branches-select').click # rubocop:disable QA/SelectorUsage
-
- page.within '.project-refs-form' do
- click_link ref_name
- wait_for_requests
- end
- end
-
- it 'displays single highlighted line number of different ref' do
- visit_blob('files/js/application.js', anchor: 'L1')
-
- switch_ref_to('feature')
-
- page.within '.blob-content' do
- expect(find_by_id('LC1')[:class]).to include("hll")
- end
- end
-
- it 'displays multiple highlighted line numbers of different ref' do
- visit_blob('files/js/application.js', anchor: 'L1-3')
-
- switch_ref_to('feature')
-
- page.within '.blob-content' do
- expect(find_by_id('LC1')[:class]).to include("hll")
- expect(find_by_id('LC2')[:class]).to include("hll")
- expect(find_by_id('LC3')[:class]).to include("hll")
- end
- end
-
- it 'displays no highlighted number of different ref' do
- Files::UpdateService.new(
- project,
- project.first_owner,
- commit_message: 'Update',
- start_branch: 'feature',
- branch_name: 'feature',
- file_path: 'files/js/application.js',
- file_content: 'new content'
- ).execute
-
- project.commit('feature').diffs.diff_files.first
-
- visit_blob('files/js/application.js', anchor: 'L3')
- switch_ref_to('feature')
-
- page.within '.blob-content' do
- expect(page).not_to have_css('.hll')
- end
- end
-
- context 'successfully change ref of similar name' do
- before do
- project.repository.create_branch('dev')
- project.repository.create_branch('development')
- end
-
- it 'switch ref from longer to shorter ref name' do
- visit_blob('files/js/application.js', ref: 'development')
- switch_ref_to('dev')
-
- aggregate_failures do
- expect(page.find('.file-title-name').text).to eq('application.js')
- expect(page).not_to have_css('flash-container')
- end
- end
-
- it 'switch ref from shorter to longer ref name' do
- visit_blob('files/js/application.js', ref: 'dev')
- switch_ref_to('development')
-
- aggregate_failures do
- expect(page.find('.file-title-name').text).to eq('application.js')
- expect(page).not_to have_css('flash-container')
- end
- end
- end
-
- it 'successfully changes ref when the ref name matches the project name' do
- project.repository.create_branch(project.name)
-
- visit_blob('files/js/application.js', ref: project.name)
- switch_ref_to('master')
-
- aggregate_failures do
- expect(page.find('.file-title-name').text).to eq('application.js')
- expect(page).not_to have_css('flash-container')
- end
- end
- end
-
- context 'visiting with a line number anchor' do
- before do
- visit_blob('files/markdown/ruby-style-guide.md', anchor: 'L1')
- end
-
- it 'displays the blob using the simple viewer' do
- aggregate_failures do
- # hides the rich viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]')
- expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
-
- # highlights the line in question
- expect(page).to have_selector('#LC1.hll')
-
- # shows highlighted Markdown code
- expect(page).to have_css(".js-syntax-highlight")
- expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
-
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
- end
- end
- end
- end
-
- context 'Markdown rendering' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add RedCarpet and CommonMark Markdown ",
- file_path: 'files/commonmark/file.md',
- file_content: "1. one\n - sublist\n"
- ).execute
- end
-
- context 'when rendering default markdown' do
- before do
- visit_blob('files/commonmark/file.md')
-
- wait_for_requests
- end
-
- it 'renders using CommonMark' do
- aggregate_failures do
- expect(page).to have_content("sublist")
- expect(page).not_to have_xpath("//ol//li//ul")
- end
- end
- end
- end
-
- context 'Markdown file (stored in LFS)' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add Markdown in LFS",
- file_path: 'files/lfs/file.md',
- file_content: project.repository.blob_at('master', 'files/lfs/lfs_object.iso').data
- ).execute
- end
-
- context 'when LFS is enabled on the project' do
- before do
- allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
- project.update_attribute(:lfs_enabled, true)
-
- visit_blob('files/lfs/file.md')
-
- wait_for_requests
- end
-
- it 'displays an error' do
- aggregate_failures do
- # hides the simple viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
- expect(page).to have_selector('.blob-viewer[data-type="rich"]')
-
- # shows an error message
- expect(page).to have_content('The rendered file could not be displayed because it is stored in LFS. You can download it instead.')
-
- # shows a viewer switcher
- expect(page).to have_selector('.js-blob-viewer-switcher')
-
- # does not show a copy button
- expect(page).not_to have_selector('.js-copy-blob-source-btn')
-
- # shows a download button
- expect(page).to have_link('Download')
- end
- end
-
- context 'switching to the simple viewer' do
- before do
- find('.js-blob-viewer-switcher .js-blob-viewer-switch-btn[data-viewer=simple]').click
-
- wait_for_requests
- end
-
- it 'displays an error' do
- aggregate_failures do
- # hides the rich viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]')
- expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
-
- # shows an error message
- expect(page).to have_content('The source could not be displayed because it is stored in LFS. You can download it instead.')
-
- # does not show a copy button
- expect(page).not_to have_selector('.js-copy-blob-source-btn')
- end
- end
- end
- end
-
- context 'when LFS is disabled on the project' do
- before do
- visit_blob('files/lfs/file.md')
-
- wait_for_requests
- end
-
- it 'displays the blob' do
- aggregate_failures do
- # shows text
- expect(page).to have_content('size 1575078')
-
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
-
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
-
- # shows a raw button
- expect(page).to have_link('Open raw')
- end
- end
- end
- end
-
- context 'PDF file' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add PDF",
- file_path: 'files/test.pdf',
- file_content: project.repository.blob_at('add-pdf-file', 'files/pdf/test.pdf').data
- ).execute
-
- visit_blob('files/test.pdf')
-
- wait_for_requests
- end
-
- it 'displays the blob' do
- aggregate_failures do
- # shows rendered PDF
- expect(page).to have_selector('.js-pdf-viewer')
-
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
-
- # does not show a copy button
- expect(page).not_to have_selector('.js-copy-blob-source-btn')
-
- # shows a download button
- expect(page).to have_link('Download')
- end
- end
- end
-
- context 'Jupiter Notebook file' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add Jupiter Notebook",
- file_path: 'files/basic.ipynb',
- file_content: project.repository.blob_at('add-ipython-files', 'files/ipython/basic.ipynb').data
- ).execute
-
- visit_blob('files/basic.ipynb')
-
- wait_for_requests
- end
-
- it 'displays the blob' do
- aggregate_failures do
- # shows rendered notebook
- expect(page).to have_selector('.js-notebook-viewer-mounted')
-
- # does show a viewer switcher
- expect(page).to have_selector('.js-blob-viewer-switcher')
-
- # show a disabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
-
- # shows a raw button
- expect(page).to have_link('Open raw')
-
- # shows a download button
- expect(page).to have_link('Download')
-
- # shows the rendered notebook
- expect(page).to have_content('test')
- end
- end
- end
-
- context 'ISO file (stored in LFS)' do
- context 'when LFS is enabled on the project' do
- before do
- allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
- project.update_attribute(:lfs_enabled, true)
-
- visit_blob('files/lfs/lfs_object.iso')
-
- wait_for_requests
- end
-
- it 'displays the blob' do
- aggregate_failures do
- # shows a download link
- expect(page).to have_link('Download (1.5 MB)')
-
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
-
- # does not show a copy button
- expect(page).not_to have_selector('.js-copy-blob-source-btn')
-
- # shows a download button
- expect(page).to have_link('Download')
- end
- end
- end
-
- context 'when LFS is disabled on the project' do
- before do
- visit_blob('files/lfs/lfs_object.iso')
-
- wait_for_requests
- end
-
- it 'displays the blob' do
- aggregate_failures do
- # shows text
- expect(page).to have_content('size 1575078')
-
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
-
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
-
- # shows a raw button
- expect(page).to have_link('Open raw')
- end
- end
- end
- end
-
- context 'ZIP file' do
- before do
- visit_blob('Gemfile.zip')
-
- wait_for_requests
- end
-
- it 'displays the blob' do
- aggregate_failures do
- # shows a download link
- expect(page).to have_link('Download (2.11 KB)')
-
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
-
- # does not show a copy button
- expect(page).not_to have_selector('.js-copy-blob-source-btn')
-
- # shows a download button
- expect(page).to have_link('Download')
- end
- end
- end
-
- context 'empty file' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add empty file",
- file_path: 'files/empty.md',
- file_content: ''
- ).execute
-
- visit_blob('files/empty.md')
-
- wait_for_requests
- end
-
- it 'displays an error' do
- aggregate_failures do
- # shows an error message
- expect(page).to have_content('Empty file')
-
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
-
- # does not show a copy button
- expect(page).not_to have_selector('.js-copy-blob-source-btn')
-
- # does not show a download or raw button
- expect(page).not_to have_link('Download')
- expect(page).not_to have_link('Open raw')
- end
- end
- end
-
- context 'binary file that appears to be text in the first 1024 bytes' do
- before do
- visit_blob('encoding/binary-1.bin', ref: 'binary-encoding')
- end
-
- it 'displays the blob' do
- aggregate_failures do
- # shows a download link
- expect(page).to have_link('Download (23.8 KB)')
-
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
-
- # The specs below verify an arguably incorrect result, but since we only
- # learn that the file is not actually text once the text viewer content
- # is loaded asynchronously, there is no straightforward way to get these
- # synchronously loaded elements to display correctly.
- #
- # Clicking the copy button will result in nothing being copied.
- # Clicking the raw button will result in the binary file being downloaded,
- # as expected.
-
- # shows an enabled copy button, incorrectly
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
-
- # shows a raw button, incorrectly
- expect(page).to have_link('Open raw')
- end
- end
- end
-
- context 'files with auxiliary viewers' do
- describe '.gitlab-ci.yml' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add .gitlab-ci.yml",
- file_path: '.gitlab-ci.yml',
- file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
- ).execute
-
- visit_blob('.gitlab-ci.yml')
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that configuration is valid
- expect(page).to have_content('This GitLab CI configuration is valid.')
-
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
- end
-
- describe '.gitlab/route-map.yml' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add .gitlab/route-map.yml",
- file_path: '.gitlab/route-map.yml',
- file_content: <<-MAP.strip_heredoc
- # Team data
- - source: 'data/team.yml'
- public: 'team/'
- MAP
- ).execute
-
- visit_blob('.gitlab/route-map.yml')
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that map is valid
- expect(page).to have_content('This Route Map is valid.')
-
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
- end
-
- describe '.gitlab/dashboards/custom-dashboard.yml' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add .gitlab/dashboards/custom-dashboard.yml",
- file_path: '.gitlab/dashboards/custom-dashboard.yml',
- file_content: file_content
- ).execute
- end
-
- context 'with metrics_dashboard_exhaustive_validations feature flag off' do
- before do
- stub_feature_flags(metrics_dashboard_exhaustive_validations: false)
- visit_blob('.gitlab/dashboards/custom-dashboard.yml')
- end
-
- context 'valid dashboard file' do
- let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that dashboard yaml is valid
- expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
-
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
- end
-
- context 'invalid dashboard file' do
- let(:file_content) { "dashboard: 'invalid'" }
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that dashboard yaml is invalid
- expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
- expect(page).to have_content("panel_groups: should be an array of panel_groups objects")
-
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
- end
- end
-
- context 'with metrics_dashboard_exhaustive_validations feature flag on' do
- before do
- stub_feature_flags(metrics_dashboard_exhaustive_validations: true)
- visit_blob('.gitlab/dashboards/custom-dashboard.yml')
- end
-
- context 'valid dashboard file' do
- let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that dashboard yaml is valid
- expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
-
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
- end
-
- context 'invalid dashboard file' do
- let(:file_content) { "dashboard: 'invalid'" }
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that dashboard yaml is invalid
- expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
- expect(page).to have_content("root is missing required keys: panel_groups")
-
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
- end
- end
- end
-
- context 'LICENSE' do
- before do
- visit_blob('LICENSE')
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows license
- expect(page).to have_content('This project is licensed under the MIT License.')
-
- # shows a learn more link
- expect(page).to have_link('Learn more', href: 'http://choosealicense.com/licenses/mit/')
- end
- end
- end
-
- context '*.gemspec' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add activerecord.gemspec",
- file_path: 'activerecord.gemspec',
- file_content: <<-SPEC.strip_heredoc
- Gem::Specification.new do |s|
- s.platform = Gem::Platform::RUBY
- s.name = "activerecord"
- end
- SPEC
- ).execute
-
- visit_blob('activerecord.gemspec')
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows names of dependency manager and package
- expect(page).to have_content('This project manages its dependencies using RubyGems.')
-
- # shows a learn more link
- expect(page).to have_link('Learn more', href: 'https://rubygems.org/')
- end
- end
- end
-
- context 'CONTRIBUTING.md' do
- before do
- file_name = 'CONTRIBUTING.md'
-
- create_file(file_name, '## Contribution guidelines')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("After you've reviewed these contribution guidelines, you'll be all set to contribute to this project.")
- end
- end
- end
-
- context 'CHANGELOG.md' do
- before do
- file_name = 'CHANGELOG.md'
-
- create_file(file_name, '## Changelog for v1.0.0')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("To find the state of this project's repository at the time of any of these versions, check out the tags.")
- end
- end
- end
-
- context 'Cargo.toml' do
- before do
- file_name = 'Cargo.toml'
-
- create_file(file_name, '
- [package]
- name = "hello_world" # the name of the package
- version = "0.1.0" # the current version, obeying semver
- authors = ["Alice <a@example.com>", "Bob <b@example.com>"]
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Cargo.")
- end
- end
- end
-
- context 'Cartfile' do
- before do
- file_name = 'Cartfile'
-
- create_file(file_name, '
- gitlab "Alamofire/Alamofire" == 4.9.0
- gitlab "Alamofire/AlamofireImage" ~> 3.4
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Carthage.")
- end
- end
- end
-
- context 'composer.json' do
- before do
- file_name = 'composer.json'
-
- create_file(file_name, '
- {
- "license": "MIT"
- }
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Composer.")
- end
- end
- end
-
- context 'Gemfile' do
- before do
- file_name = 'Gemfile'
-
- create_file(file_name, '
- source "https://rubygems.org"
-
- # Gems here
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Bundler.")
- end
- end
- end
-
- context 'Godeps.json' do
- before do
- file_name = 'Godeps.json'
-
- create_file(file_name, '
- {
- "GoVersion": "go1.6"
- }
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using godep.")
- end
- end
- end
-
- context 'go.mod' do
- before do
- file_name = 'go.mod'
-
- create_file(file_name, '
- module example.com/mymodule
-
- go 1.14
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Go Modules.")
- end
- end
- end
-
- context 'package.json' do
- before do
- file_name = 'package.json'
-
- create_file(file_name, '
- {
- "name": "my-awesome-package",
- "version": "1.0.0"
- }
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using npm.")
- end
- end
- end
-
- context 'podfile' do
- before do
- file_name = 'podfile'
-
- create_file(file_name, 'platform :ios, "8.0"')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using CocoaPods.")
- end
- end
- end
-
- context 'test.podspec' do
- before do
- file_name = 'test.podspec'
-
- create_file(file_name, '
- Pod::Spec.new do |s|
- s.name = "TensorFlowLiteC"
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using CocoaPods.")
- end
- end
- end
-
- context 'JSON.podspec.json' do
- before do
- file_name = 'JSON.podspec.json'
-
- create_file(file_name, '
- {
- "name": "JSON"
- }
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using CocoaPods.")
- end
- end
- end
-
- context 'requirements.txt' do
- before do
- file_name = 'requirements.txt'
-
- create_file(file_name, 'Project requirements')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using pip.")
- end
- end
- end
-
- context 'yarn.lock' do
- before do
- file_name = 'yarn.lock'
-
- create_file(file_name, '
- # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
- # yarn lockfile v1
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Yarn.")
- end
- end
- end
-
- context 'openapi.yml' do
- before do
- file_name = 'openapi.yml'
-
- create_file(file_name, '
- swagger: \'2.0\'
- info:
- title: Classic API Resource Documentation
- description: |
- <div class="foo-bar" style="background-color: red;" data-foo-bar="baz">
- <h1>Swagger API documentation</h1>
- </div>
- version: production
- basePath: /JSSResource/
- produces:
- - application/xml
- - application/json
- consumes:
- - application/xml
- - application/json
- security:
- - basicAuth: []
- paths:
- /accounts:
- get:
- responses:
- \'200\':
- description: No response was specified
- tags:
- - accounts
- operationId: findAccounts
- summary: Finds all accounts
- ')
- visit_blob(file_name, useUnsafeMarkdown: '1')
- click_button('Display rendered file')
-
- wait_for_requests
- end
-
- it 'removes `style`, `class`, and `data-*`` attributes from HTML' do
- expect(page).to have_css('h1', text: 'Swagger API documentation')
- expect(page).not_to have_css('.foo-bar')
- expect(page).not_to have_css('[style="background-color: red;"]')
- expect(page).not_to have_css('[data-foo-bar="baz"]')
- end
- end
- end
-
- context 'realtime pipelines' do
- before do
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'feature',
- branch_name: 'feature',
- commit_message: "Add ruby file",
- file_path: 'files/ruby/test.rb',
- file_content: "# Awesome content"
- ).execute
-
- create(:ci_pipeline, status: 'running', project: project, ref: 'feature', sha: project.commit('feature').sha)
- visit_blob('files/ruby/test.rb', ref: 'feature')
- end
-
- it 'shows the realtime pipeline status' do
- page.within('.commit-actions') do
- expect(page).to have_css('.ci-status-icon')
- expect(page).to have_css('.ci-status-icon-running')
- expect(page).to have_css('.js-ci-status-icon-running')
- end
- end
- end
-
- context 'for subgroups' do
- let(:group) { create(:group) }
- let(:subgroup) { create(:group, parent: group) }
- let(:project) { create(:project, :public, :repository, group: subgroup) }
-
- it 'renders tree table without errors' do
- visit_blob('README.md')
-
- expect(page).to have_selector('.file-content')
- expect(page).not_to have_selector('[data-testid="alert-danger"]')
- end
-
- it 'displays a GPG badge' do
- visit_blob('CONTRIBUTING.md', ref: '33f3729a45c02fc67d00adb1b8bca394b0e761d9')
-
- expect(page).not_to have_selector '.gpg-status-box.js-loading-gpg-badge'
- expect(page).to have_selector '.gpg-status-box.invalid'
- end
- end
-
- context 'on signed merge commit' do
- it 'displays a GPG badge' do
- visit_blob('conflicting-file.md', ref: '6101e87e575de14b38b4e1ce180519a813671e10')
-
- expect(page).not_to have_selector '.gpg-status-box.js-loading-gpg-badge'
- expect(page).to have_selector '.gpg-status-box.invalid'
- end
- end
-
- context 'when static objects external storage is enabled' do
- before do
- stub_application_setting(static_objects_external_storage_url: 'https://cdn.gitlab.com')
- end
-
- context 'private project' do
- let_it_be(:project) { create(:project, :repository, :private) }
- let_it_be(:user) { create(:user) }
-
- before do
- project.add_developer(user)
-
- sign_in(user)
- visit_blob('README.md')
- end
-
- it 'shows open raw and download buttons with external storage URL prepended and user token appended to their href' do
- path = project_raw_path(project, 'master/README.md')
- raw_uri = "https://cdn.gitlab.com#{path}?token=#{user.static_object_token}"
- download_uri = "https://cdn.gitlab.com#{path}?inline=false&token=#{user.static_object_token}"
-
- aggregate_failures do
- expect(page).to have_link 'Open raw', href: raw_uri
- expect(page).to have_link 'Download', href: download_uri
- end
- end
- end
-
- context 'public project' do
- before do
- visit_blob('README.md')
- end
-
- it 'shows open raw and download buttons with external storage URL prepended to their href' do
- path = project_raw_path(project, 'master/README.md')
- raw_uri = "https://cdn.gitlab.com#{path}"
- download_uri = "https://cdn.gitlab.com#{path}?inline=false"
-
- aggregate_failures do
- expect(page).to have_link 'Open raw', href: raw_uri
- expect(page).to have_link 'Download', href: download_uri
- end
- end
- end
- end
-end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/blobs/edit_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/blobs/edit_spec.rb
deleted file mode 100644
index f5b9947b29e..00000000000
--- a/spec/features/refactor_blob_viewer_disabled/projects/blobs/edit_spec.rb
+++ /dev/null
@@ -1,213 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Editing file blob', :js do
- include TreeHelper
- include BlobSpecHelpers
-
- let(:project) { create(:project, :public, :repository) }
- let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master') }
- let(:branch) { 'master' }
- let(:file_path) { project.repository.ls_files(project.repository.root_ref)[1] }
- let(:readme_file_path) { 'README.md' }
-
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- end
-
- context 'as a developer' do
- let(:user) { create(:user) }
- let(:role) { :developer }
-
- before do
- project.add_role(user, role)
- sign_in(user)
- end
-
- def edit_and_commit(commit_changes: true, is_diff: false)
- set_default_button('edit')
- refresh
- wait_for_requests
-
- if is_diff
- first('.js-diff-more-actions').click
- click_link('Edit in single-file editor')
- else
- click_link('Edit')
- end
-
- fill_editor(content: 'class NextFeature\\nend\\n')
-
- if commit_changes
- click_button 'Commit changes'
- end
- end
-
- def fill_editor(content: 'class NextFeature\\nend\\n')
- wait_for_requests
- execute_script("monaco.editor.getModels()[0].setValue('#{content}')")
- end
-
- context 'from MR diff' do
- before do
- visit diffs_project_merge_request_path(project, merge_request)
- edit_and_commit(is_diff: true)
- end
-
- it 'returns me to the mr' do
- expect(page).to have_content(merge_request.title)
- end
- end
-
- it 'updates the content of file with a number as file path' do
- project.repository.create_file(user, '1', 'test', message: 'testing', branch_name: branch)
- visit project_blob_path(project, tree_join(branch, '1'))
-
- edit_and_commit
-
- expect(page).to have_content 'NextFeature'
- end
-
- it 'editing a template file in a sub directory does not change path' do
- project.repository.create_file(user, 'ci/.gitlab-ci.yml', 'test', message: 'testing', branch_name: branch)
- visit project_edit_blob_path(project, tree_join(branch, 'ci/.gitlab-ci.yml'))
-
- expect(find_by_id('file_path').value).to eq('ci/.gitlab-ci.yml')
- end
-
- it 'updating file path updates syntax highlighting' do
- visit project_edit_blob_path(project, tree_join(branch, readme_file_path))
- expect(find('#editor')['data-mode-id']).to eq('markdown')
-
- find('#file_path').send_keys('foo.txt') do
- expect(find('#editor')['data-mode-id']).to eq('plaintext')
- end
- end
-
- context 'from blob file path' do
- before do
- visit project_blob_path(project, tree_join(branch, file_path))
- end
-
- it 'updates content' do
- edit_and_commit
-
- expect(page).to have_content 'successfully committed'
- expect(page).to have_content 'NextFeature'
- end
-
- it 'previews content' do
- edit_and_commit(commit_changes: false)
- click_link 'Preview changes'
- wait_for_requests
-
- old_line_count = page.all('.line_holder.old').size
- new_line_count = page.all('.line_holder.new').size
-
- expect(old_line_count).to be > 0
- expect(new_line_count).to be > 0
- end
- end
-
- context 'when rendering the preview' do
- it 'renders content with CommonMark' do
- visit project_edit_blob_path(project, tree_join(branch, readme_file_path))
- fill_editor(content: '1. one\\n - sublist\\n')
- click_link 'Preview'
- wait_for_requests
-
- # the above generates two separate lists (not embedded) in CommonMark
- expect(page).to have_content('sublist')
- expect(page).not_to have_xpath('//ol//li//ul')
- end
- end
- end
-
- context 'visit blob edit' do
- context 'redirects to sign in and returns' do
- context 'as developer' do
- let(:user) { create(:user) }
-
- before do
- project.add_developer(user)
- visit project_edit_blob_path(project, tree_join(branch, file_path))
- end
-
- it 'redirects to sign in and returns' do
- expect(page).to have_current_path(new_user_session_path)
-
- gitlab_sign_in(user)
-
- expect(page).to have_current_path(project_edit_blob_path(project, tree_join(branch, file_path)))
- end
- end
-
- context 'as guest' do
- let(:user) { create(:user) }
-
- before do
- visit project_edit_blob_path(project, tree_join(branch, file_path))
- end
-
- it 'redirects to sign in and returns' do
- expect(page).to have_current_path(new_user_session_path)
-
- gitlab_sign_in(user)
-
- expect(page).to have_current_path(project_blob_path(project, tree_join(branch, file_path)))
- end
- end
- end
-
- context 'as developer' do
- let(:user) { create(:user) }
- let(:protected_branch) { 'protected-branch' }
-
- before do
- project.add_developer(user)
- project.repository.add_branch(user, protected_branch, 'master')
- create(:protected_branch, project: project, name: protected_branch)
- sign_in(user)
- end
-
- context 'on some branch' do
- before do
- visit project_edit_blob_path(project, tree_join(branch, file_path))
- end
-
- it 'shows blob editor with same branch' do
- expect(page).to have_current_path(project_edit_blob_path(project, tree_join(branch, file_path)))
- expect(find('.js-branch-name').value).to eq(branch)
- end
- end
-
- context 'with protected branch' do
- it 'shows blob editor with patch branch' do
- freeze_time do
- visit project_edit_blob_path(project, tree_join(protected_branch, file_path))
-
- epoch = Time.zone.now.strftime('%s%L').last(5)
-
- expect(find('.js-branch-name').value).to eq "#{user.username}-protected-branch-patch-#{epoch}"
- end
- end
- end
- end
-
- context 'as maintainer' do
- let(:user) { create(:user) }
-
- before do
- project.add_maintainer(user)
- sign_in(user)
- visit project_edit_blob_path(project, tree_join(branch, file_path))
- end
-
- it 'shows blob editor with same branch' do
- expect(page).to have_current_path(project_edit_blob_path(project, tree_join(branch, file_path)))
- expect(find('.js-branch-name').value).to eq(branch)
- end
- end
- end
-end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/blobs/shortcuts_blob_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/blobs/shortcuts_blob_spec.rb
deleted file mode 100644
index fe0b217992e..00000000000
--- a/spec/features/refactor_blob_viewer_disabled/projects/blobs/shortcuts_blob_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Blob shortcuts', :js do
- include TreeHelper
- let(:project) { create(:project, :public, :repository) }
- let(:path) { project.repository.ls_files(project.repository.root_ref)[0] }
- let(:sha) { project.repository.commit.sha }
-
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- end
-
- describe 'On a file(blob)', :js do
- def get_absolute_url(path = "")
- "http://#{page.server.host}:#{page.server.port}#{path}"
- end
-
- def visit_blob(fragment = nil)
- visit project_blob_path(project, tree_join('master', path), anchor: fragment)
- end
-
- describe 'pressing "y"' do
- it 'redirects to permalink with commit sha' do
- visit_blob
- wait_for_requests
-
- find('body').native.send_key('y')
-
- expect(page).to have_current_path(get_absolute_url(project_blob_path(project, tree_join(sha, path))), url: true)
- end
-
- it 'maintains fragment hash when redirecting' do
- fragment = "L1"
- visit_blob(fragment)
- wait_for_requests
-
- find('body').native.send_key('y')
-
- expect(page).to have_current_path(get_absolute_url(project_blob_path(project, tree_join(sha, path), anchor: fragment)), url: true)
- end
- end
- end
-end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_creates_new_blob_in_new_project_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_creates_new_blob_in_new_project_spec.rb
deleted file mode 100644
index fe38659f60b..00000000000
--- a/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_creates_new_blob_in_new_project_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'User creates new blob', :js do
- include WebIdeSpecHelpers
-
- let(:user) { create(:user) }
- let(:project) { create(:project, :empty_repo) }
-
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- end
-
- shared_examples 'creating a file' do
- it 'allows the user to add a new file in Web IDE' do
- visit project_path(project)
-
- click_link 'New file'
-
- wait_for_requests
-
- ide_create_new_file('dummy-file', content: "Hello world\n")
-
- ide_commit
-
- expect(page).to have_content('All changes are committed')
- expect(project.repository.blob_at('master', 'dummy-file').data).to eql("Hello world\n")
- end
- end
-
- describe 'as a maintainer' do
- before do
- project.add_maintainer(user)
- sign_in(user)
- end
-
- it_behaves_like 'creating a file'
- end
-
- describe 'as an admin' do
- let(:user) { create(:user, :admin) }
-
- before do
- sign_in(user)
- gitlab_enable_admin_mode_sign_in(user)
- end
-
- it_behaves_like 'creating a file'
- end
-
- describe 'as a developer' do
- before do
- project.add_developer(user)
- sign_in(user)
- visit project_path(project)
- end
-
- it 'does not allow pushing to the default branch' do
- expect(page).not_to have_content('New file')
- end
- end
-end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
deleted file mode 100644
index 4290df08e66..00000000000
--- a/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
+++ /dev/null
@@ -1,80 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'User follows pipeline suggest nudge spec when feature is enabled', :js do
- include CookieHelper
-
- let(:project) { create(:project, :empty_repo) }
- let(:user) { project.first_owner }
-
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- end
-
- describe 'viewing the new blob page' do
- before do
- sign_in(user)
- end
-
- context 'when the page is loaded from the link using the suggest_gitlab_ci_yml param' do
- before do
- visit namespace_project_new_blob_path(namespace_id: project.namespace, project_id: project, id: 'master', suggest_gitlab_ci_yml: 'true')
- end
-
- it 'pre-fills .gitlab-ci.yml for file name' do
- file_name = page.find_by_id('file_name')
-
- expect(file_name.value).to have_content('.gitlab-ci.yml')
- end
-
- it 'chooses the .gitlab-ci.yml Template Type' do
- template_type = page.find(:css, '.template-type-selector .dropdown-toggle-text')
-
- expect(template_type.text).to have_content('.gitlab-ci.yml')
- end
-
- it 'displays suggest_gitlab_ci_yml popover' do
- page.find(:css, '.gitlab-ci-yml-selector').click
-
- popover_selector = '.suggest-gitlab-ci-yml'
-
- expect(page).to have_css(popover_selector, visible: true)
-
- page.within(popover_selector) do
- expect(page).to have_content('1/2: Choose a template')
- end
- end
-
- it 'sets the commit cookie when the Commit button is clicked' do
- click_button 'Commit changes'
-
- expect(get_cookie("suggest_gitlab_ci_yml_commit_#{project.id}")).to be_present
- end
- end
-
- context 'when the page is visited without the param' do
- before do
- visit namespace_project_new_blob_path(namespace_id: project.namespace, project_id: project, id: 'master')
- end
-
- it 'does not pre-fill .gitlab-ci.yml for file name' do
- file_name = page.find_by_id('file_name')
-
- expect(file_name.value).not_to have_content('.gitlab-ci.yml')
- end
-
- it 'does not choose the .gitlab-ci.yml Template Type' do
- template_type = page.find(:css, '.template-type-selector .dropdown-toggle-text')
-
- expect(template_type.text).to have_content('Select a template type')
- end
-
- it 'does not display suggest_gitlab_ci_yml popover' do
- popover_selector = '.b-popover.suggest-gitlab-ci-yml'
-
- expect(page).not_to have_css(popover_selector, visible: true)
- end
- end
- end
-end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_views_pipeline_editor_button_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_views_pipeline_editor_button_spec.rb
deleted file mode 100644
index a00e1eaa551..00000000000
--- a/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_views_pipeline_editor_button_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'User views pipeline editor button on root ci config file', :js do
- include BlobSpecHelpers
-
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :public, :repository) }
-
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- end
-
- context "when the ci config is the root file" do
- before do
- project.add_developer(user)
- sign_in(user)
- end
-
- it 'shows the button to the Pipeline Editor' do
- project.update!(ci_config_path: '.my-config.yml')
- project.repository.create_file(user, project.ci_config_path_or_default, 'test', message: 'testing', branch_name: 'master')
- visit project_blob_path(project, File.join('master', '.my-config.yml'))
-
- expect(page).to have_content('Edit in pipeline editor')
- end
-
- it 'does not show the Pipeline Editor button' do
- project.repository.create_file(user, '.my-sub-config.yml', 'test', message: 'testing', branch_name: 'master')
- visit project_blob_path(project, File.join('master', '.my-sub-config.yml'))
-
- expect(page).not_to have_content('Edit in pipeline editor')
- end
- end
-
- context "when user cannot collaborate" do
- before do
- sign_in(user)
- end
- it 'does not show the Pipeline Editor button' do
- visit project_blob_path(project, File.join('master', '.my-config.yml'))
- expect(page).not_to have_content('Edit in pipeline editor')
- end
- end
-end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/editing_a_file_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/editing_a_file_spec.rb
deleted file mode 100644
index c32fb1aa4d3..00000000000
--- a/spec/features/refactor_blob_viewer_disabled/projects/files/editing_a_file_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Projects > Files > User wants to edit a file' do
- let(:project) { create(:project, :repository) }
- let(:user) { project.first_owner }
- let(:commit_params) do
- {
- start_branch: project.default_branch,
- branch_name: project.default_branch,
- commit_message: "Committing First Update",
- file_path: ".gitignore",
- file_content: "First Update",
- last_commit_sha: Gitlab::Git::Commit.last_for_path(project.repository, project.default_branch,
- ".gitignore").sha
- }
- end
-
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- sign_in user
- visit project_edit_blob_path(project,
- File.join(project.default_branch, '.gitignore'))
- end
-
- it 'file has been updated since the user opened the edit page' do
- Files::UpdateService.new(project, user, commit_params).execute
-
- click_button 'Commit changes'
-
- expect(page).to have_content 'Someone edited the file the same time you did.'
- end
-end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/find_file_keyboard_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/find_file_keyboard_spec.rb
deleted file mode 100644
index 9ba5f5a9b57..00000000000
--- a/spec/features/refactor_blob_viewer_disabled/projects/files/find_file_keyboard_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Projects > Files > Find file keyboard shortcuts', :js do
- let(:project) { create(:project, :repository) }
- let(:user) { project.first_owner }
-
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- sign_in user
-
- visit project_find_file_path(project, project.repository.root_ref)
-
- wait_for_requests
- end
-
- it 'opens file when pressing enter key' do
- fill_in 'file_find', with: 'CHANGELOG'
-
- find('#file_find').native.send_keys(:enter)
-
- expect(page).to have_selector('.blob-content-holder')
-
- page.within('.js-file-title') do
- expect(page).to have_content('CHANGELOG')
- end
- end
-
- it 'navigates files with arrow keys' do
- fill_in 'file_find', with: 'application.'
-
- find('#file_find').native.send_keys(:down)
- find('#file_find').native.send_keys(:enter)
-
- expect(page).to have_selector('.blob-content-holder')
-
- page.within('.js-file-title') do
- expect(page).to have_content('application.js')
- end
- end
-end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/project_owner_creates_license_file_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/project_owner_creates_license_file_spec.rb
deleted file mode 100644
index ab920504100..00000000000
--- a/spec/features/refactor_blob_viewer_disabled/projects/files/project_owner_creates_license_file_spec.rb
+++ /dev/null
@@ -1,72 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Projects > Files > Project owner creates a license file', :js do
- let(:project) { create(:project, :repository) }
- let(:project_maintainer) { project.first_owner }
-
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- project.repository.delete_file(project_maintainer, 'LICENSE',
- message: 'Remove LICENSE', branch_name: 'master')
- sign_in(project_maintainer)
- visit project_path(project)
- end
-
- it 'project maintainer creates a license file manually from a template' do
- visit project_tree_path(project, project.repository.root_ref)
- find('.add-to-tree').click
- click_link 'New file'
-
- fill_in :file_name, with: 'LICENSE'
-
- expect(page).to have_selector('.license-selector')
-
- select_template('MIT License')
-
- file_content = first('.file-editor')
- expect(file_content).to have_content('MIT License')
- expect(file_content).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
-
- fill_in :commit_message, with: 'Add a LICENSE file', visible: true
- click_button 'Commit changes'
-
- expect(page).to have_current_path(
- project_blob_path(project, 'master/LICENSE'), ignore_query: true)
- expect(page).to have_content('MIT License')
- expect(page).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
- end
-
- it 'project maintainer creates a license file from the "Add license" link' do
- click_link 'Add LICENSE'
-
- expect(page).to have_content('New file')
- expect(page).to have_current_path(
- project_new_blob_path(project, 'master'), ignore_query: true)
- expect(find('#file_name').value).to eq('LICENSE')
- expect(page).to have_selector('.license-selector')
-
- select_template('MIT License')
-
- file_content = first('.file-editor')
- expect(file_content).to have_content('MIT License')
- expect(file_content).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
-
- fill_in :commit_message, with: 'Add a LICENSE file', visible: true
- click_button 'Commit changes'
-
- expect(page).to have_current_path(
- project_blob_path(project, 'master/LICENSE'), ignore_query: true)
- expect(page).to have_content('MIT License')
- expect(page).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
- end
-
- def select_template(template)
- page.within('.js-license-selector-wrap') do
- click_button 'Apply a template'
- click_link template
- wait_for_requests
- end
- end
-end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_files_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_files_spec.rb
deleted file mode 100644
index 5abdad905fd..00000000000
--- a/spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_files_spec.rb
+++ /dev/null
@@ -1,377 +0,0 @@
-# frozen_string_literal: true
-
-require "spec_helper"
-
-RSpec.describe "User browses files", :js do
- include RepoHelpers
-
- let(:fork_message) do
- "You're not allowed to make changes to this project directly. "\
- "A fork of this project has been created that you can make changes in, so you can submit a merge request."
- end
-
- let(:project) { create(:project, :repository, name: "Shop") }
- let(:project2) { create(:project, :repository, name: "Another Project", path: "another-project") }
- let(:tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) }
- let(:user) { project.first_owner }
-
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- sign_in(user)
- end
-
- it "shows last commit for current directory", :js do
- visit(tree_path_root_ref)
-
- click_link("files")
-
- last_commit = project.repository.last_commit_for_path(project.default_branch, "files")
-
- page.within(".commit-detail") do
- expect(page).to have_content(last_commit.short_id).and have_content(last_commit.author_name)
- end
- end
-
- context "when browsing the master branch", :js do
- before do
- visit(tree_path_root_ref)
- end
-
- it "shows files from a repository" do
- expect(page).to have_content("VERSION")
- .and have_content(".gitignore")
- .and have_content("LICENSE")
- end
-
- it "shows the `Browse Directory` link" do
- click_link("files")
-
- page.within('.repo-breadcrumb') do
- expect(page).to have_link('files')
- end
-
- click_link("History")
-
- expect(page).to have_link("Browse Directory").and have_no_link("Browse Code")
- end
-
- it "shows the `Browse File` link" do
- page.within(".tree-table") do
- click_link("README.md")
- end
-
- click_link("History")
-
- expect(page).to have_link("Browse File").and have_no_link("Browse Files")
- end
-
- it "shows the `Browse Files` link" do
- click_link("History")
-
- expect(page).to have_link("Browse Files").and have_no_link("Browse Directory")
- end
-
- it "redirects to the permalink URL" do
- click_link(".gitignore")
- click_link("Permalink")
-
- permalink_path = project_blob_path(project, "#{project.repository.commit.sha}/.gitignore")
-
- expect(page).to have_current_path(permalink_path, ignore_query: true)
- end
- end
-
- context "when browsing the `markdown` branch", :js do
- context "when browsing the root" do
- before do
- visit(project_tree_path(project, "markdown"))
- end
-
- it "shows correct files and links" do
- expect(page).to have_current_path(project_tree_path(project, "markdown"), ignore_query: true)
- expect(page).to have_content("README.md")
- .and have_content("CHANGELOG")
- .and have_content("Welcome to GitLab GitLab is a free project and repository management application")
- .and have_link("GitLab API doc")
- .and have_link("GitLab API website")
- .and have_link("Rake tasks")
- .and have_link("backup and restore procedure")
- .and have_link("GitLab API doc directory")
- .and have_link("Maintenance")
- .and have_header_with_correct_id_and_link(2, "Application details", "application-details")
- .and have_link("empty", href: "")
- .and have_link("#id", href: "#id")
- .and have_link("/#id", href: project_blob_path(project, "markdown/README.md", anchor: "id"))
- .and have_link("README.md#id", href: project_blob_path(project, "markdown/README.md", anchor: "id"))
- .and have_link("d/README.md#id", href: project_blob_path(project, "markdown/db/README.md", anchor: "id"))
- end
-
- it "shows correct content of file" do
- click_link("GitLab API doc")
-
- expect(page).to have_current_path(project_blob_path(project, "markdown/doc/api/README.md"), ignore_query: true)
- expect(page).to have_content("All API requests require authentication")
- .and have_content("Contents")
- .and have_link("Users")
- .and have_link("Rake tasks")
- .and have_header_with_correct_id_and_link(1, "GitLab API", "gitlab-api")
-
- click_link("Users")
-
- expect(page).to have_current_path(project_blob_path(project, "markdown/doc/api/users.md"), ignore_query: true)
- expect(page).to have_content("Get a list of users.")
-
- page.go_back
-
- click_link("Rake tasks")
-
- expect(page).to have_current_path(project_tree_path(project, "markdown/doc/raketasks"), ignore_query: true)
- expect(page).to have_content("backup_restore.md").and have_content("maintenance.md")
-
- click_link("maintenance.md")
-
- expect(page).to have_current_path(project_blob_path(project, "markdown/doc/raketasks/maintenance.md"), ignore_query: true)
- expect(page).to have_content("bundle exec rake gitlab:env:info RAILS_ENV=production")
-
- click_link("shop")
-
- page.within(".tree-table") do
- click_link("README.md")
- end
-
- page.go_back
-
- page.within(".tree-table") do
- click_link("d")
- end
-
- expect(page).to have_link("..", href: project_tree_path(project, "markdown/"))
-
- page.within(".tree-table") do
- click_link("README.md")
- end
-
- expect(page).to have_link("empty", href: "")
- end
-
- it "shows correct content of directory" do
- click_link("GitLab API doc directory")
-
- expect(page).to have_current_path(project_tree_path(project, "markdown/doc/api"), ignore_query: true)
- expect(page).to have_content("README.md").and have_content("users.md")
-
- click_link("Users")
-
- expect(page).to have_current_path(project_blob_path(project, "markdown/doc/api/users.md"), ignore_query: true)
- expect(page).to have_content("List users").and have_content("Get a list of users.")
- end
- end
- end
-
- context 'when commit message has markdown', :js do
- before do
- project.repository.create_file(user, 'index', 'test', message: ':star: testing', branch_name: 'master')
-
- visit(project_tree_path(project, "master"))
- end
-
- it 'renders emojis' do
- expect(page).to have_selector('gl-emoji', count: 2)
- end
- end
-
- context "when browsing a `improve/awesome` branch", :js do
- before do
- visit(project_tree_path(project, "improve/awesome"))
- end
-
- it "shows files from a repository" do
- expect(page).to have_content("VERSION")
- .and have_content(".gitignore")
- .and have_content("LICENSE")
-
- click_link("files")
-
- page.within('.repo-breadcrumb') do
- expect(page).to have_link('files')
- end
-
- click_link("html")
-
- page.within('.repo-breadcrumb') do
- expect(page).to have_link('html')
- end
-
- expect(page).to have_link('500.html')
- end
- end
-
- context "when browsing a `Ääh-test-utf-8` branch", :js do
- before do
- project.repository.create_branch('Ääh-test-utf-8', project.repository.root_ref)
- visit(project_tree_path(project, "Ääh-test-utf-8"))
- end
-
- it "shows files from a repository" do
- expect(page).to have_content("VERSION")
- .and have_content(".gitignore")
- .and have_content("LICENSE")
-
- click_link("files")
-
- page.within('.repo-breadcrumb') do
- expect(page).to have_link('files')
- end
-
- click_link("html")
-
- page.within('.repo-breadcrumb') do
- expect(page).to have_link('html')
- end
-
- expect(page).to have_link('500.html')
- end
- end
-
- context "when browsing a `test-#` branch", :js do
- before do
- project.repository.create_branch('test-#', project.repository.root_ref)
- visit(project_tree_path(project, "test-#"))
- end
-
- it "shows files from a repository" do
- expect(page).to have_content("VERSION")
- .and have_content(".gitignore")
- .and have_content("LICENSE")
-
- click_link("files")
-
- page.within('.repo-breadcrumb') do
- expect(page).to have_link('files')
- end
-
- click_link("html")
-
- page.within('.repo-breadcrumb') do
- expect(page).to have_link('html')
- end
-
- expect(page).to have_link('500.html')
- end
- end
-
- context "when browsing a specific ref", :js do
- let(:ref) { project_tree_path(project, "6d39438") }
-
- before do
- visit(ref)
- end
-
- it "shows files from a repository for `6d39438`" do
- expect(page).to have_current_path(ref, ignore_query: true)
- expect(page).to have_content(".gitignore").and have_content("LICENSE")
- end
-
- it "shows files from a repository with apostroph in its name" do
- first(".js-project-refs-dropdown").click
-
- page.within(".project-refs-form") do
- click_link("'test'")
- end
-
- expect(page).to have_selector(".dropdown-toggle-text", text: "'test'")
-
- visit(project_tree_path(project, "'test'"))
-
- expect(page).not_to have_selector(".tree-commit .animation-container")
- end
-
- it "shows the code with a leading dot in the directory" do
- first(".js-project-refs-dropdown").click
-
- page.within(".project-refs-form") do
- click_link("fix")
- end
-
- visit(project_tree_path(project, "fix/.testdir"))
-
- expect(page).not_to have_selector(".tree-commit .animation-container")
- end
-
- it "does not show the permalink link" do
- click_link(".gitignore")
-
- expect(page).not_to have_link("permalink")
- end
- end
-
- context "when browsing a file content", :js do
- before do
- visit(tree_path_root_ref)
- wait_for_requests
-
- click_link(".gitignore")
- end
-
- it "shows a file content" do
- expect(page).to have_content("*.rbc")
- end
-
- it "is possible to blame" do
- click_link("Blame")
-
- expect(page).to have_content("*.rb")
- .and have_content("Dmitriy Zaporozhets")
- .and have_content("Initial commit")
- .and have_content("Ignore DS files")
-
- previous_commit_anchor = "//a[@title='Ignore DS files']/parent::span/following-sibling::span/a"
- find(:xpath, previous_commit_anchor).click
-
- expect(page).to have_content("*.rb")
- .and have_content("Dmitriy Zaporozhets")
- .and have_content("Initial commit")
-
- expect(page).not_to have_content("Ignore DS files")
- end
- end
-
- context "when browsing a file with pathspec characters" do
- let(:filename) { ':wq' }
- let(:newrev) { project.repository.commit('master').sha }
-
- before do
- create_file_in_repo(project, 'master', 'master', filename, 'Test file')
- path = File.join('master', filename)
-
- visit(project_blob_path(project, path))
- wait_for_requests
- end
-
- it "shows raw file content in a new tab" do
- new_tab = window_opened_by {click_link 'Open raw'}
-
- within_window new_tab do
- expect(page).to have_content("Test file")
- end
- end
- end
-
- context "when browsing a raw file" do
- before do
- visit(tree_path_root_ref)
- wait_for_requests
-
- click_link(".gitignore")
- wait_for_requests
- end
-
- it "shows raw file content in a new tab" do
- new_tab = window_opened_by {click_link 'Open raw'}
-
- within_window new_tab do
- expect(page).to have_content("*.rbc")
- end
- end
- end
-end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_lfs_files_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_lfs_files_spec.rb
deleted file mode 100644
index 2d9b6b3a903..00000000000
--- a/spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_lfs_files_spec.rb
+++ /dev/null
@@ -1,86 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Projects > Files > User browses LFS files' do
- let(:project) { create(:project, :repository) }
- let(:user) { project.first_owner }
-
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- sign_in(user)
- end
-
- context 'when LFS is disabled', :js do
- before do
- allow_next_found_instance_of(Project) do |project|
- allow(project).to receive(:lfs_enabled?).and_return(false)
- end
-
- visit project_tree_path(project, 'lfs')
- wait_for_requests
- end
-
- it 'is possible to see raw content of LFS pointer' do
- click_link 'files'
-
- page.within('.repo-breadcrumb') do
- expect(page).to have_link('files')
- end
-
- click_link 'lfs'
-
- page.within('.repo-breadcrumb') do
- expect(page).to have_link('lfs')
- end
-
- click_link 'lfs_object.iso'
-
- expect(page).to have_content 'version https://git-lfs.github.com/spec/v1'
- expect(page).to have_content 'oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897'
- expect(page).to have_content 'size 1575078'
- expect(page).not_to have_content 'Download (1.5 MB)'
- end
- end
-
- context 'when LFS is enabled', :js do
- before do
- allow_next_found_instance_of(Project) do |project|
- allow(project).to receive(:lfs_enabled?).and_return(true)
- end
-
- visit project_tree_path(project, 'lfs')
- wait_for_requests
- end
-
- it 'shows an LFS object' do
- click_link('files')
-
- page.within('.repo-breadcrumb') do
- expect(page).to have_link('files')
- end
-
- click_link('lfs')
- click_link('lfs_object.iso')
-
- expect(page).to have_content('Download (1.5 MB)')
- expect(page).not_to have_content('version https://git-lfs.github.com/spec/v1')
- expect(page).not_to have_content('oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897')
- expect(page).not_to have_content('size 1575078')
-
- page.within('.content') do
- expect(page).to have_content('Delete')
- expect(page).to have_content('History')
- expect(page).to have_content('Permalink')
- expect(page).to have_content('Replace')
- expect(page).to have_link('Download')
-
- expect(page).not_to have_content('Annotate')
- expect(page).not_to have_content('Blame')
-
- expect(page).not_to have_selector(:link_or_button, text: /^Edit$/)
- expect(page).to have_selector(:link_or_button, 'Open in Web IDE')
- end
- end
- end
-end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/user_deletes_files_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/user_deletes_files_spec.rb
deleted file mode 100644
index d503c9b1192..00000000000
--- a/spec/features/refactor_blob_viewer_disabled/projects/files/user_deletes_files_spec.rb
+++ /dev/null
@@ -1,74 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Projects > Files > User deletes files', :js do
- let(:fork_message) do
- "You're not allowed to make changes to this project directly. "\
- "A fork of this project has been created that you can make changes in, so you can submit a merge request."
- end
-
- let(:project) { create(:project, :repository, name: 'Shop') }
- let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') }
- let(:project_tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) }
- let(:project2_tree_path_root_ref) { project_tree_path(project2, project2.repository.root_ref) }
- let(:user) { create(:user) }
-
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- sign_in(user)
- end
-
- context 'when a user has write access' do
- before do
- project.add_maintainer(user)
- visit(project_tree_path_root_ref)
- wait_for_requests
- end
-
- it 'deletes the file', :js do
- click_link('.gitignore')
-
- expect(page).to have_content('.gitignore')
-
- click_on('Delete')
- fill_in(:commit_message, with: 'New commit message', visible: true)
- click_button('Delete file')
-
- expect(page).to have_current_path(project_tree_path(project, 'master/'), ignore_query: true)
- expect(page).not_to have_content('.gitignore')
- end
- end
-
- context 'when a user does not have write access', :js do
- before do
- project2.add_reporter(user)
- visit(project2_tree_path_root_ref)
- wait_for_requests
- end
-
- it 'deletes the file in a forked project', :js, :sidekiq_might_not_need_inline do
- click_link('.gitignore')
-
- expect(page).to have_content('.gitignore')
-
- click_on('Delete')
-
- expect(page).to have_link('Fork')
- expect(page).to have_button('Cancel')
-
- click_link('Fork')
-
- expect(page).to have_content(fork_message)
-
- click_on('Delete')
- fill_in(:commit_message, with: 'New commit message', visible: true)
- click_button('Delete file')
-
- fork = user.fork_of(project2.reload)
-
- expect(page).to have_current_path(project_new_merge_request_path(fork), ignore_query: true)
- expect(page).to have_content('New commit message')
- end
- end
-end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/user_edits_files_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/user_edits_files_spec.rb
deleted file mode 100644
index 7a70d67d8ca..00000000000
--- a/spec/features/refactor_blob_viewer_disabled/projects/files/user_edits_files_spec.rb
+++ /dev/null
@@ -1,226 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Projects > Files > User edits files', :js do
- include ProjectForksHelper
- include BlobSpecHelpers
-
- let(:project) { create(:project, :repository, name: 'Shop') }
- let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') }
- let(:project_tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) }
- let(:project2_tree_path_root_ref) { project_tree_path(project2, project2.repository.root_ref) }
- let(:user) { create(:user) }
-
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- sign_in(user)
- end
-
- after do
- unset_default_button
- end
-
- shared_examples 'unavailable for an archived project' do
- it 'does not show the edit link for an archived project', :js do
- project.update!(archived: true)
- visit project_tree_path(project, project.repository.root_ref)
-
- click_link('.gitignore')
-
- aggregate_failures 'available edit buttons' do
- expect(page).not_to have_text('Edit')
- expect(page).not_to have_text('Web IDE')
-
- expect(page).not_to have_text('Replace')
- expect(page).not_to have_text('Delete')
- end
- end
- end
-
- context 'when a user has write access', :js do
- before do
- project.add_maintainer(user)
- visit(project_tree_path_root_ref)
- wait_for_requests
- end
-
- it 'inserts a content of a file' do
- set_default_button('edit')
- click_link('.gitignore')
- click_link_or_button('Edit')
- find('.file-editor', match: :first)
-
- find('#editor')
- set_editor_value('*.rbca')
-
- expect(editor_value).to eq('*.rbca')
- end
-
- it 'does not show the edit link if a file is binary' do
- binary_file = File.join(project.repository.root_ref, 'files/images/logo-black.png')
- visit(project_blob_path(project, binary_file))
- wait_for_requests
-
- page.within '.content' do
- expect(page).not_to have_link('edit')
- end
- end
-
- it 'commits an edited file' do
- set_default_button('edit')
- click_link('.gitignore')
- click_link_or_button('Edit')
- find('.file-editor', match: :first)
-
- find('#editor')
- set_editor_value('*.rbca')
- fill_in(:commit_message, with: 'New commit message', visible: true)
- click_button('Commit changes')
-
- expect(page).to have_current_path(project_blob_path(project, 'master/.gitignore'), ignore_query: true)
-
- wait_for_requests
-
- expect(page).to have_content('*.rbca')
- end
-
- it 'commits an edited file to a new branch' do
- set_default_button('edit')
- click_link('.gitignore')
- click_link_or_button('Edit')
-
- find('.file-editor', match: :first)
-
- find('#editor')
- set_editor_value('*.rbca')
- fill_in(:commit_message, with: 'New commit message', visible: true)
- fill_in(:branch_name, with: 'new_branch_name', visible: true)
- click_button('Commit changes')
-
- expect(page).to have_current_path(project_new_merge_request_path(project), ignore_query: true)
-
- click_link('Changes')
-
- expect(page).to have_content('*.rbca')
- end
-
- it 'shows the diff of an edited file' do
- set_default_button('edit')
- click_link('.gitignore')
- click_link_or_button('Edit')
- find('.file-editor', match: :first)
-
- find('#editor')
- set_editor_value('*.rbca')
- click_link('Preview changes')
-
- expect(page).to have_css('.line_holder.new')
- end
-
- it_behaves_like 'unavailable for an archived project'
- end
-
- context 'when a user does not have write access', :js do
- before do
- project2.add_reporter(user)
- visit(project2_tree_path_root_ref)
- wait_for_requests
- end
-
- def expect_fork_prompt
- expect(page).to have_selector(:link_or_button, 'Fork')
- expect(page).to have_selector(:link_or_button, 'Cancel')
- expect(page).to have_content(
- "You can’t edit files directly in this project. "\
- "Fork this project and submit a merge request with your changes."
- )
- end
-
- def expect_fork_status
- expect(page).to have_content(
- "You're not allowed to make changes to this project directly. "\
- "A fork of this project has been created that you can make changes in, so you can submit a merge request."
- )
- end
-
- it 'inserts a content of a file in a forked project', :sidekiq_might_not_need_inline do
- set_default_button('edit')
- click_link('.gitignore')
- click_link_or_button('Edit')
-
- expect_fork_prompt
-
- click_link_or_button('Fork project')
-
- expect_fork_status
-
- find('.file-editor', match: :first)
-
- find('#editor')
- set_editor_value('*.rbca')
-
- expect(editor_value).to eq('*.rbca')
- end
-
- it 'commits an edited file in a forked project', :sidekiq_might_not_need_inline do
- set_default_button('edit')
- click_link('.gitignore')
- click_link_or_button('Edit')
-
- expect_fork_prompt
- click_link_or_button('Fork project')
-
- find('.file-editor', match: :first)
-
- find('#editor')
- set_editor_value('*.rbca')
- fill_in(:commit_message, with: 'New commit message', visible: true)
- click_button('Commit changes')
-
- fork = user.fork_of(project2.reload)
-
- expect(page).to have_current_path(project_new_merge_request_path(fork), ignore_query: true)
-
- wait_for_requests
-
- expect(page).to have_content('New commit message')
- end
-
- context 'when the user already had a fork of the project', :js do
- let!(:forked_project) { fork_project(project2, user, namespace: user.namespace, repository: true) }
-
- before do
- visit(project2_tree_path_root_ref)
- wait_for_requests
- end
-
- it 'links to the forked project for editing', :sidekiq_might_not_need_inline do
- set_default_button('edit')
- click_link('.gitignore')
- click_link_or_button('Edit')
-
- expect(page).not_to have_link('Fork project')
-
- find('#editor')
- set_editor_value('*.rbca')
- fill_in(:commit_message, with: 'Another commit', visible: true)
- click_button('Commit changes')
-
- fork = user.fork_of(project2)
-
- expect(page).to have_current_path(project_new_merge_request_path(fork), ignore_query: true)
-
- wait_for_requests
-
- expect(page).to have_content('Another commit')
- expect(page).to have_content("From #{forked_project.full_path}")
- expect(page).to have_content("into #{project2.full_path}")
- end
-
- it_behaves_like 'unavailable for an archived project' do
- let(:project) { project2 }
- end
- end
- end
-end
diff --git a/spec/features/search/user_uses_header_search_field_spec.rb b/spec/features/search/user_uses_header_search_field_spec.rb
index 7350a54e8df..1523586ab26 100644
--- a/spec/features/search/user_uses_header_search_field_spec.rb
+++ b/spec/features/search/user_uses_header_search_field_spec.rb
@@ -153,6 +153,7 @@ RSpec.describe 'User uses header search field', :js do
it 'displays search options' do
fill_in_search('test')
+
expect(page).to have_selector(scoped_search_link('test', search_code: true))
expect(page).to have_selector(scoped_search_link('test', group_id: group.id, search_code: true))
expect(page).to have_selector(scoped_search_link('test', project_id: project.id, group_id: group.id, search_code: true))
@@ -167,6 +168,7 @@ RSpec.describe 'User uses header search field', :js do
it 'displays search options' do
fill_in_search('test')
+
expect(page).to have_selector(scoped_search_link('test', search_code: true, repository_ref: 'master'))
expect(page).not_to have_selector(scoped_search_link('test', search_code: true, group_id: project.namespace_id, repository_ref: 'master'))
expect(page).to have_selector(scoped_search_link('test', search_code: true, project_id: project.id, repository_ref: 'master'))
@@ -184,7 +186,7 @@ RSpec.describe 'User uses header search field', :js do
fill_in_search('Feature')
within(dashboard_search_options_popup_menu) do
- expect(page).to have_text('"Feature" in all GitLab')
+ expect(page).to have_text('Feature in all GitLab')
expect(page).to have_no_text('Feature Flags')
end
end
diff --git a/spec/features/tags/developer_updates_tag_spec.rb b/spec/features/tags/developer_updates_tag_spec.rb
index b2fc28b8493..531ed91c057 100644
--- a/spec/features/tags/developer_updates_tag_spec.rb
+++ b/spec/features/tags/developer_updates_tag_spec.rb
@@ -2,6 +2,7 @@
require 'spec_helper'
+# TODO: remove this file together with FF https://gitlab.com/gitlab-org/gitlab/-/issues/366244
RSpec.describe 'Developer updates tag' do
let(:user) { create(:user) }
let(:group) { create(:group) }
@@ -10,6 +11,7 @@ RSpec.describe 'Developer updates tag' do
before do
project.add_developer(user)
sign_in(user)
+ stub_feature_flags(edit_tag_release_notes_via_release_page: false)
visit project_tags_path(project)
end
diff --git a/spec/features/unsubscribe_links_spec.rb b/spec/features/unsubscribe_links_spec.rb
index 3fe276ce162..5317f586390 100644
--- a/spec/features/unsubscribe_links_spec.rb
+++ b/spec/features/unsubscribe_links_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'Unsubscribe links', :sidekiq_might_not_need_inline do
let(:mail) { ActionMailer::Base.deliveries.last }
let(:body) { Capybara::Node::Simple.new(mail.default_part_body.to_s) }
let(:header_link) { mail.header['List-Unsubscribe'].to_s[1..-2] } # Strip angle brackets
- let(:body_link) { body.find_link('unsubscribe')['href'] }
+ let(:body_link) { body.find_link('Unsubscribe')['href'] }
before do
perform_enqueued_jobs { issue }
diff --git a/spec/features/users/email_verification_on_login_spec.rb b/spec/features/users/email_verification_on_login_spec.rb
new file mode 100644
index 00000000000..0833f7f6f8e
--- /dev/null
+++ b/spec/features/users/email_verification_on_login_spec.rb
@@ -0,0 +1,357 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting do
+ include EmailHelpers
+
+ let_it_be(:user) { create(:user) }
+
+ let(:require_email_verification_enabled) { user }
+
+ before do
+ stub_feature_flags(require_email_verification: require_email_verification_enabled)
+ end
+
+ shared_examples 'email verification required' do
+ before do
+ allow(Gitlab::AppLogger).to receive(:info)
+ end
+
+ it 'requires email verification before being able to access GitLab' do
+ perform_enqueued_jobs do
+ # When logging in
+ gitlab_sign_in(user)
+ expect_log_message(message: "Account Locked: username=#{user.username}")
+ expect_log_message('Instructions Sent')
+
+ # Expect the user to be locked and the unlock_token to be set
+ user.reload
+ expect(user.locked_at).not_to be_nil
+ expect(user.unlock_token).not_to be_nil
+
+ # Expect to see the verification form on the login page
+ expect(page).to have_current_path(new_user_session_path)
+ expect(page).to have_content('Help us protect your account')
+
+ # Expect an instructions email to be sent with a code
+ code = expect_instructions_email_and_extract_code
+
+ # Signing in again prompts for the code and doesn't send a new one
+ gitlab_sign_in(user)
+ expect(page).to have_current_path(new_user_session_path)
+ expect(page).to have_content('Help us protect your account')
+
+ # Verify the code
+ verify_code(code)
+ expect_log_message('Successful')
+ expect_log_message(message: "Successful Login: username=#{user.username} "\
+ "ip=127.0.0.1 method=standard admin=false")
+
+ # Expect the user to be unlocked
+ expect_user_to_be_unlocked
+
+ # Expect a confirmation page with a meta refresh tag for 3 seconds to the root
+ expect(page).to have_current_path(users_successful_verification_path)
+ expect(page).to have_content('Verification successful')
+ expect(page).to have_selector("meta[http-equiv='refresh'][content='3; url=#{root_path}']", visible: false)
+ end
+ end
+
+ describe 'resending a new code' do
+ it 'resends a new code' do
+ perform_enqueued_jobs do
+ # When logging in
+ gitlab_sign_in(user)
+
+ # Expect an instructions email to be sent with a code
+ code = expect_instructions_email_and_extract_code
+
+ # Request a new code
+ click_link 'Resend code'
+ expect_log_message('Instructions Sent', 2)
+ new_code = expect_instructions_email_and_extract_code
+
+ # Verify the old code is different from the new code
+ expect(code).not_to eq(new_code)
+ end
+ end
+
+ it 'rate limits resends' do
+ # When logging in
+ gitlab_sign_in(user)
+
+ # It shows a resend button
+ expect(page).to have_link 'Resend code'
+
+ # Resend more than the rate limited amount of times
+ 10.times do
+ click_link 'Resend code'
+ end
+
+ # Expect the link to be gone
+ expect(page).not_to have_link 'Resend code'
+
+ # Wait for 1 hour
+ travel 1.hour
+
+ # Now it's visible again
+ gitlab_sign_in(user)
+ expect(page).to have_link 'Resend code'
+ end
+ end
+
+ describe 'verification errors' do
+ it 'rate limits verifications' do
+ perform_enqueued_jobs do
+ # When logging in
+ gitlab_sign_in(user)
+
+ # Expect an instructions email to be sent with a code
+ code = expect_instructions_email_and_extract_code
+
+ # Verify an invalid token more than the rate limited amount of times
+ 11.times do
+ verify_code('123456')
+ end
+
+ # Expect an error message
+ expect_log_message('Failed Attempt', reason: 'rate_limited')
+ expect(page).to have_content("You've reached the maximum amount of tries. "\
+ 'Wait 10 minutes or resend a new code and try again.')
+
+ # Wait for 10 minutes
+ travel 10.minutes
+
+ # Now it works again
+ verify_code(code)
+ expect_log_message('Successful')
+ end
+ end
+
+ it 'verifies invalid codes' do
+ # When logging in
+ gitlab_sign_in(user)
+
+ # Verify an invalid code
+ verify_code('123456')
+
+ # Expect an error message
+ expect_log_message('Failed Attempt', reason: 'invalid')
+ expect(page).to have_content('The code is incorrect. Enter it again, or resend a new code.')
+ end
+
+ it 'verifies expired codes' do
+ perform_enqueued_jobs do
+ # When logging in
+ gitlab_sign_in(user)
+
+ # Expect an instructions email to be sent with a code
+ code = expect_instructions_email_and_extract_code
+
+ # Wait for the code to expire before verifying
+ travel VerifiesWithEmail::TOKEN_VALID_FOR_MINUTES.minutes + 1.second
+ verify_code(code)
+
+ # Expect an error message
+ expect_log_message('Failed Attempt', reason: 'expired')
+ expect(page).to have_content('The code has expired. Resend a new code and try again.')
+ end
+ end
+ end
+ end
+
+ shared_examples 'no email verification required' do |**login_args|
+ it 'does not lock the user and redirects to the root page after logging in' do
+ gitlab_sign_in(user, **login_args)
+
+ expect_user_to_be_unlocked
+
+ expect(page).to have_current_path(root_path)
+ end
+ end
+
+ shared_examples 'no email verification required when 2fa enabled or ff disabled' do
+ context 'when 2FA is enabled' do
+ let_it_be(:user) { create(:user, :two_factor) }
+
+ it_behaves_like 'no email verification required', two_factor_auth: true
+ end
+
+ context 'when the feature flag is disabled' do
+ let(:require_email_verification_enabled) { false }
+
+ it_behaves_like 'no email verification required'
+ end
+ end
+
+ describe 'when failing to login the maximum allowed number of times' do
+ before do
+ # See comment in RequireEmailVerification::MAXIMUM_ATTEMPTS on why this is divided by 2
+ (RequireEmailVerification::MAXIMUM_ATTEMPTS / 2).times do
+ gitlab_sign_in(user, password: 'wrong_password')
+ end
+ end
+
+ it 'locks the user, but does not set the unlock token', :aggregate_failures do
+ user.reload
+ expect(user.locked_at).not_to be_nil
+ expect(user.unlock_token).to be_nil # The unlock token is only set after logging in with valid credentials
+ expect(user.failed_attempts).to eq(RequireEmailVerification::MAXIMUM_ATTEMPTS)
+ end
+
+ it_behaves_like 'email verification required'
+ it_behaves_like 'no email verification required when 2fa enabled or ff disabled'
+
+ describe 'when waiting for the auto unlock time' do
+ before do
+ travel User::UNLOCK_IN + 1.second
+ end
+
+ it_behaves_like 'no email verification required'
+ end
+ end
+
+ describe 'when no previous authentication event exists' do
+ it_behaves_like 'no email verification required'
+ end
+
+ describe 'when a previous authentication event exists for another ip address' do
+ before do
+ create(:authentication_event, :successful, user: user, ip_address: '1.2.3.4')
+ end
+
+ it_behaves_like 'email verification required'
+ it_behaves_like 'no email verification required when 2fa enabled or ff disabled'
+ end
+
+ describe 'when a previous authentication event exists for the same ip address' do
+ before do
+ create(:authentication_event, :successful, user: user)
+ end
+
+ it_behaves_like 'no email verification required'
+ end
+
+ describe 'rate limiting password guessing' do
+ before do
+ 5.times { gitlab_sign_in(user, password: 'wrong_password') }
+ gitlab_sign_in(user)
+ end
+
+ it 'shows an error message on the login page' do
+ expect(page).to have_current_path(new_user_session_path)
+ expect(page).to have_content('Maximum login attempts exceeded. Wait 10 minutes and try again.')
+ end
+ end
+
+ describe 'inconsistent states' do
+ context 'when the feature flag is toggled off after being prompted for a verification token' do
+ before do
+ create(:authentication_event, :successful, user: user, ip_address: '1.2.3.4')
+ end
+
+ it 'still accepts the token' do
+ perform_enqueued_jobs do
+ # The user is prompted for a verification code
+ gitlab_sign_in(user)
+ expect(page).to have_content('Help us protect your account')
+ code = expect_instructions_email_and_extract_code
+
+ # We toggle the feature flag off
+ stub_feature_flags(require_email_verification: false)
+
+ # Resending and verifying the code work as expected
+ click_link 'Resend code'
+ new_code = expect_instructions_email_and_extract_code
+
+ verify_code(code)
+ expect(page).to have_content('The code is incorrect. Enter it again, or resend a new code.')
+
+ travel VerifiesWithEmail::TOKEN_VALID_FOR_MINUTES.minutes + 1.second
+
+ verify_code(new_code)
+ expect(page).to have_content('The code has expired. Resend a new code and try again.')
+
+ click_link 'Resend code'
+ another_code = expect_instructions_email_and_extract_code
+
+ verify_code(another_code)
+ expect_user_to_be_unlocked
+ expect(page).to have_current_path(users_successful_verification_path)
+ end
+ end
+ end
+
+ context 'when the feature flag is toggled on after Devise sent unlock instructions' do
+ let(:require_email_verification_enabled) { false }
+
+ before do
+ perform_enqueued_jobs do
+ (User.maximum_attempts / 2).times do
+ gitlab_sign_in(user, password: 'wrong_password')
+ end
+ end
+ end
+
+ it 'the unlock link still works' do
+ # The user is locked and unlock instructions are sent
+ expect(page).to have_content('Invalid login or password.')
+ user.reload
+ expect(user.locked_at).not_to be_nil
+ expect(user.unlock_token).not_to be_nil
+ mail = find_email_for(user)
+
+ expect(mail.to).to match_array([user.email])
+ expect(mail.subject).to eq('Unlock instructions')
+ unlock_url = mail.body.parts.first.to_s[/http.*/]
+
+ # We toggle the feature flag on
+ stub_feature_flags(require_email_verification: true)
+
+ # Unlocking works as expected
+ visit unlock_url
+ expect_user_to_be_unlocked
+ expect(page).to have_current_path(new_user_session_path)
+ expect(page).to have_content('Your account has been unlocked successfully')
+
+ gitlab_sign_in(user)
+ expect(page).to have_current_path(root_path)
+ end
+ end
+ end
+
+ def expect_user_to_be_unlocked
+ user.reload
+
+ aggregate_failures do
+ expect(user.locked_at).to be_nil
+ expect(user.unlock_token).to be_nil
+ expect(user.failed_attempts).to eq(0)
+ end
+ end
+
+ def expect_instructions_email_and_extract_code
+ mail = find_email_for(user)
+ expect(mail.to).to match_array([user.email])
+ expect(mail.subject).to eq('Verify your identity')
+ code = mail.body.parts.first.to_s[/\d{#{VerifiesWithEmail::TOKEN_LENGTH}}/]
+ reset_delivered_emails!
+ code
+ end
+
+ def verify_code(code)
+ fill_in 'Verification code', with: code
+ click_button 'Verify code'
+ end
+
+ def expect_log_message(event = nil, times = 1, reason: '', message: nil)
+ expect(Gitlab::AppLogger).to have_received(:info)
+ .exactly(times).times
+ .with(message || hash_including(message: 'Email Verification',
+ event: event,
+ username: user.username,
+ ip: '127.0.0.1',
+ reason: reason))
+ end
+end
diff --git a/spec/features/users/google_analytics_csp_spec.rb b/spec/features/users/google_analytics_csp_spec.rb
new file mode 100644
index 00000000000..46a9b3be22f
--- /dev/null
+++ b/spec/features/users/google_analytics_csp_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Google Analytics 4 content security policy' do
+ it 'includes the GA4 content security policy headers' do
+ visit root_path
+
+ expect(response_headers['Content-Security-Policy']).to include(
+ '*.googletagmanager.com',
+ '*.google-analytics.com',
+ '*.analytics.google.com'
+ )
+ end
+end
diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb
index efb7c98d63a..3ba3650b608 100644
--- a/spec/features/users/login_spec.rb
+++ b/spec/features/users/login_spec.rb
@@ -579,9 +579,9 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
context 'group setting' do
before do
group1 = create :group, name: 'Group 1', require_two_factor_authentication: true
- group1.add_user(user, GroupMember::DEVELOPER)
+ group1.add_member(user, GroupMember::DEVELOPER)
group2 = create :group, name: 'Group 2', require_two_factor_authentication: true
- group2.add_user(user, GroupMember::DEVELOPER)
+ group2.add_member(user, GroupMember::DEVELOPER)
end
context 'with grace period defined' do
diff --git a/spec/features/users/show_spec.rb b/spec/features/users/show_spec.rb
index cb395846b96..2a444dad486 100644
--- a/spec/features/users/show_spec.rb
+++ b/spec/features/users/show_spec.rb
@@ -9,6 +9,12 @@ RSpec.describe 'User page' do
subject(:visit_profile) { visit(user_path(user)) }
+ it 'shows user id' do
+ subject
+
+ expect(page).to have_content("User ID: #{user.id}")
+ end
+
context 'with public profile' do
it 'shows all the tabs' do
subject
diff --git a/spec/finders/autocomplete/users_finder_spec.rb b/spec/finders/autocomplete/users_finder_spec.rb
index 9b3421d1b4f..de031041e18 100644
--- a/spec/finders/autocomplete/users_finder_spec.rb
+++ b/spec/finders/autocomplete/users_finder_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe Autocomplete::UsersFinder do
let_it_be(:group) { create(:group, :public) }
before_all do
- group.add_users([user1], GroupMember::DEVELOPER)
+ group.add_members([user1], GroupMember::DEVELOPER)
end
it { is_expected.to match_array([user1]) }
diff --git a/spec/finders/branches_finder_spec.rb b/spec/finders/branches_finder_spec.rb
index 11b7ab08fb2..9314f616c44 100644
--- a/spec/finders/branches_finder_spec.rb
+++ b/spec/finders/branches_finder_spec.rb
@@ -181,17 +181,17 @@ RSpec.describe BranchesFinder do
it 'filters branches' do
result = subject
- expect(result.map(&:name)).to eq(%w(feature_conflict fix))
+ expect(result.map(&:name)).to eq(%w(feature_conflict few-commits))
end
end
context 'by next page_token and per_page' do
- let(:params) { { page_token: 'fix', per_page: 2 } }
+ let(:params) { { page_token: 'few-commits', per_page: 2 } }
it 'filters branches' do
result = subject
- expect(result.map(&:name)).to eq(%w(flatten-dir gitattributes))
+ expect(result.map(&:name)).to eq(%w(fix flatten-dir))
end
end
@@ -254,7 +254,7 @@ RSpec.describe BranchesFinder do
it 'falls back to default execute and ignore paginations' do
result = subject
- expect(result.map(&:name)).to eq(%w(feature feature_conflict fix flatten-dir))
+ expect(result.map(&:name)).to eq(%w(feature feature_conflict few-commits fix flatten-dir))
end
end
end
diff --git a/spec/finders/ci/runners_finder_spec.rb b/spec/finders/ci/runners_finder_spec.rb
index e7ec4f01995..aeab5a51766 100644
--- a/spec/finders/ci/runners_finder_spec.rb
+++ b/spec/finders/ci/runners_finder_spec.rb
@@ -49,6 +49,67 @@ RSpec.describe Ci::RunnersFinder do
end
end
+ context 'by upgrade status' do
+ let(:upgrade_status) {}
+
+ let_it_be(:runner1) { create(:ci_runner, version: 'a') }
+ let_it_be(:runner2) { create(:ci_runner, version: 'b') }
+ let_it_be(:runner3) { create(:ci_runner, version: 'c') }
+ let_it_be(:runner_version_recommended) do
+ create(:ci_runner_version, version: 'a', status: :recommended)
+ end
+
+ let_it_be(:runner_version_not_available) do
+ create(:ci_runner_version, version: 'b', status: :not_available)
+ end
+
+ let_it_be(:runner_version_available) do
+ create(:ci_runner_version, version: 'c', status: :available)
+ end
+
+ def execute
+ described_class.new(current_user: admin, params: { upgrade_status: upgrade_status }).execute
+ end
+
+ Ci::RunnerVersion.statuses.keys.map(&:to_sym).each do |status|
+ context "set to :#{status}" do
+ let(:upgrade_status) { status }
+
+ it "calls with_upgrade_status scope with corresponding :#{status} status" do
+ if [:available, :not_available, :recommended].include?(status)
+ expected_result = Ci::Runner.with_upgrade_status(status)
+ end
+
+ expect(Ci::Runner).to receive(:with_upgrade_status).with(status).and_call_original
+
+ result = execute
+
+ expect(result).to match_array(expected_result) if expected_result
+ end
+ end
+ end
+
+ context 'set to an invalid value' do
+ let(:upgrade_status) { :some_invalid_status }
+
+ it 'does not call with_upgrade_status' do
+ expect(Ci::Runner).not_to receive(:with_upgrade_status)
+
+ expect(execute).to match_array(Ci::Runner.all)
+ end
+ end
+
+ context 'set to nil' do
+ let(:upgrade_status) { nil }
+
+ it 'does not call with_upgrade_status' do
+ expect(Ci::Runner).not_to receive(:with_upgrade_status)
+
+ expect(execute).to match_array(Ci::Runner.all)
+ end
+ end
+ end
+
context 'by status' do
Ci::Runner::AVAILABLE_STATUSES.each do |status|
it "calls the corresponding :#{status} scope on Ci::Runner" do
diff --git a/spec/finders/contributed_projects_finder_spec.rb b/spec/finders/contributed_projects_finder_spec.rb
index 86d3e7f8f19..e8ce02122a1 100644
--- a/spec/finders/contributed_projects_finder_spec.rb
+++ b/spec/finders/contributed_projects_finder_spec.rb
@@ -23,9 +23,15 @@ RSpec.describe ContributedProjectsFinder do
end
describe 'activity without a current user' do
- subject { finder.execute }
+ it 'only returns public projects' do
+ projects = finder.execute
+ expect(projects).to match_array([public_project])
+ end
- it { is_expected.to match_array([public_project]) }
+ it 'returns all projects when visibility is ignored' do
+ projects = finder.execute(ignore_visibility: true)
+ expect(projects).to match_array([private_project, internal_project, public_project])
+ end
end
describe 'activity with a current user' do
diff --git a/spec/finders/groups/user_groups_finder_spec.rb b/spec/finders/groups/user_groups_finder_spec.rb
index a4a9b8d16d0..9339741da79 100644
--- a/spec/finders/groups/user_groups_finder_spec.rb
+++ b/spec/finders/groups/user_groups_finder_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Groups::UserGroupsFinder do
let_it_be(:private_maintainer_group) { create(:group, :private, name: 'b private maintainer', path: 'b-private-maintainer') }
let_it_be(:public_developer_group) { create(:group, project_creation_level: nil, name: 'c public developer', path: 'c-public-developer') }
let_it_be(:public_maintainer_group) { create(:group, name: 'a public maintainer', path: 'a-public-maintainer') }
+ let_it_be(:public_owner_group) { create(:group, name: 'a public owner', path: 'a-public-owner') }
subject { described_class.new(current_user, target_user, arguments).execute }
@@ -21,12 +22,14 @@ RSpec.describe Groups::UserGroupsFinder do
private_maintainer_group.add_maintainer(user)
public_developer_group.add_developer(user)
public_maintainer_group.add_maintainer(user)
+ public_owner_group.add_owner(user)
end
it 'returns all groups where the user is a direct member' do
is_expected.to match(
[
public_maintainer_group,
+ public_owner_group,
private_maintainer_group,
public_developer_group,
guest_group
@@ -53,6 +56,7 @@ RSpec.describe Groups::UserGroupsFinder do
is_expected.to match(
[
public_maintainer_group,
+ public_owner_group,
private_maintainer_group,
public_developer_group
]
@@ -73,6 +77,32 @@ RSpec.describe Groups::UserGroupsFinder do
end
end
+ context 'when permission is :transfer_projects' do
+ let(:arguments) { { permission_scope: :transfer_projects } }
+
+ specify do
+ is_expected.to match(
+ [
+ public_maintainer_group,
+ public_owner_group,
+ private_maintainer_group
+ ]
+ )
+ end
+
+ context 'when search is provided' do
+ let(:arguments) { { permission_scope: :transfer_projects, search: 'owner' } }
+
+ specify do
+ is_expected.to match(
+ [
+ public_owner_group
+ ]
+ )
+ end
+ end
+ end
+
context 'when search is provided' do
let(:arguments) { { search: 'maintainer' } }
diff --git a/spec/finders/joined_groups_finder_spec.rb b/spec/finders/joined_groups_finder_spec.rb
index 058db735708..feb1b11d159 100644
--- a/spec/finders/joined_groups_finder_spec.rb
+++ b/spec/finders/joined_groups_finder_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe JoinedGroupsFinder do
context 'if profile visitor is in one of the private group projects' do
before do
project = create(:project, :private, group: private_group, name: 'B', path: 'B')
- project.add_user(profile_visitor, Gitlab::Access::DEVELOPER)
+ project.add_member(profile_visitor, Gitlab::Access::DEVELOPER)
end
it 'shows group' do
diff --git a/spec/finders/packages/conan/package_finder_spec.rb b/spec/finders/packages/conan/package_finder_spec.rb
index 6848786818b..f25a62225a8 100644
--- a/spec/finders/packages/conan/package_finder_spec.rb
+++ b/spec/finders/packages/conan/package_finder_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe ::Packages::Conan::PackageFinder do
before do
project.update_column(:visibility_level, Gitlab::VisibilityLevel.string_options[visibility.to_s])
- project.add_user(user, role) unless role == :anonymous
+ project.add_member(user, role) unless role == :anonymous
end
it { is_expected.to eq(expected_packages) }
diff --git a/spec/finders/packages/group_packages_finder_spec.rb b/spec/finders/packages/group_packages_finder_spec.rb
index 954db6481cd..90a8cd3c57f 100644
--- a/spec/finders/packages/group_packages_finder_spec.rb
+++ b/spec/finders/packages/group_packages_finder_spec.rb
@@ -98,8 +98,8 @@ RSpec.describe Packages::GroupPackagesFinder do
)
unless role == :anonymous
- project.add_user(user, role)
- subproject.add_user(user, role)
+ project.add_member(user, role)
+ subproject.add_member(user, role)
end
end
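
The add_user to add_member renames in these finder specs are mechanical; both call shapes that appear in the hunks keep working:

    project.add_member(user, :developer)                 # role as a symbol
    project.add_member(user, Gitlab::Access::DEVELOPER)  # role as an access level
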
diff --git a/spec/finders/projects_finder_spec.rb b/spec/finders/projects_finder_spec.rb
index d26180bbf94..3bef4d85b33 100644
--- a/spec/finders/projects_finder_spec.rb
+++ b/spec/finders/projects_finder_spec.rb
@@ -177,6 +177,35 @@ RSpec.describe ProjectsFinder do
end
end
+ describe 'filter by topic_id' do
+ let_it_be(:topic1) { create(:topic) }
+ let_it_be(:topic2) { create(:topic) }
+
+ before do
+ public_project.reload
+ public_project.topics << topic1
+ public_project.save!
+ end
+
+ context 'topic with assigned projects' do
+ let(:params) { { topic_id: topic1.id } }
+
+ it { is_expected.to eq([public_project]) }
+ end
+
+ context 'topic without assigned projects' do
+ let(:params) { { topic_id: topic2.id } }
+
+ it { is_expected.to eq([]) }
+ end
+
+ context 'non-existing topic' do
+ let(:params) { { topic_id: non_existing_record_id } }
+
+ it { is_expected.to eq([]) }
+ end
+ end
+
describe 'filter by personal' do
let!(:personal_project) { create(:project, namespace: user.namespace) }
let(:params) { { personal: true } }
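
The three topic_id contexts imply a filter that matches projects through their assigned topics and yields an empty relation for an unknown id. A rough sketch, with the method name and the join assumed rather than copied from ProjectsFinder:

    # Assumed shape of the filter, not the actual implementation.
    def by_topic_id(items)
      return items unless params[:topic_id].present?

      topic = Projects::Topic.find_by_id(params[:topic_id])
      return Project.none unless topic

      items.joins(:topics).where(topics: { id: topic.id })
    end
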
diff --git a/spec/finders/user_recent_events_finder_spec.rb b/spec/finders/user_recent_events_finder_spec.rb
index d7f7bb9cebe..dbd52ec4d18 100644
--- a/spec/finders/user_recent_events_finder_spec.rb
+++ b/spec/finders/user_recent_events_finder_spec.rb
@@ -18,266 +18,248 @@ RSpec.describe UserRecentEventsFinder do
subject(:finder) { described_class.new(current_user, project_owner, nil, params) }
- shared_examples 'UserRecentEventsFinder examples' do
- describe '#execute' do
- context 'when profile is public' do
- it 'returns all the events' do
- expect(finder.execute).to include(private_event, internal_event, public_event)
- end
- end
-
- context 'when profile is private' do
- it 'returns no event' do
- allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?).with(current_user, :read_user_profile, project_owner).and_return(false)
-
- expect(finder.execute).to be_empty
- end
+ describe '#execute' do
+ context 'when profile is public' do
+ it 'returns all the events' do
+ expect(finder.execute).to include(private_event, internal_event, public_event)
end
+ end
- it 'does not include the events if the user cannot read cross project' do
+ context 'when profile is private' do
+ it 'returns no event' do
allow(Ability).to receive(:allowed?).and_call_original
- expect(Ability).to receive(:allowed?).with(current_user, :read_cross_project) { false }
+ allow(Ability).to receive(:allowed?).with(current_user, :read_user_profile, project_owner).and_return(false)
expect(finder.execute).to be_empty
end
+ end
+
+ it 'does not include the events if the user cannot read cross project' do
+ allow(Ability).to receive(:allowed?).and_call_original
+ expect(Ability).to receive(:allowed?).with(current_user, :read_cross_project) { false }
- context 'events from multiple users' do
- let_it_be(:second_user, reload: true) { create(:user) }
- let_it_be(:private_project_second_user) { create(:project, :private, creator: second_user) }
+ expect(finder.execute).to be_empty
+ end
- let_it_be(:internal_project_second_user) { create(:project, :internal, creator: second_user) }
- let_it_be(:public_project_second_user) { create(:project, :public, creator: second_user) }
- let_it_be(:private_event_second_user) { create(:event, project: private_project_second_user, author: second_user) }
- let_it_be(:internal_event_second_user) { create(:event, project: internal_project_second_user, author: second_user) }
- let_it_be(:public_event_second_user) { create(:event, project: public_project_second_user, author: second_user) }
+ context 'events from multiple users' do
+ let_it_be(:second_user, reload: true) { create(:user) }
+ let_it_be(:private_project_second_user) { create(:project, :private, creator: second_user) }
- it 'includes events from all users', :aggregate_failures do
- events = described_class.new(current_user, [project_owner, second_user], nil, params).execute
+ let_it_be(:internal_project_second_user) { create(:project, :internal, creator: second_user) }
+ let_it_be(:public_project_second_user) { create(:project, :public, creator: second_user) }
+ let_it_be(:private_event_second_user) { create(:event, project: private_project_second_user, author: second_user) }
+ let_it_be(:internal_event_second_user) { create(:event, project: internal_project_second_user, author: second_user) }
+ let_it_be(:public_event_second_user) { create(:event, project: public_project_second_user, author: second_user) }
- expect(events).to include(private_event, internal_event, public_event)
- expect(events).to include(private_event_second_user, internal_event_second_user, public_event_second_user)
- expect(events.size).to eq(6)
- end
+ it 'includes events from all users', :aggregate_failures do
+ events = described_class.new(current_user, [project_owner, second_user], nil, params).execute
- context 'selected events' do
- using RSpec::Parameterized::TableSyntax
-
- let_it_be(:push_event1) { create(:push_event, project: public_project, author: project_owner) }
- let_it_be(:push_event2) { create(:push_event, project: public_project_second_user, author: second_user) }
- let_it_be(:merge_event1) { create(:event, :merged, target_type: MergeRequest.to_s, project: public_project, author: project_owner) }
- let_it_be(:merge_event2) { create(:event, :merged, target_type: MergeRequest.to_s, project: public_project_second_user, author: second_user) }
- let_it_be(:comment_event1) { create(:event, :commented, target_type: Note.to_s, project: public_project, author: project_owner) }
- let_it_be(:comment_event2) { create(:event, :commented, target_type: DiffNote.to_s, project: public_project, author: project_owner) }
- let_it_be(:comment_event3) { create(:event, :commented, target_type: DiscussionNote.to_s, project: public_project_second_user, author: second_user) }
- let_it_be(:issue_event1) { create(:event, :created, project: public_project, target: issue, author: project_owner) }
- let_it_be(:issue_event2) { create(:event, :updated, project: public_project, target: issue, author: project_owner) }
- let_it_be(:issue_event3) { create(:event, :closed, project: public_project_second_user, target: issue, author: second_user) }
- let_it_be(:wiki_event1) { create(:wiki_page_event, project: public_project, author: project_owner) }
- let_it_be(:wiki_event2) { create(:wiki_page_event, project: public_project_second_user, author: second_user) }
- let_it_be(:design_event1) { create(:design_event, project: public_project, author: project_owner) }
- let_it_be(:design_event2) { create(:design_updated_event, project: public_project_second_user, author: second_user) }
-
- where(:event_filter, :ordered_expected_events) do
- EventFilter.new(EventFilter::PUSH) | lazy { [push_event1, push_event2] }
- EventFilter.new(EventFilter::MERGED) | lazy { [merge_event1, merge_event2] }
- EventFilter.new(EventFilter::COMMENTS) | lazy { [comment_event1, comment_event2, comment_event3] }
- EventFilter.new(EventFilter::TEAM) | lazy { [private_event, internal_event, public_event, private_event_second_user, internal_event_second_user, public_event_second_user] }
- EventFilter.new(EventFilter::ISSUE) | lazy { [issue_event1, issue_event2, issue_event3] }
- EventFilter.new(EventFilter::WIKI) | lazy { [wiki_event1, wiki_event2] }
- EventFilter.new(EventFilter::DESIGNS) | lazy { [design_event1, design_event2] }
- end
+ expect(events).to include(private_event, internal_event, public_event)
+ expect(events).to include(private_event_second_user, internal_event_second_user, public_event_second_user)
+ expect(events.size).to eq(6)
+ end
+
+ context 'selected events' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:push_event1) { create(:push_event, project: public_project, author: project_owner) }
+ let_it_be(:push_event2) { create(:push_event, project: public_project_second_user, author: second_user) }
+ let_it_be(:merge_event1) { create(:event, :merged, target_type: MergeRequest.to_s, project: public_project, author: project_owner) }
+ let_it_be(:merge_event2) { create(:event, :merged, target_type: MergeRequest.to_s, project: public_project_second_user, author: second_user) }
+ let_it_be(:comment_event1) { create(:event, :commented, target_type: Note.to_s, project: public_project, author: project_owner) }
+ let_it_be(:comment_event2) { create(:event, :commented, target_type: DiffNote.to_s, project: public_project, author: project_owner) }
+ let_it_be(:comment_event3) { create(:event, :commented, target_type: DiscussionNote.to_s, project: public_project_second_user, author: second_user) }
+ let_it_be(:issue_event1) { create(:event, :created, project: public_project, target: issue, author: project_owner) }
+ let_it_be(:issue_event2) { create(:event, :updated, project: public_project, target: issue, author: project_owner) }
+ let_it_be(:issue_event3) { create(:event, :closed, project: public_project_second_user, target: issue, author: second_user) }
+ let_it_be(:wiki_event1) { create(:wiki_page_event, project: public_project, author: project_owner) }
+ let_it_be(:wiki_event2) { create(:wiki_page_event, project: public_project_second_user, author: second_user) }
+ let_it_be(:design_event1) { create(:design_event, project: public_project, author: project_owner) }
+ let_it_be(:design_event2) { create(:design_updated_event, project: public_project_second_user, author: second_user) }
+
+ where(:event_filter, :ordered_expected_events) do
+ EventFilter.new(EventFilter::PUSH) | lazy { [push_event1, push_event2] }
+ EventFilter.new(EventFilter::MERGED) | lazy { [merge_event1, merge_event2] }
+ EventFilter.new(EventFilter::COMMENTS) | lazy { [comment_event1, comment_event2, comment_event3] }
+ EventFilter.new(EventFilter::TEAM) | lazy { [private_event, internal_event, public_event, private_event_second_user, internal_event_second_user, public_event_second_user] }
+ EventFilter.new(EventFilter::ISSUE) | lazy { [issue_event1, issue_event2, issue_event3] }
+ EventFilter.new(EventFilter::WIKI) | lazy { [wiki_event1, wiki_event2] }
+ EventFilter.new(EventFilter::DESIGNS) | lazy { [design_event1, design_event2] }
+ end
- with_them do
- it 'only returns selected events from all users (id DESC)' do
- events = described_class.new(current_user, [project_owner, second_user], event_filter, params).execute
+ with_them do
+ it 'only returns selected events from all users (id DESC)' do
+ events = described_class.new(current_user, [project_owner, second_user], event_filter, params).execute
- expect(events).to eq(ordered_expected_events.reverse)
- end
+ expect(events).to eq(ordered_expected_events.reverse)
end
end
+ end
- it 'does not include events from users with private profile', :aggregate_failures do
- allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?).with(current_user, :read_user_profile, second_user).and_return(false)
+ it 'does not include events from users with private profile', :aggregate_failures do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(current_user, :read_user_profile, second_user).and_return(false)
- events = described_class.new(current_user, [project_owner, second_user], nil, params).execute
+ events = described_class.new(current_user, [project_owner, second_user], nil, params).execute
- expect(events).to contain_exactly(private_event, internal_event, public_event)
- end
+ expect(events).to contain_exactly(private_event, internal_event, public_event)
+ end
- context 'with pagination params' do
- using RSpec::Parameterized::TableSyntax
+ context 'with pagination params' do
+ using RSpec::Parameterized::TableSyntax
- where(:limit, :offset, :ordered_expected_events) do
- nil | nil | lazy { [public_event_second_user, internal_event_second_user, private_event_second_user, public_event, internal_event, private_event] }
- 2 | nil | lazy { [public_event_second_user, internal_event_second_user] }
- nil | 4 | lazy { [internal_event, private_event] }
- 2 | 2 | lazy { [private_event_second_user, public_event] }
- end
+ where(:limit, :offset, :ordered_expected_events) do
+ nil | nil | lazy { [public_event_second_user, internal_event_second_user, private_event_second_user, public_event, internal_event, private_event] }
+ 2 | nil | lazy { [public_event_second_user, internal_event_second_user] }
+ nil | 4 | lazy { [internal_event, private_event] }
+ 2 | 2 | lazy { [private_event_second_user, public_event] }
+ end
- with_them do
- let(:params) { { limit: limit, offset: offset }.compact }
+ with_them do
+ let(:params) { { limit: limit, offset: offset }.compact }
- it 'returns paginated events sorted by id (DESC)' do
- events = described_class.new(current_user, [project_owner, second_user], nil, params).execute
+ it 'returns paginated events sorted by id (DESC)' do
+ events = described_class.new(current_user, [project_owner, second_user], nil, params).execute
- expect(events).to eq(ordered_expected_events)
- end
+ expect(events).to eq(ordered_expected_events)
end
end
end
+ end
+
+ context 'filter activity events' do
+ let_it_be(:push_event) { create(:push_event, project: public_project, author: project_owner) }
+ let_it_be(:merge_event) { create(:event, :merged, project: public_project, author: project_owner) }
+ let_it_be(:issue_event) { create(:event, :closed, project: public_project, target: issue, author: project_owner) }
+ let_it_be(:comment_event) { create(:event, :commented, project: public_project, author: project_owner) }
+ let_it_be(:wiki_event) { create(:wiki_page_event, project: public_project, author: project_owner) }
+ let_it_be(:design_event) { create(:design_event, project: public_project, author: project_owner) }
+ let_it_be(:team_event) { create(:event, :joined, project: public_project, author: project_owner) }
+
+ it 'includes all events', :aggregate_failures do
+ event_filter = EventFilter.new(EventFilter::ALL)
+ events = described_class.new(current_user, project_owner, event_filter, params).execute
+
+ expect(events).to include(private_event, internal_event, public_event)
+ expect(events).to include(push_event, merge_event, issue_event, comment_event, wiki_event, design_event, team_event)
+ expect(events.size).to eq(10)
+ end
- context 'filter activity events' do
- let_it_be(:push_event) { create(:push_event, project: public_project, author: project_owner) }
- let_it_be(:merge_event) { create(:event, :merged, project: public_project, author: project_owner) }
- let_it_be(:issue_event) { create(:event, :closed, project: public_project, target: issue, author: project_owner) }
- let_it_be(:comment_event) { create(:event, :commented, project: public_project, author: project_owner) }
- let_it_be(:wiki_event) { create(:wiki_page_event, project: public_project, author: project_owner) }
- let_it_be(:design_event) { create(:design_event, project: public_project, author: project_owner) }
- let_it_be(:team_event) { create(:event, :joined, project: public_project, author: project_owner) }
+ context 'when unknown filter is given' do
+ it 'returns all events', :aggregate_failures do
+ event_filter = EventFilter.new('unknown')
+ allow(event_filter).to receive(:filter).and_return('unknown')
- it 'includes all events', :aggregate_failures do
- event_filter = EventFilter.new(EventFilter::ALL)
- events = described_class.new(current_user, project_owner, event_filter, params).execute
+ events = described_class.new(current_user, [project_owner], event_filter, params).execute
expect(events).to include(private_event, internal_event, public_event)
expect(events).to include(push_event, merge_event, issue_event, comment_event, wiki_event, design_event, team_event)
expect(events.size).to eq(10)
end
+ end
- context 'when unknown filter is given' do
- it 'includes returns all events', :aggregate_failures do
- event_filter = EventFilter.new('unknown')
- allow(event_filter).to receive(:filter).and_return('unknown')
+ it 'only includes push events', :aggregate_failures do
+ event_filter = EventFilter.new(EventFilter::PUSH)
+ events = described_class.new(current_user, project_owner, event_filter, params).execute
- events = described_class.new(current_user, [project_owner], event_filter, params).execute
+ expect(events).to include(push_event)
+ expect(events.size).to eq(1)
+ end
- expect(events).to include(private_event, internal_event, public_event)
- expect(events).to include(push_event, merge_event, issue_event, comment_event, wiki_event, design_event, team_event)
- expect(events.size).to eq(10)
- end
- end
+ it 'only includes merge events', :aggregate_failures do
+ event_filter = EventFilter.new(EventFilter::MERGED)
+ events = described_class.new(current_user, project_owner, event_filter, params).execute
- it 'only includes push events', :aggregate_failures do
- event_filter = EventFilter.new(EventFilter::PUSH)
- events = described_class.new(current_user, project_owner, event_filter, params).execute
+ expect(events).to include(merge_event)
+ expect(events.size).to eq(1)
+ end
- expect(events).to include(push_event)
- expect(events.size).to eq(1)
- end
+ it 'only includes issue events', :aggregate_failures do
+ event_filter = EventFilter.new(EventFilter::ISSUE)
+ events = described_class.new(current_user, project_owner, event_filter, params).execute
- it 'only includes merge events', :aggregate_failures do
- event_filter = EventFilter.new(EventFilter::MERGED)
- events = described_class.new(current_user, project_owner, event_filter, params).execute
+ expect(events).to include(issue_event)
+ expect(events.size).to eq(1)
+ end
- expect(events).to include(merge_event)
- expect(events.size).to eq(1)
- end
+ it 'only includes comments events', :aggregate_failures do
+ event_filter = EventFilter.new(EventFilter::COMMENTS)
+ events = described_class.new(current_user, project_owner, event_filter, params).execute
- it 'only includes issue events', :aggregate_failures do
- event_filter = EventFilter.new(EventFilter::ISSUE)
- events = described_class.new(current_user, project_owner, event_filter, params).execute
+ expect(events).to include(comment_event)
+ expect(events.size).to eq(1)
+ end
- expect(events).to include(issue_event)
- expect(events.size).to eq(1)
- end
+ it 'only includes wiki events', :aggregate_failures do
+ event_filter = EventFilter.new(EventFilter::WIKI)
+ events = described_class.new(current_user, project_owner, event_filter, params).execute
- it 'only includes comments events', :aggregate_failures do
- event_filter = EventFilter.new(EventFilter::COMMENTS)
- events = described_class.new(current_user, project_owner, event_filter, params).execute
+ expect(events).to include(wiki_event)
+ expect(events.size).to eq(1)
+ end
- expect(events).to include(comment_event)
- expect(events.size).to eq(1)
- end
+ it 'only includes design events', :aggregate_failures do
+ event_filter = EventFilter.new(EventFilter::DESIGNS)
+ events = described_class.new(current_user, project_owner, event_filter, params).execute
- it 'only includes wiki events', :aggregate_failures do
- event_filter = EventFilter.new(EventFilter::WIKI)
- events = described_class.new(current_user, project_owner, event_filter, params).execute
+ expect(events).to include(design_event)
+ expect(events.size).to eq(1)
+ end
- expect(events).to include(wiki_event)
- expect(events.size).to eq(1)
- end
+ it 'only includes team events', :aggregate_failures do
+ event_filter = EventFilter.new(EventFilter::TEAM)
+ events = described_class.new(current_user, project_owner, event_filter, params).execute
- it 'only includes design events', :aggregate_failures do
- event_filter = EventFilter.new(EventFilter::DESIGNS)
- events = described_class.new(current_user, project_owner, event_filter, params).execute
+ expect(events).to include(private_event, internal_event, public_event, team_event)
+ expect(events.size).to eq(4)
+ end
+ end
- expect(events).to include(design_event)
- expect(events.size).to eq(1)
- end
+ describe 'issue activity events' do
+ let(:issue) { create(:issue, project: public_project) }
+ let(:note) { create(:note_on_issue, noteable: issue, project: public_project) }
+ let!(:event_a) { create(:event, :commented, target: note, author: project_owner) }
+ let!(:event_b) { create(:event, :closed, target: issue, author: project_owner) }
- it 'only includes team events', :aggregate_failures do
- event_filter = EventFilter.new(EventFilter::TEAM)
- events = described_class.new(current_user, project_owner, event_filter, params).execute
+ it 'includes all issue related events', :aggregate_failures do
+ events = finder.execute
- expect(events).to include(private_event, internal_event, public_event, team_event)
- expect(events.size).to eq(4)
- end
+ expect(events).to include(event_a)
+ expect(events).to include(event_b)
end
+ end
- describe 'issue activity events' do
- let(:issue) { create(:issue, project: public_project) }
- let(:note) { create(:note_on_issue, noteable: issue, project: public_project) }
- let!(:event_a) { create(:event, :commented, target: note, author: project_owner) }
- let!(:event_b) { create(:event, :closed, target: issue, author: project_owner) }
-
- it 'includes all issue related events', :aggregate_failures do
- events = finder.execute
+ context 'limits' do
+ before do
+ stub_const("#{described_class}::DEFAULT_LIMIT", 1)
+ stub_const("#{described_class}::MAX_LIMIT", 3)
+ end
- expect(events).to include(event_a)
- expect(events).to include(event_b)
+ context 'when limit is not set' do
+ it 'returns events limited to DEFAULT_LIMIT' do
+ expect(finder.execute.size).to eq(described_class::DEFAULT_LIMIT)
end
end
- context 'limits' do
- before do
- stub_const("#{described_class}::DEFAULT_LIMIT", 1)
- stub_const("#{described_class}::MAX_LIMIT", 3)
- end
+ context 'when limit is set' do
+ let(:limit) { 2 }
- context 'when limit is not set' do
- it 'returns events limited to DEFAULT_LIMIT' do
- expect(finder.execute.size).to eq(described_class::DEFAULT_LIMIT)
- end
+ it 'returns events limited to specified limit' do
+ expect(finder.execute.size).to eq(limit)
end
+ end
- context 'when limit is set' do
- let(:limit) { 2 }
+ context 'when limit is set to a number that exceeds maximum limit' do
+ let(:limit) { 4 }
- it 'returns events limited to specified limit' do
- expect(finder.execute.size).to eq(limit)
- end
+ before do
+ create(:event, project: public_project, author: project_owner)
end
- context 'when limit is set to a number that exceeds maximum limit' do
- let(:limit) { 4 }
-
- before do
- create(:event, project: public_project, author: project_owner)
- end
-
- it 'returns events limited to MAX_LIMIT' do
- expect(finder.execute.size).to eq(described_class::MAX_LIMIT)
- end
+ it 'returns events limited to MAX_LIMIT' do
+ expect(finder.execute.size).to eq(described_class::MAX_LIMIT)
end
end
end
end
-
- context 'when the optimized_followed_users_queries FF is on' do
- before do
- stub_feature_flags(optimized_followed_users_queries: true)
- end
-
- it_behaves_like 'UserRecentEventsFinder examples'
- end
-
- context 'when the optimized_followed_users_queries FF is off' do
- before do
- stub_feature_flags(optimized_followed_users_queries: false)
- end
-
- it_behaves_like 'UserRecentEventsFinder examples'
- end
end
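
The 'limits' contexts near the end of this spec describe a simple clamp: an unset limit falls back to DEFAULT_LIMIT and anything larger than MAX_LIMIT is capped. As a sketch (the method name is illustrative, not taken from the finder):

    def limit
      return DEFAULT_LIMIT unless params[:limit].present?

      [params[:limit].to_i, MAX_LIMIT].min
    end
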
diff --git a/spec/fixtures/api/schemas/entities/commit.json b/spec/fixtures/api/schemas/entities/commit.json
index 324702e3f94..ed08c35f89b 100644
--- a/spec/fixtures/api/schemas/entities/commit.json
+++ b/spec/fixtures/api/schemas/entities/commit.json
@@ -11,7 +11,7 @@
"author"
],
"properties": {
- "author_gravatar_url": { "type": "string" },
+ "author_gravatar_url": { "type": [ "string", "null" ] },
"commit_url": { "type": "string" },
"commit_path": { "type": "string" },
"author": {
diff --git a/spec/fixtures/api/schemas/entities/github/user.json b/spec/fixtures/api/schemas/entities/github/user.json
index 3d772a0c648..23db912ad5c 100644
--- a/spec/fixtures/api/schemas/entities/github/user.json
+++ b/spec/fixtures/api/schemas/entities/github/user.json
@@ -5,7 +5,7 @@
"id": { "type": "integer" },
"login": { "type": "string" },
"url": { "type": "string" },
- "avatar_url": { "type": "string" },
+ "avatar_url": { "type": [ "string", "null" ] },
"html_url": { "type": "string" }
},
"additionalProperties": false
diff --git a/spec/fixtures/api/schemas/entities/member.json b/spec/fixtures/api/schemas/entities/member.json
index 88f7d87b269..24a4863df9b 100644
--- a/spec/fixtures/api/schemas/entities/member.json
+++ b/spec/fixtures/api/schemas/entities/member.json
@@ -62,7 +62,7 @@
"required": ["email", "avatar_url", "can_resend", "user_state"],
"properties": {
"email": { "type": "string" },
- "avatar_url": { "type": "string" },
+ "avatar_url": { "type": [ "string", "null" ] },
"can_resend": { "type": "boolean" },
"user_state": { "type": "string" }
},
diff --git a/spec/fixtures/api/schemas/entities/merge_request_basic.json b/spec/fixtures/api/schemas/entities/merge_request_basic.json
index b061176f6a7..bda9f6573ea 100644
--- a/spec/fixtures/api/schemas/entities/merge_request_basic.json
+++ b/spec/fixtures/api/schemas/entities/merge_request_basic.json
@@ -7,6 +7,7 @@
"source_branch_exists": { "type": "boolean" },
"merge_error": { "type": ["string", "null"] },
"rebase_in_progress": { "type": "boolean" },
+ "should_be_rebased": { "type": "boolean" },
"allow_collaboration": { "type": "boolean"},
"allow_maintainer_to_push": { "type": "boolean"},
"assignees": {
diff --git a/spec/fixtures/api/schemas/entities/note_user_entity.json b/spec/fixtures/api/schemas/entities/note_user_entity.json
index e2bbaad7201..f5d28dd7b71 100644
--- a/spec/fixtures/api/schemas/entities/note_user_entity.json
+++ b/spec/fixtures/api/schemas/entities/note_user_entity.json
@@ -11,7 +11,7 @@
"properties": {
"id": { "type": "integer" },
"state": { "type": "string" },
- "avatar_url": { "type": "string" },
+ "avatar_url": { "type": [ "string", "null" ] },
"path": { "type": "string" },
"name": { "type": "string" },
"username": { "type": "string" },
diff --git a/spec/fixtures/api/schemas/entities/user.json b/spec/fixtures/api/schemas/entities/user.json
index 3252a37c82a..984b7184d36 100644
--- a/spec/fixtures/api/schemas/entities/user.json
+++ b/spec/fixtures/api/schemas/entities/user.json
@@ -12,7 +12,7 @@
"properties": {
"id": { "type": "integer" },
"state": { "type": "string" },
- "avatar_url": { "type": "string" },
+ "avatar_url": { "type": [ "string", "null" ] },
"web_url": { "type": "string" },
"path": { "type": "string" },
"name": { "type": "string" },
diff --git a/spec/fixtures/api/schemas/public_api/v4/commit/basic.json b/spec/fixtures/api/schemas/public_api/v4/commit/basic.json
index 227b5a20af3..da7a9fcb7f3 100644
--- a/spec/fixtures/api/schemas/public_api/v4/commit/basic.json
+++ b/spec/fixtures/api/schemas/public_api/v4/commit/basic.json
@@ -19,7 +19,7 @@
"id": { "type": ["string", "null"] },
"short_id": { "type": ["string", "null"] },
"title": { "type": "string" },
- "created_at": { "type": "string", "format": "date-time" },
+ "created_at": { "type": "string" },
"parent_ids": {
"type": ["array", "null"],
"items": {
@@ -30,10 +30,10 @@
"message": { "type": "string" },
"author_name": { "type": "string" },
"author_email": { "type": "string" },
- "authored_date": { "type": "string", "format": "date-time" },
+ "authored_date": { "type": "string" },
"committer_name": { "type": "string" },
"committer_email": { "type": "string" },
- "committed_date": { "type": "string", "format": "date-time" },
+ "committed_date": { "type": "string" },
"web_url": { "type": "string" }
}
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/deploy_key.json b/spec/fixtures/api/schemas/public_api/v4/deploy_key.json
index 3dbdfcc95a1..99e57a4c218 100644
--- a/spec/fixtures/api/schemas/public_api/v4/deploy_key.json
+++ b/spec/fixtures/api/schemas/public_api/v4/deploy_key.json
@@ -6,7 +6,7 @@
"created_at",
"expires_at",
"key",
- "fingerprint",
+ "fingerprint_sha256",
"projects_with_write_access"
],
"properties": {
@@ -16,6 +16,7 @@
"expires_at": { "type": ["string", "null"], "format": "date-time" },
"key": { "type": "string" },
"fingerprint": { "type": "string" },
+ "fingerprint_sha256": { "type": "string" },
"projects_with_write_access": {
"type": "array",
"items": { "$ref": "project/identity.json" }
diff --git a/spec/fixtures/api/schemas/public_api/v4/environment.json b/spec/fixtures/api/schemas/public_api/v4/environment.json
index 3a4c18343e3..21888d88598 100644
--- a/spec/fixtures/api/schemas/public_api/v4/environment.json
+++ b/spec/fixtures/api/schemas/public_api/v4/environment.json
@@ -25,10 +25,7 @@
"state": { "type": "string" },
"created_at": { "type": "string", "format": "date-time" },
"updated_at": { "type": "string", "format": "date-time" },
- "project": { "$ref": "project.json" },
- "enable_advanced_logs_querying": { "type": "boolean" },
- "logs_api_path": { "type": "string" },
- "gitlab_managed_apps_logs_path": { "type": "string" }
+ "project": { "$ref": "project.json" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/metadata.json b/spec/fixtures/api/schemas/public_api/v4/metadata.json
new file mode 100644
index 00000000000..fd219b95df8
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/metadata.json
@@ -0,0 +1,26 @@
+{
+ "type": "object",
+ "required": [
+ "version",
+ "revision",
+ "kas"
+ ],
+ "properties": {
+ "version": { "type": "string" },
+ "revision": { "type": "string" },
+ "kas": {
+ "type": "object",
+ "required": [
+ "enabled",
+ "externalUrl",
+ "version"
+ ],
+ "properties": {
+ "enabled": { "type": "boolean" },
+ "externalUrl": { "type": ["string", "null"] },
+ "version": { "type": ["string", "null"] }
+ }
+ }
+ },
+ "additionalProperties": false
+}
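
Schema fixtures like this one are typically asserted against in request specs via the match_response_schema matcher; a hedged example, with the endpoint path and test setup assumed for illustration:

    it 'exposes version, revision and KAS details' do
      get api('/metadata', user)

      expect(response).to have_gitlab_http_status(:ok)
      expect(response).to match_response_schema('public_api/v4/metadata')
    end
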
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/single_version.json b/spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/single_version.json
new file mode 100644
index 00000000000..689b9427f38
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/single_version.json
@@ -0,0 +1,55 @@
+{
+ "type": "object",
+ "required": [
+ "name",
+ "provider",
+ "providers",
+ "root",
+ "source",
+ "submodules",
+ "version",
+ "versions"
+ ],
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "provider": {
+ "type": "string"
+ },
+ "providers": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "root": {
+ "type": "object",
+ "required": [
+ "dependencies"
+ ],
+ "properties": {
+ "dependencies": {
+ "type": "array",
+ "maxItems": 0
+ }
+ }
+ },
+ "source": {
+ "type": "string"
+ },
+ "submodules": {
+ "type": "array",
+ "maxItems": 0
+ },
+ "version": {
+ "type": "string"
+ },
+ "versions": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/spec/fixtures/api/schemas/public_api/v4/project_hook.json b/spec/fixtures/api/schemas/public_api/v4/project_hook.json
new file mode 100644
index 00000000000..6070f3a55f9
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/project_hook.json
@@ -0,0 +1,62 @@
+{
+ "type": "object",
+ "required": [
+ "id",
+ "url",
+ "created_at",
+ "push_events",
+ "push_events_branch_filter",
+ "tag_push_events",
+ "merge_requests_events",
+ "repository_update_events",
+ "enable_ssl_verification",
+ "project_id",
+ "issues_events",
+ "confidential_issues_events",
+ "note_events",
+ "confidential_note_events",
+ "pipeline_events",
+ "wiki_page_events",
+ "job_events",
+ "deployment_events",
+ "releases_events",
+ "alert_status",
+ "disabled_until",
+ "url_variables"
+ ],
+ "properties": {
+ "id": { "type": "integer" },
+ "project_id": { "type": "integer" },
+ "url": { "type": "string" },
+ "created_at": { "type": "string", "format": "date-time" },
+ "push_events": { "type": "boolean" },
+ "push_events_branch_filter": { "type": ["string", "null"] },
+ "tag_push_events": { "type": "boolean" },
+ "merge_requests_events": { "type": "boolean" },
+ "repository_update_events": { "type": "boolean" },
+ "enable_ssl_verification": { "type": "boolean" },
+ "issues_events": { "type": "boolean" },
+ "confidential_issues_events": { "type": ["boolean", "null"] },
+ "note_events": { "type": "boolean" },
+ "confidential_note_events": { "type": ["boolean", "null"] },
+ "pipeline_events": { "type": "boolean" },
+ "wiki_page_events": { "type": "boolean" },
+ "job_events": { "type": "boolean" },
+ "deployment_events": { "type": "boolean" },
+ "releases_events": { "type": "boolean" },
+ "alert_status": { "type": "string", "enum": ["executable","disabled","temporarily_disabled"] },
+ "disabled_until": { "type": ["string", "null"] },
+ "url_variables": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": ["key"],
+ "properties": {
+ "key": { "type": "string" }
+ }
+ }
+ }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/project_hooks.json b/spec/fixtures/api/schemas/public_api/v4/project_hooks.json
new file mode 100644
index 00000000000..8c542ebe3ad
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/project_hooks.json
@@ -0,0 +1,7 @@
+{
+  "type": "array",
+  "items": {
+    "$ref": "./project_hook.json"
+  }
+}
+
diff --git a/spec/fixtures/api/schemas/public_api/v4/system_hook.json b/spec/fixtures/api/schemas/public_api/v4/system_hook.json
index 3fe3e0d658e..b6f56b948a0 100644
--- a/spec/fixtures/api/schemas/public_api/v4/system_hook.json
+++ b/spec/fixtures/api/schemas/public_api/v4/system_hook.json
@@ -10,7 +10,8 @@
"repository_update_events",
"enable_ssl_verification",
"alert_status",
- "disabled_until"
+ "disabled_until",
+ "url_variables"
],
"properties": {
"id": { "type": "integer" },
@@ -22,7 +23,18 @@
"repository_update_events": { "type": "boolean" },
"enable_ssl_verification": { "type": "boolean" },
"alert_status": { "type": "string", "enum": ["executable", "disabled", "temporarily_disabled"] },
- "disabled_until": { "type": ["string", "null"] }
+ "disabled_until": { "type": ["string", "null"] },
+ "url_variables": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": ["key"],
+ "properties": {
+ "key": { "type": "string" }
+ }
+ }
+ }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/user/basic.json b/spec/fixtures/api/schemas/public_api/v4/user/basic.json
index 2d815be32a6..d8286f0d84c 100644
--- a/spec/fixtures/api/schemas/public_api/v4/user/basic.json
+++ b/spec/fixtures/api/schemas/public_api/v4/user/basic.json
@@ -13,7 +13,7 @@
"name": { "type": "string" },
"username": { "type": "string" },
"state": { "type": "string" },
- "avatar_url": { "type": "string" },
+ "avatar_url": { "type": [ "string", "null" ] },
"web_url": { "type": "string" }
}
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/user/public.json b/spec/fixtures/api/schemas/public_api/v4/user/public.json
index 0955c70aef0..c4549e3ef63 100644
--- a/spec/fixtures/api/schemas/public_api/v4/user/public.json
+++ b/spec/fixtures/api/schemas/public_api/v4/user/public.json
@@ -39,7 +39,7 @@
"type": "string",
"enum": ["active", "blocked"]
},
- "avatar_url": { "type": "string" },
+ "avatar_url": { "type": [ "string", "null" ] },
"web_url": { "type": "string" },
"created_at": { "type": "string", "format": "date-time" },
"bio": { "type": ["string", "null"] },
diff --git a/spec/fixtures/csv_complex.csv b/spec/fixtures/csv_complex.csv
new file mode 100644
index 00000000000..60d8aa5d6f7
--- /dev/null
+++ b/spec/fixtures/csv_complex.csv
@@ -0,0 +1,6 @@
+title,description,due date
+Issue in 中文,Test description,
+"Hello","World",
+"Title with quote""","Description
+/assign @csv_assignee
+/estimate 1h",2022-06-28
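
The fixture deliberately mixes unquoted UTF-8, quoted fields, an escaped double quote ("") and a multi-line description carrying quick actions; Ruby's standard CSV library handles all of it, for example:

    require 'csv'

    rows = CSV.read('spec/fixtures/csv_complex.csv', headers: true)
    rows[2]['title']        # => "Title with quote\""
    rows[2]['description']  # => "Description\n/assign @csv_assignee\n/estimate 1h"
    rows[2]['due date']     # => "2022-06-28"
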
diff --git a/spec/fixtures/emails/service_desk.eml b/spec/fixtures/emails/service_desk.eml
index 0db1270bc64..102f29542ae 100644
--- a/spec/fixtures/emails/service_desk.eml
+++ b/spec/fixtures/emails/service_desk.eml
@@ -6,6 +6,7 @@ Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
Date: Thu, 13 Jun 2013 17:03:48 -0400
From: Jake the Dog <jake@adventuretime.ooo>
To: incoming+email-test-project_id-issue-@appmail.adventuretime.ooo
+Cc: Carbon Copy <cc@example.com>, kk@example.org
Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
Subject: The message subject! @all
Mime-Version: 1.0
diff --git a/spec/fixtures/gitlab/git/gitattributes b/spec/fixtures/gitlab/git/gitattributes
new file mode 100644
index 00000000000..bfe9c8e4ed2
--- /dev/null
+++ b/spec/fixtures/gitlab/git/gitattributes
@@ -0,0 +1,16 @@
+# This is a comment, it should be ignored.
+
+*.txt text
+*.jpg -text
+*.sh eol=lf gitlab-language=shell
+*.haml.* gitlab-language=haml
+foo/bar.* foo
+*.cgi key=value?p1=v1&p2=v2
+/*.png gitlab-language=png
+*.binary binary
+/custom-highlighting/*.gitlab-custom gitlab-language=ruby
+/custom-highlighting/*.gitlab-cgi gitlab-language=erb?parent=json
+
+# This uses a tab instead of spaces to ensure the parser also supports this.
+*.md gitlab-language=markdown
+bla/bla.txt
diff --git a/spec/fixtures/gitlab/git/gitattributes_invalid b/spec/fixtures/gitlab/git/gitattributes_invalid
new file mode 100644
index 00000000000..57e0e8e0d3b
--- /dev/null
+++ b/spec/fixtures/gitlab/git/gitattributes_invalid
Binary files differ
diff --git a/spec/fixtures/gitlab/import_export/labels.tar.gz b/spec/fixtures/gitlab/import_export/labels.tar.gz
new file mode 100644
index 00000000000..8329dcf3b4a
--- /dev/null
+++ b/spec/fixtures/gitlab/import_export/labels.tar.gz
Binary files differ
diff --git a/spec/fixtures/glfm/example_snapshots/examples_index.yml b/spec/fixtures/glfm/example_snapshots/examples_index.yml
deleted file mode 100644
index 08f6f88af9e..00000000000
--- a/spec/fixtures/glfm/example_snapshots/examples_index.yml
+++ /dev/null
@@ -1,2017 +0,0 @@
----
-02_01__preliminaries__tabs__001:
- spec_txt_example_position: 1
- source_specification: commonmark
-02_01__preliminaries__tabs__002:
- spec_txt_example_position: 2
- source_specification: commonmark
-02_01__preliminaries__tabs__003:
- spec_txt_example_position: 3
- source_specification: commonmark
-02_01__preliminaries__tabs__004:
- spec_txt_example_position: 4
- source_specification: commonmark
-02_01__preliminaries__tabs__005:
- spec_txt_example_position: 5
- source_specification: commonmark
-02_01__preliminaries__tabs__006:
- spec_txt_example_position: 6
- source_specification: commonmark
-02_01__preliminaries__tabs__007:
- spec_txt_example_position: 7
- source_specification: commonmark
-02_01__preliminaries__tabs__008:
- spec_txt_example_position: 8
- source_specification: commonmark
-02_01__preliminaries__tabs__009:
- spec_txt_example_position: 9
- source_specification: commonmark
-02_01__preliminaries__tabs__010:
- spec_txt_example_position: 10
- source_specification: commonmark
-02_01__preliminaries__tabs__011:
- spec_txt_example_position: 11
- source_specification: commonmark
-03_01__blocks_and_inlines__precedence__001:
- spec_txt_example_position: 12
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__001:
- spec_txt_example_position: 13
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__002:
- spec_txt_example_position: 14
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__003:
- spec_txt_example_position: 15
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__004:
- spec_txt_example_position: 16
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__005:
- spec_txt_example_position: 17
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__006:
- spec_txt_example_position: 18
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__007:
- spec_txt_example_position: 19
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__008:
- spec_txt_example_position: 20
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__009:
- spec_txt_example_position: 21
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__010:
- spec_txt_example_position: 22
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__011:
- spec_txt_example_position: 23
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__012:
- spec_txt_example_position: 24
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__013:
- spec_txt_example_position: 25
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__014:
- spec_txt_example_position: 26
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__015:
- spec_txt_example_position: 27
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__016:
- spec_txt_example_position: 28
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__017:
- spec_txt_example_position: 29
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__018:
- spec_txt_example_position: 30
- source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__019:
- spec_txt_example_position: 31
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__001:
- spec_txt_example_position: 32
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__002:
- spec_txt_example_position: 33
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__003:
- spec_txt_example_position: 34
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__004:
- spec_txt_example_position: 35
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__005:
- spec_txt_example_position: 36
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__006:
- spec_txt_example_position: 37
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__007:
- spec_txt_example_position: 38
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__008:
- spec_txt_example_position: 39
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__009:
- spec_txt_example_position: 40
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__010:
- spec_txt_example_position: 41
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__011:
- spec_txt_example_position: 42
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__012:
- spec_txt_example_position: 43
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__013:
- spec_txt_example_position: 44
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__014:
- spec_txt_example_position: 45
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__015:
- spec_txt_example_position: 46
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__016:
- spec_txt_example_position: 47
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__017:
- spec_txt_example_position: 48
- source_specification: commonmark
-04_02__leaf_blocks__atx_headings__018:
- spec_txt_example_position: 49
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__001:
- spec_txt_example_position: 50
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__002:
- spec_txt_example_position: 51
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__003:
- spec_txt_example_position: 52
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__004:
- spec_txt_example_position: 53
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__005:
- spec_txt_example_position: 54
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__006:
- spec_txt_example_position: 55
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__007:
- spec_txt_example_position: 56
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__008:
- spec_txt_example_position: 57
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__009:
- spec_txt_example_position: 58
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__010:
- spec_txt_example_position: 59
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__011:
- spec_txt_example_position: 60
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__012:
- spec_txt_example_position: 61
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__013:
- spec_txt_example_position: 62
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__014:
- spec_txt_example_position: 63
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__015:
- spec_txt_example_position: 64
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__016:
- spec_txt_example_position: 65
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__017:
- spec_txt_example_position: 66
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__018:
- spec_txt_example_position: 67
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__019:
- spec_txt_example_position: 68
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__020:
- spec_txt_example_position: 69
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__021:
- spec_txt_example_position: 70
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__022:
- spec_txt_example_position: 71
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__023:
- spec_txt_example_position: 72
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__024:
- spec_txt_example_position: 73
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__025:
- spec_txt_example_position: 74
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__026:
- spec_txt_example_position: 75
- source_specification: commonmark
-04_03__leaf_blocks__setext_headings__027:
- spec_txt_example_position: 76
- source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__001:
- spec_txt_example_position: 77
- source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__002:
- spec_txt_example_position: 78
- source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__003:
- spec_txt_example_position: 79
- source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__004:
- spec_txt_example_position: 80
- source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__005:
- spec_txt_example_position: 81
- source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__006:
- spec_txt_example_position: 82
- source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__007:
- spec_txt_example_position: 83
- source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__008:
- spec_txt_example_position: 84
- source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__009:
- spec_txt_example_position: 85
- source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__010:
- spec_txt_example_position: 86
- source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__011:
- spec_txt_example_position: 87
- source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__012:
- spec_txt_example_position: 88
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__001:
- spec_txt_example_position: 89
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__002:
- spec_txt_example_position: 90
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__003:
- spec_txt_example_position: 91
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__004:
- spec_txt_example_position: 92
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__005:
- spec_txt_example_position: 93
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__006:
- spec_txt_example_position: 94
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__007:
- spec_txt_example_position: 95
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__008:
- spec_txt_example_position: 96
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__009:
- spec_txt_example_position: 97
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__010:
- spec_txt_example_position: 98
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__011:
- spec_txt_example_position: 99
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__012:
- spec_txt_example_position: 100
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__013:
- spec_txt_example_position: 101
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__014:
- spec_txt_example_position: 102
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__015:
- spec_txt_example_position: 103
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__016:
- spec_txt_example_position: 104
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__017:
- spec_txt_example_position: 105
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__018:
- spec_txt_example_position: 106
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__019:
- spec_txt_example_position: 107
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__020:
- spec_txt_example_position: 108
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__021:
- spec_txt_example_position: 109
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__022:
- spec_txt_example_position: 110
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__023:
- spec_txt_example_position: 111
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__024:
- spec_txt_example_position: 112
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__025:
- spec_txt_example_position: 113
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__026:
- spec_txt_example_position: 114
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__027:
- spec_txt_example_position: 115
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__028:
- spec_txt_example_position: 116
- source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__029:
- spec_txt_example_position: 117
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__001:
- spec_txt_example_position: 118
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__002:
- spec_txt_example_position: 119
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__003:
- spec_txt_example_position: 120
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__004:
- spec_txt_example_position: 121
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__005:
- spec_txt_example_position: 122
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__006:
- spec_txt_example_position: 123
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__007:
- spec_txt_example_position: 124
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__008:
- spec_txt_example_position: 125
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__009:
- spec_txt_example_position: 126
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__010:
- spec_txt_example_position: 127
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__011:
- spec_txt_example_position: 128
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__012:
- spec_txt_example_position: 129
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__013:
- spec_txt_example_position: 130
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__014:
- spec_txt_example_position: 131
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__015:
- spec_txt_example_position: 132
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__016:
- spec_txt_example_position: 133
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__017:
- spec_txt_example_position: 134
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__018:
- spec_txt_example_position: 135
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__019:
- spec_txt_example_position: 136
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__020:
- spec_txt_example_position: 137
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__021:
- spec_txt_example_position: 138
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__022:
- spec_txt_example_position: 139
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__023:
- spec_txt_example_position: 140
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__024:
- spec_txt_example_position: 141
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__025:
- spec_txt_example_position: 142
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__026:
- spec_txt_example_position: 143
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__027:
- spec_txt_example_position: 144
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__028:
- spec_txt_example_position: 145
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__029:
- spec_txt_example_position: 146
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__030:
- spec_txt_example_position: 147
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__031:
- spec_txt_example_position: 148
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__032:
- spec_txt_example_position: 149
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__033:
- spec_txt_example_position: 150
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__034:
- spec_txt_example_position: 151
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__035:
- spec_txt_example_position: 152
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__036:
- spec_txt_example_position: 153
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__037:
- spec_txt_example_position: 154
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__038:
- spec_txt_example_position: 155
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__039:
- spec_txt_example_position: 156
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__040:
- spec_txt_example_position: 157
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__041:
- spec_txt_example_position: 158
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__042:
- spec_txt_example_position: 159
- source_specification: commonmark
-04_06__leaf_blocks__html_blocks__043:
- spec_txt_example_position: 160
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__001:
- spec_txt_example_position: 161
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__002:
- spec_txt_example_position: 162
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__003:
- spec_txt_example_position: 163
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__004:
- spec_txt_example_position: 164
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__005:
- spec_txt_example_position: 165
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__006:
- spec_txt_example_position: 166
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__007:
- spec_txt_example_position: 167
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__008:
- spec_txt_example_position: 168
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__009:
- spec_txt_example_position: 169
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__010:
- spec_txt_example_position: 170
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__011:
- spec_txt_example_position: 171
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__012:
- spec_txt_example_position: 172
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__013:
- spec_txt_example_position: 173
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__014:
- spec_txt_example_position: 174
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__015:
- spec_txt_example_position: 175
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__016:
- spec_txt_example_position: 176
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__017:
- spec_txt_example_position: 177
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__018:
- spec_txt_example_position: 178
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__019:
- spec_txt_example_position: 179
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__020:
- spec_txt_example_position: 180
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__021:
- spec_txt_example_position: 181
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__022:
- spec_txt_example_position: 182
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__023:
- spec_txt_example_position: 183
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__024:
- spec_txt_example_position: 184
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__025:
- spec_txt_example_position: 185
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__026:
- spec_txt_example_position: 186
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__027:
- spec_txt_example_position: 187
- source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__028:
- spec_txt_example_position: 188
- source_specification: commonmark
-04_08__leaf_blocks__paragraphs__001:
- spec_txt_example_position: 189
- source_specification: commonmark
-04_08__leaf_blocks__paragraphs__002:
- spec_txt_example_position: 190
- source_specification: commonmark
-04_08__leaf_blocks__paragraphs__003:
- spec_txt_example_position: 191
- source_specification: commonmark
-04_08__leaf_blocks__paragraphs__004:
- spec_txt_example_position: 192
- source_specification: commonmark
-04_08__leaf_blocks__paragraphs__005:
- spec_txt_example_position: 193
- source_specification: commonmark
-04_08__leaf_blocks__paragraphs__006:
- spec_txt_example_position: 194
- source_specification: commonmark
-04_08__leaf_blocks__paragraphs__007:
- spec_txt_example_position: 195
- source_specification: commonmark
-04_08__leaf_blocks__paragraphs__008:
- spec_txt_example_position: 196
- source_specification: commonmark
-04_09__leaf_blocks__blank_lines__001:
- spec_txt_example_position: 197
- source_specification: commonmark
-04_10__leaf_blocks__tables_extension__001:
- spec_txt_example_position: 198
- source_specification: github
-04_10__leaf_blocks__tables_extension__002:
- spec_txt_example_position: 199
- source_specification: github
-04_10__leaf_blocks__tables_extension__003:
- spec_txt_example_position: 200
- source_specification: github
-04_10__leaf_blocks__tables_extension__004:
- spec_txt_example_position: 201
- source_specification: github
-04_10__leaf_blocks__tables_extension__005:
- spec_txt_example_position: 202
- source_specification: github
-04_10__leaf_blocks__tables_extension__006:
- spec_txt_example_position: 203
- source_specification: github
-04_10__leaf_blocks__tables_extension__007:
- spec_txt_example_position: 204
- source_specification: github
-04_10__leaf_blocks__tables_extension__008:
- spec_txt_example_position: 205
- source_specification: github
-05_01__container_blocks__block_quotes__001:
- spec_txt_example_position: 206
- source_specification: commonmark
-05_01__container_blocks__block_quotes__002:
- spec_txt_example_position: 207
- source_specification: commonmark
-05_01__container_blocks__block_quotes__003:
- spec_txt_example_position: 208
- source_specification: commonmark
-05_01__container_blocks__block_quotes__004:
- spec_txt_example_position: 209
- source_specification: commonmark
-05_01__container_blocks__block_quotes__005:
- spec_txt_example_position: 210
- source_specification: commonmark
-05_01__container_blocks__block_quotes__006:
- spec_txt_example_position: 211
- source_specification: commonmark
-05_01__container_blocks__block_quotes__007:
- spec_txt_example_position: 212
- source_specification: commonmark
-05_01__container_blocks__block_quotes__008:
- spec_txt_example_position: 213
- source_specification: commonmark
-05_01__container_blocks__block_quotes__009:
- spec_txt_example_position: 214
- source_specification: commonmark
-05_01__container_blocks__block_quotes__010:
- spec_txt_example_position: 215
- source_specification: commonmark
-05_01__container_blocks__block_quotes__011:
- spec_txt_example_position: 216
- source_specification: commonmark
-05_01__container_blocks__block_quotes__012:
- spec_txt_example_position: 217
- source_specification: commonmark
-05_01__container_blocks__block_quotes__013:
- spec_txt_example_position: 218
- source_specification: commonmark
-05_01__container_blocks__block_quotes__014:
- spec_txt_example_position: 219
- source_specification: commonmark
-05_01__container_blocks__block_quotes__015:
- spec_txt_example_position: 220
- source_specification: commonmark
-05_01__container_blocks__block_quotes__016:
- spec_txt_example_position: 221
- source_specification: commonmark
-05_01__container_blocks__block_quotes__017:
- spec_txt_example_position: 222
- source_specification: commonmark
-05_01__container_blocks__block_quotes__018:
- spec_txt_example_position: 223
- source_specification: commonmark
-05_01__container_blocks__block_quotes__019:
- spec_txt_example_position: 224
- source_specification: commonmark
-05_01__container_blocks__block_quotes__020:
- spec_txt_example_position: 225
- source_specification: commonmark
-05_01__container_blocks__block_quotes__021:
- spec_txt_example_position: 226
- source_specification: commonmark
-05_01__container_blocks__block_quotes__022:
- spec_txt_example_position: 227
- source_specification: commonmark
-05_01__container_blocks__block_quotes__023:
- spec_txt_example_position: 228
- source_specification: commonmark
-05_01__container_blocks__block_quotes__024:
- spec_txt_example_position: 229
- source_specification: commonmark
-05_01__container_blocks__block_quotes__025:
- spec_txt_example_position: 230
- source_specification: commonmark
-05_02__container_blocks__list_items__001:
- spec_txt_example_position: 231
- source_specification: commonmark
-05_02__container_blocks__list_items__002:
- spec_txt_example_position: 232
- source_specification: commonmark
-05_02__container_blocks__list_items__003:
- spec_txt_example_position: 233
- source_specification: commonmark
-05_02__container_blocks__list_items__004:
- spec_txt_example_position: 234
- source_specification: commonmark
-05_02__container_blocks__list_items__005:
- spec_txt_example_position: 235
- source_specification: commonmark
-05_02__container_blocks__list_items__006:
- spec_txt_example_position: 236
- source_specification: commonmark
-05_02__container_blocks__list_items__007:
- spec_txt_example_position: 237
- source_specification: commonmark
-05_02__container_blocks__list_items__008:
- spec_txt_example_position: 238
- source_specification: commonmark
-05_02__container_blocks__list_items__009:
- spec_txt_example_position: 239
- source_specification: commonmark
-05_02__container_blocks__list_items__010:
- spec_txt_example_position: 240
- source_specification: commonmark
-05_02__container_blocks__list_items__011:
- spec_txt_example_position: 241
- source_specification: commonmark
-05_02__container_blocks__list_items__012:
- spec_txt_example_position: 242
- source_specification: commonmark
-05_02__container_blocks__list_items__013:
- spec_txt_example_position: 243
- source_specification: commonmark
-05_02__container_blocks__list_items__014:
- spec_txt_example_position: 244
- source_specification: commonmark
-05_02__container_blocks__list_items__015:
- spec_txt_example_position: 245
- source_specification: commonmark
-05_02__container_blocks__list_items__016:
- spec_txt_example_position: 246
- source_specification: commonmark
-05_02__container_blocks__list_items__017:
- spec_txt_example_position: 247
- source_specification: commonmark
-05_02__container_blocks__list_items__018:
- spec_txt_example_position: 248
- source_specification: commonmark
-05_02__container_blocks__list_items__019:
- spec_txt_example_position: 249
- source_specification: commonmark
-05_02__container_blocks__list_items__020:
- spec_txt_example_position: 250
- source_specification: commonmark
-05_02__container_blocks__list_items__021:
- spec_txt_example_position: 251
- source_specification: commonmark
-05_02__container_blocks__list_items__022:
- spec_txt_example_position: 252
- source_specification: commonmark
-05_02__container_blocks__list_items__023:
- spec_txt_example_position: 253
- source_specification: commonmark
-05_02__container_blocks__list_items__024:
- spec_txt_example_position: 254
- source_specification: commonmark
-05_02__container_blocks__list_items__025:
- spec_txt_example_position: 255
- source_specification: commonmark
-05_02__container_blocks__list_items__026:
- spec_txt_example_position: 256
- source_specification: commonmark
-05_02__container_blocks__list_items__027:
- spec_txt_example_position: 257
- source_specification: commonmark
-05_02__container_blocks__list_items__028:
- spec_txt_example_position: 258
- source_specification: commonmark
-05_02__container_blocks__list_items__029:
- spec_txt_example_position: 259
- source_specification: commonmark
-05_02__container_blocks__list_items__030:
- spec_txt_example_position: 260
- source_specification: commonmark
-05_02__container_blocks__list_items__031:
- spec_txt_example_position: 261
- source_specification: commonmark
-05_02__container_blocks__list_items__032:
- spec_txt_example_position: 262
- source_specification: commonmark
-05_02__container_blocks__list_items__033:
- spec_txt_example_position: 263
- source_specification: commonmark
-05_02__container_blocks__list_items__034:
- spec_txt_example_position: 264
- source_specification: commonmark
-05_02__container_blocks__list_items__035:
- spec_txt_example_position: 265
- source_specification: commonmark
-05_02__container_blocks__list_items__036:
- spec_txt_example_position: 266
- source_specification: commonmark
-05_02__container_blocks__list_items__037:
- spec_txt_example_position: 267
- source_specification: commonmark
-05_02__container_blocks__list_items__038:
- spec_txt_example_position: 268
- source_specification: commonmark
-05_02__container_blocks__list_items__039:
- spec_txt_example_position: 269
- source_specification: commonmark
-05_02__container_blocks__list_items__040:
- spec_txt_example_position: 270
- source_specification: commonmark
-05_02__container_blocks__list_items__041:
- spec_txt_example_position: 271
- source_specification: commonmark
-05_02__container_blocks__list_items__042:
- spec_txt_example_position: 272
- source_specification: commonmark
-05_02__container_blocks__list_items__043:
- spec_txt_example_position: 273
- source_specification: commonmark
-05_02__container_blocks__list_items__044:
- spec_txt_example_position: 274
- source_specification: commonmark
-05_02__container_blocks__list_items__045:
- spec_txt_example_position: 275
- source_specification: commonmark
-05_02__container_blocks__list_items__046:
- spec_txt_example_position: 276
- source_specification: commonmark
-05_02__container_blocks__list_items__047:
- spec_txt_example_position: 277
- source_specification: commonmark
-05_02__container_blocks__list_items__048:
- spec_txt_example_position: 278
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__049:
- spec_txt_example_position: 281
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__050:
- spec_txt_example_position: 282
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__051:
- spec_txt_example_position: 283
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__052:
- spec_txt_example_position: 284
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__053:
- spec_txt_example_position: 285
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__054:
- spec_txt_example_position: 286
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__055:
- spec_txt_example_position: 287
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__056:
- spec_txt_example_position: 288
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__057:
- spec_txt_example_position: 289
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__058:
- spec_txt_example_position: 290
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__059:
- spec_txt_example_position: 291
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__060:
- spec_txt_example_position: 292
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__061:
- spec_txt_example_position: 293
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__062:
- spec_txt_example_position: 294
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__063:
- spec_txt_example_position: 295
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__064:
- spec_txt_example_position: 296
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__065:
- spec_txt_example_position: 297
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__066:
- spec_txt_example_position: 298
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__067:
- spec_txt_example_position: 299
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__068:
- spec_txt_example_position: 300
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__069:
- spec_txt_example_position: 301
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__070:
- spec_txt_example_position: 302
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__071:
- spec_txt_example_position: 303
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__072:
- spec_txt_example_position: 304
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__073:
- spec_txt_example_position: 305
- source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__074:
- spec_txt_example_position: 306
- source_specification: commonmark
-06_01__inlines__001:
- spec_txt_example_position: 307
- source_specification: commonmark
-06_02__inlines__backslash_escapes__001:
- spec_txt_example_position: 308
- source_specification: commonmark
-06_02__inlines__backslash_escapes__002:
- spec_txt_example_position: 309
- source_specification: commonmark
-06_02__inlines__backslash_escapes__003:
- spec_txt_example_position: 310
- source_specification: commonmark
-06_02__inlines__backslash_escapes__004:
- spec_txt_example_position: 311
- source_specification: commonmark
-06_02__inlines__backslash_escapes__005:
- spec_txt_example_position: 312
- source_specification: commonmark
-06_02__inlines__backslash_escapes__006:
- spec_txt_example_position: 313
- source_specification: commonmark
-06_02__inlines__backslash_escapes__007:
- spec_txt_example_position: 314
- source_specification: commonmark
-06_02__inlines__backslash_escapes__008:
- spec_txt_example_position: 315
- source_specification: commonmark
-06_02__inlines__backslash_escapes__009:
- spec_txt_example_position: 316
- source_specification: commonmark
-06_02__inlines__backslash_escapes__010:
- spec_txt_example_position: 317
- source_specification: commonmark
-06_02__inlines__backslash_escapes__011:
- spec_txt_example_position: 318
- source_specification: commonmark
-06_02__inlines__backslash_escapes__012:
- spec_txt_example_position: 319
- source_specification: commonmark
-06_02__inlines__backslash_escapes__013:
- spec_txt_example_position: 320
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__001:
- spec_txt_example_position: 321
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__002:
- spec_txt_example_position: 322
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__003:
- spec_txt_example_position: 323
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__004:
- spec_txt_example_position: 324
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__005:
- spec_txt_example_position: 325
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__006:
- spec_txt_example_position: 326
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__007:
- spec_txt_example_position: 327
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__008:
- spec_txt_example_position: 328
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__009:
- spec_txt_example_position: 329
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__010:
- spec_txt_example_position: 330
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__011:
- spec_txt_example_position: 331
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__012:
- spec_txt_example_position: 332
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__013:
- spec_txt_example_position: 333
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__014:
- spec_txt_example_position: 334
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__015:
- spec_txt_example_position: 335
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__016:
- spec_txt_example_position: 336
- source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__017:
- spec_txt_example_position: 337
- source_specification: commonmark
-06_04__inlines__code_spans__001:
- spec_txt_example_position: 338
- source_specification: commonmark
-06_04__inlines__code_spans__002:
- spec_txt_example_position: 339
- source_specification: commonmark
-06_04__inlines__code_spans__003:
- spec_txt_example_position: 340
- source_specification: commonmark
-06_04__inlines__code_spans__004:
- spec_txt_example_position: 341
- source_specification: commonmark
-06_04__inlines__code_spans__005:
- spec_txt_example_position: 342
- source_specification: commonmark
-06_04__inlines__code_spans__006:
- spec_txt_example_position: 343
- source_specification: commonmark
-06_04__inlines__code_spans__007:
- spec_txt_example_position: 344
- source_specification: commonmark
-06_04__inlines__code_spans__008:
- spec_txt_example_position: 345
- source_specification: commonmark
-06_04__inlines__code_spans__009:
- spec_txt_example_position: 346
- source_specification: commonmark
-06_04__inlines__code_spans__010:
- spec_txt_example_position: 347
- source_specification: commonmark
-06_04__inlines__code_spans__011:
- spec_txt_example_position: 348
- source_specification: commonmark
-06_04__inlines__code_spans__012:
- spec_txt_example_position: 349
- source_specification: commonmark
-06_04__inlines__code_spans__013:
- spec_txt_example_position: 350
- source_specification: commonmark
-06_04__inlines__code_spans__014:
- spec_txt_example_position: 351
- source_specification: commonmark
-06_04__inlines__code_spans__015:
- spec_txt_example_position: 352
- source_specification: commonmark
-06_04__inlines__code_spans__016:
- spec_txt_example_position: 353
- source_specification: commonmark
-06_04__inlines__code_spans__017:
- spec_txt_example_position: 354
- source_specification: commonmark
-06_04__inlines__code_spans__018:
- spec_txt_example_position: 355
- source_specification: commonmark
-06_04__inlines__code_spans__019:
- spec_txt_example_position: 356
- source_specification: commonmark
-06_04__inlines__code_spans__020:
- spec_txt_example_position: 357
- source_specification: commonmark
-06_04__inlines__code_spans__021:
- spec_txt_example_position: 358
- source_specification: commonmark
-06_04__inlines__code_spans__022:
- spec_txt_example_position: 359
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__001:
- spec_txt_example_position: 360
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__002:
- spec_txt_example_position: 361
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__003:
- spec_txt_example_position: 362
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__004:
- spec_txt_example_position: 363
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__005:
- spec_txt_example_position: 364
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__006:
- spec_txt_example_position: 365
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__007:
- spec_txt_example_position: 366
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__008:
- spec_txt_example_position: 367
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__009:
- spec_txt_example_position: 368
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__010:
- spec_txt_example_position: 369
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__011:
- spec_txt_example_position: 370
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__012:
- spec_txt_example_position: 371
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__013:
- spec_txt_example_position: 372
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__014:
- spec_txt_example_position: 373
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__015:
- spec_txt_example_position: 374
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__016:
- spec_txt_example_position: 375
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__017:
- spec_txt_example_position: 376
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__018:
- spec_txt_example_position: 377
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__019:
- spec_txt_example_position: 378
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__020:
- spec_txt_example_position: 379
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__021:
- spec_txt_example_position: 380
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__022:
- spec_txt_example_position: 381
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__023:
- spec_txt_example_position: 382
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__024:
- spec_txt_example_position: 383
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__025:
- spec_txt_example_position: 384
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__026:
- spec_txt_example_position: 385
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__027:
- spec_txt_example_position: 386
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__028:
- spec_txt_example_position: 387
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__029:
- spec_txt_example_position: 388
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__030:
- spec_txt_example_position: 389
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__031:
- spec_txt_example_position: 390
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__032:
- spec_txt_example_position: 391
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__033:
- spec_txt_example_position: 392
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__034:
- spec_txt_example_position: 393
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__035:
- spec_txt_example_position: 394
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__036:
- spec_txt_example_position: 395
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__037:
- spec_txt_example_position: 396
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__038:
- spec_txt_example_position: 397
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__039:
- spec_txt_example_position: 398
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__040:
- spec_txt_example_position: 399
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__041:
- spec_txt_example_position: 400
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__042:
- spec_txt_example_position: 401
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__043:
- spec_txt_example_position: 402
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__044:
- spec_txt_example_position: 403
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__045:
- spec_txt_example_position: 404
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__046:
- spec_txt_example_position: 405
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__047:
- spec_txt_example_position: 406
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__048:
- spec_txt_example_position: 407
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__049:
- spec_txt_example_position: 408
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__050:
- spec_txt_example_position: 409
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__051:
- spec_txt_example_position: 410
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__052:
- spec_txt_example_position: 411
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__053:
- spec_txt_example_position: 412
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__054:
- spec_txt_example_position: 413
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__055:
- spec_txt_example_position: 414
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__056:
- spec_txt_example_position: 415
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__057:
- spec_txt_example_position: 416
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__058:
- spec_txt_example_position: 417
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__059:
- spec_txt_example_position: 418
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__060:
- spec_txt_example_position: 419
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__061:
- spec_txt_example_position: 420
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__062:
- spec_txt_example_position: 421
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__063:
- spec_txt_example_position: 422
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__064:
- spec_txt_example_position: 423
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__065:
- spec_txt_example_position: 424
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__066:
- spec_txt_example_position: 425
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__067:
- spec_txt_example_position: 426
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__068:
- spec_txt_example_position: 427
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__069:
- spec_txt_example_position: 428
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__070:
- spec_txt_example_position: 429
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__071:
- spec_txt_example_position: 430
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__072:
- spec_txt_example_position: 431
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__073:
- spec_txt_example_position: 432
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__074:
- spec_txt_example_position: 433
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__075:
- spec_txt_example_position: 434
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__076:
- spec_txt_example_position: 435
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__077:
- spec_txt_example_position: 436
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__078:
- spec_txt_example_position: 437
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__079:
- spec_txt_example_position: 438
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__080:
- spec_txt_example_position: 439
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__081:
- spec_txt_example_position: 440
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__082:
- spec_txt_example_position: 441
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__083:
- spec_txt_example_position: 442
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__084:
- spec_txt_example_position: 443
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__085:
- spec_txt_example_position: 444
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__086:
- spec_txt_example_position: 445
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__087:
- spec_txt_example_position: 446
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__088:
- spec_txt_example_position: 447
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__089:
- spec_txt_example_position: 448
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__090:
- spec_txt_example_position: 449
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__091:
- spec_txt_example_position: 450
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__092:
- spec_txt_example_position: 451
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__093:
- spec_txt_example_position: 452
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__094:
- spec_txt_example_position: 453
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__095:
- spec_txt_example_position: 454
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__096:
- spec_txt_example_position: 455
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__097:
- spec_txt_example_position: 456
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__098:
- spec_txt_example_position: 457
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__099:
- spec_txt_example_position: 458
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__100:
- spec_txt_example_position: 459
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__101:
- spec_txt_example_position: 460
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__102:
- spec_txt_example_position: 461
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__103:
- spec_txt_example_position: 462
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__104:
- spec_txt_example_position: 463
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__105:
- spec_txt_example_position: 464
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__106:
- spec_txt_example_position: 465
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__107:
- spec_txt_example_position: 466
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__108:
- spec_txt_example_position: 467
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__109:
- spec_txt_example_position: 468
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__110:
- spec_txt_example_position: 469
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__111:
- spec_txt_example_position: 470
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__112:
- spec_txt_example_position: 471
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__113:
- spec_txt_example_position: 472
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__114:
- spec_txt_example_position: 473
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__115:
- spec_txt_example_position: 474
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__116:
- spec_txt_example_position: 475
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__117:
- spec_txt_example_position: 476
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__118:
- spec_txt_example_position: 477
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__119:
- spec_txt_example_position: 478
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__120:
- spec_txt_example_position: 479
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__121:
- spec_txt_example_position: 480
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__122:
- spec_txt_example_position: 481
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__123:
- spec_txt_example_position: 482
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__124:
- spec_txt_example_position: 483
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__125:
- spec_txt_example_position: 484
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__126:
- spec_txt_example_position: 485
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__127:
- spec_txt_example_position: 486
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__128:
- spec_txt_example_position: 487
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__129:
- spec_txt_example_position: 488
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__130:
- spec_txt_example_position: 489
- source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__131:
- spec_txt_example_position: 490
- source_specification: commonmark
-06_06__inlines__strikethrough_extension__001:
- spec_txt_example_position: 491
- source_specification: github
-06_06__inlines__strikethrough_extension__002:
- spec_txt_example_position: 492
- source_specification: github
-06_07__inlines__links__001:
- spec_txt_example_position: 493
- source_specification: commonmark
-06_07__inlines__links__002:
- spec_txt_example_position: 494
- source_specification: commonmark
-06_07__inlines__links__003:
- spec_txt_example_position: 495
- source_specification: commonmark
-06_07__inlines__links__004:
- spec_txt_example_position: 496
- source_specification: commonmark
-06_07__inlines__links__005:
- spec_txt_example_position: 497
- source_specification: commonmark
-06_07__inlines__links__006:
- spec_txt_example_position: 498
- source_specification: commonmark
-06_07__inlines__links__007:
- spec_txt_example_position: 499
- source_specification: commonmark
-06_07__inlines__links__008:
- spec_txt_example_position: 500
- source_specification: commonmark
-06_07__inlines__links__009:
- spec_txt_example_position: 501
- source_specification: commonmark
-06_07__inlines__links__010:
- spec_txt_example_position: 502
- source_specification: commonmark
-06_07__inlines__links__011:
- spec_txt_example_position: 503
- source_specification: commonmark
-06_07__inlines__links__012:
- spec_txt_example_position: 504
- source_specification: commonmark
-06_07__inlines__links__013:
- spec_txt_example_position: 505
- source_specification: commonmark
-06_07__inlines__links__014:
- spec_txt_example_position: 506
- source_specification: commonmark
-06_07__inlines__links__015:
- spec_txt_example_position: 507
- source_specification: commonmark
-06_07__inlines__links__016:
- spec_txt_example_position: 508
- source_specification: commonmark
-06_07__inlines__links__017:
- spec_txt_example_position: 509
- source_specification: commonmark
-06_07__inlines__links__018:
- spec_txt_example_position: 510
- source_specification: commonmark
-06_07__inlines__links__019:
- spec_txt_example_position: 511
- source_specification: commonmark
-06_07__inlines__links__020:
- spec_txt_example_position: 512
- source_specification: commonmark
-06_07__inlines__links__021:
- spec_txt_example_position: 513
- source_specification: commonmark
-06_07__inlines__links__022:
- spec_txt_example_position: 514
- source_specification: commonmark
-06_07__inlines__links__023:
- spec_txt_example_position: 515
- source_specification: commonmark
-06_07__inlines__links__024:
- spec_txt_example_position: 516
- source_specification: commonmark
-06_07__inlines__links__025:
- spec_txt_example_position: 517
- source_specification: commonmark
-06_07__inlines__links__026:
- spec_txt_example_position: 518
- source_specification: commonmark
-06_07__inlines__links__027:
- spec_txt_example_position: 519
- source_specification: commonmark
-06_07__inlines__links__028:
- spec_txt_example_position: 520
- source_specification: commonmark
-06_07__inlines__links__029:
- spec_txt_example_position: 521
- source_specification: commonmark
-06_07__inlines__links__030:
- spec_txt_example_position: 522
- source_specification: commonmark
-06_07__inlines__links__031:
- spec_txt_example_position: 523
- source_specification: commonmark
-06_07__inlines__links__032:
- spec_txt_example_position: 524
- source_specification: commonmark
-06_07__inlines__links__033:
- spec_txt_example_position: 525
- source_specification: commonmark
-06_07__inlines__links__034:
- spec_txt_example_position: 526
- source_specification: commonmark
-06_07__inlines__links__035:
- spec_txt_example_position: 527
- source_specification: commonmark
-06_07__inlines__links__036:
- spec_txt_example_position: 528
- source_specification: commonmark
-06_07__inlines__links__037:
- spec_txt_example_position: 529
- source_specification: commonmark
-06_07__inlines__links__038:
- spec_txt_example_position: 530
- source_specification: commonmark
-06_07__inlines__links__039:
- spec_txt_example_position: 531
- source_specification: commonmark
-06_07__inlines__links__040:
- spec_txt_example_position: 532
- source_specification: commonmark
-06_07__inlines__links__041:
- spec_txt_example_position: 533
- source_specification: commonmark
-06_07__inlines__links__042:
- spec_txt_example_position: 534
- source_specification: commonmark
-06_07__inlines__links__043:
- spec_txt_example_position: 535
- source_specification: commonmark
-06_07__inlines__links__044:
- spec_txt_example_position: 536
- source_specification: commonmark
-06_07__inlines__links__045:
- spec_txt_example_position: 537
- source_specification: commonmark
-06_07__inlines__links__046:
- spec_txt_example_position: 538
- source_specification: commonmark
-06_07__inlines__links__047:
- spec_txt_example_position: 539
- source_specification: commonmark
-06_07__inlines__links__048:
- spec_txt_example_position: 540
- source_specification: commonmark
-06_07__inlines__links__049:
- spec_txt_example_position: 541
- source_specification: commonmark
-06_07__inlines__links__050:
- spec_txt_example_position: 542
- source_specification: commonmark
-06_07__inlines__links__051:
- spec_txt_example_position: 543
- source_specification: commonmark
-06_07__inlines__links__052:
- spec_txt_example_position: 544
- source_specification: commonmark
-06_07__inlines__links__053:
- spec_txt_example_position: 545
- source_specification: commonmark
-06_07__inlines__links__054:
- spec_txt_example_position: 546
- source_specification: commonmark
-06_07__inlines__links__055:
- spec_txt_example_position: 547
- source_specification: commonmark
-06_07__inlines__links__056:
- spec_txt_example_position: 548
- source_specification: commonmark
-06_07__inlines__links__057:
- spec_txt_example_position: 549
- source_specification: commonmark
-06_07__inlines__links__058:
- spec_txt_example_position: 550
- source_specification: commonmark
-06_07__inlines__links__059:
- spec_txt_example_position: 551
- source_specification: commonmark
-06_07__inlines__links__060:
- spec_txt_example_position: 552
- source_specification: commonmark
-06_07__inlines__links__061:
- spec_txt_example_position: 553
- source_specification: commonmark
-06_07__inlines__links__062:
- spec_txt_example_position: 554
- source_specification: commonmark
-06_07__inlines__links__063:
- spec_txt_example_position: 555
- source_specification: commonmark
-06_07__inlines__links__064:
- spec_txt_example_position: 556
- source_specification: commonmark
-06_07__inlines__links__065:
- spec_txt_example_position: 557
- source_specification: commonmark
-06_07__inlines__links__066:
- spec_txt_example_position: 558
- source_specification: commonmark
-06_07__inlines__links__067:
- spec_txt_example_position: 559
- source_specification: commonmark
-06_07__inlines__links__068:
- spec_txt_example_position: 560
- source_specification: commonmark
-06_07__inlines__links__069:
- spec_txt_example_position: 561
- source_specification: commonmark
-06_07__inlines__links__070:
- spec_txt_example_position: 562
- source_specification: commonmark
-06_07__inlines__links__071:
- spec_txt_example_position: 563
- source_specification: commonmark
-06_07__inlines__links__072:
- spec_txt_example_position: 564
- source_specification: commonmark
-06_07__inlines__links__073:
- spec_txt_example_position: 565
- source_specification: commonmark
-06_07__inlines__links__074:
- spec_txt_example_position: 566
- source_specification: commonmark
-06_07__inlines__links__075:
- spec_txt_example_position: 567
- source_specification: commonmark
-06_07__inlines__links__076:
- spec_txt_example_position: 568
- source_specification: commonmark
-06_07__inlines__links__077:
- spec_txt_example_position: 569
- source_specification: commonmark
-06_07__inlines__links__078:
- spec_txt_example_position: 570
- source_specification: commonmark
-06_07__inlines__links__079:
- spec_txt_example_position: 571
- source_specification: commonmark
-06_07__inlines__links__080:
- spec_txt_example_position: 572
- source_specification: commonmark
-06_07__inlines__links__081:
- spec_txt_example_position: 573
- source_specification: commonmark
-06_07__inlines__links__082:
- spec_txt_example_position: 574
- source_specification: commonmark
-06_07__inlines__links__083:
- spec_txt_example_position: 575
- source_specification: commonmark
-06_07__inlines__links__084:
- spec_txt_example_position: 576
- source_specification: commonmark
-06_07__inlines__links__085:
- spec_txt_example_position: 577
- source_specification: commonmark
-06_07__inlines__links__086:
- spec_txt_example_position: 578
- source_specification: commonmark
-06_07__inlines__links__087:
- spec_txt_example_position: 579
- source_specification: commonmark
-06_08__inlines__images__001:
- spec_txt_example_position: 580
- source_specification: commonmark
-06_08__inlines__images__002:
- spec_txt_example_position: 581
- source_specification: commonmark
-06_08__inlines__images__003:
- spec_txt_example_position: 582
- source_specification: commonmark
-06_08__inlines__images__004:
- spec_txt_example_position: 583
- source_specification: commonmark
-06_08__inlines__images__005:
- spec_txt_example_position: 584
- source_specification: commonmark
-06_08__inlines__images__006:
- spec_txt_example_position: 585
- source_specification: commonmark
-06_08__inlines__images__007:
- spec_txt_example_position: 586
- source_specification: commonmark
-06_08__inlines__images__008:
- spec_txt_example_position: 587
- source_specification: commonmark
-06_08__inlines__images__009:
- spec_txt_example_position: 588
- source_specification: commonmark
-06_08__inlines__images__010:
- spec_txt_example_position: 589
- source_specification: commonmark
-06_08__inlines__images__011:
- spec_txt_example_position: 590
- source_specification: commonmark
-06_08__inlines__images__012:
- spec_txt_example_position: 591
- source_specification: commonmark
-06_08__inlines__images__013:
- spec_txt_example_position: 592
- source_specification: commonmark
-06_08__inlines__images__014:
- spec_txt_example_position: 593
- source_specification: commonmark
-06_08__inlines__images__015:
- spec_txt_example_position: 594
- source_specification: commonmark
-06_08__inlines__images__016:
- spec_txt_example_position: 595
- source_specification: commonmark
-06_08__inlines__images__017:
- spec_txt_example_position: 596
- source_specification: commonmark
-06_08__inlines__images__018:
- spec_txt_example_position: 597
- source_specification: commonmark
-06_08__inlines__images__019:
- spec_txt_example_position: 598
- source_specification: commonmark
-06_08__inlines__images__020:
- spec_txt_example_position: 599
- source_specification: commonmark
-06_08__inlines__images__021:
- spec_txt_example_position: 600
- source_specification: commonmark
-06_08__inlines__images__022:
- spec_txt_example_position: 601
- source_specification: commonmark
-06_09__inlines__autolinks__001:
- spec_txt_example_position: 602
- source_specification: commonmark
-06_09__inlines__autolinks__002:
- spec_txt_example_position: 603
- source_specification: commonmark
-06_09__inlines__autolinks__003:
- spec_txt_example_position: 604
- source_specification: commonmark
-06_09__inlines__autolinks__004:
- spec_txt_example_position: 605
- source_specification: commonmark
-06_09__inlines__autolinks__005:
- spec_txt_example_position: 606
- source_specification: commonmark
-06_09__inlines__autolinks__006:
- spec_txt_example_position: 607
- source_specification: commonmark
-06_09__inlines__autolinks__007:
- spec_txt_example_position: 608
- source_specification: commonmark
-06_09__inlines__autolinks__008:
- spec_txt_example_position: 609
- source_specification: commonmark
-06_09__inlines__autolinks__009:
- spec_txt_example_position: 610
- source_specification: commonmark
-06_09__inlines__autolinks__010:
- spec_txt_example_position: 611
- source_specification: commonmark
-06_09__inlines__autolinks__011:
- spec_txt_example_position: 612
- source_specification: commonmark
-06_09__inlines__autolinks__012:
- spec_txt_example_position: 613
- source_specification: commonmark
-06_09__inlines__autolinks__013:
- spec_txt_example_position: 614
- source_specification: commonmark
-06_09__inlines__autolinks__014:
- spec_txt_example_position: 615
- source_specification: commonmark
-06_09__inlines__autolinks__015:
- spec_txt_example_position: 616
- source_specification: commonmark
-06_09__inlines__autolinks__016:
- spec_txt_example_position: 617
- source_specification: commonmark
-06_09__inlines__autolinks__017:
- spec_txt_example_position: 618
- source_specification: commonmark
-06_09__inlines__autolinks__018:
- spec_txt_example_position: 619
- source_specification: commonmark
-06_09__inlines__autolinks__019:
- spec_txt_example_position: 620
- source_specification: commonmark
-06_10__inlines__autolinks_extension__001:
- spec_txt_example_position: 621
- source_specification: github
-06_10__inlines__autolinks_extension__002:
- spec_txt_example_position: 622
- source_specification: github
-06_10__inlines__autolinks_extension__003:
- spec_txt_example_position: 623
- source_specification: github
-06_10__inlines__autolinks_extension__004:
- spec_txt_example_position: 624
- source_specification: github
-06_10__inlines__autolinks_extension__005:
- spec_txt_example_position: 625
- source_specification: github
-06_10__inlines__autolinks_extension__006:
- spec_txt_example_position: 626
- source_specification: github
-06_10__inlines__autolinks_extension__007:
- spec_txt_example_position: 627
- source_specification: github
-06_10__inlines__autolinks_extension__008:
- spec_txt_example_position: 628
- source_specification: github
-06_10__inlines__autolinks_extension__009:
- spec_txt_example_position: 629
- source_specification: github
-06_10__inlines__autolinks_extension__010:
- spec_txt_example_position: 630
- source_specification: github
-06_10__inlines__autolinks_extension__011:
- spec_txt_example_position: 631
- source_specification: github
-06_11__inlines__raw_html__001:
- spec_txt_example_position: 632
- source_specification: commonmark
-06_11__inlines__raw_html__002:
- spec_txt_example_position: 633
- source_specification: commonmark
-06_11__inlines__raw_html__003:
- spec_txt_example_position: 634
- source_specification: commonmark
-06_11__inlines__raw_html__004:
- spec_txt_example_position: 635
- source_specification: commonmark
-06_11__inlines__raw_html__005:
- spec_txt_example_position: 636
- source_specification: commonmark
-06_11__inlines__raw_html__006:
- spec_txt_example_position: 637
- source_specification: commonmark
-06_11__inlines__raw_html__007:
- spec_txt_example_position: 638
- source_specification: commonmark
-06_11__inlines__raw_html__008:
- spec_txt_example_position: 639
- source_specification: commonmark
-06_11__inlines__raw_html__009:
- spec_txt_example_position: 640
- source_specification: commonmark
-06_11__inlines__raw_html__010:
- spec_txt_example_position: 641
- source_specification: commonmark
-06_11__inlines__raw_html__011:
- spec_txt_example_position: 642
- source_specification: commonmark
-06_11__inlines__raw_html__012:
- spec_txt_example_position: 643
- source_specification: commonmark
-06_11__inlines__raw_html__013:
- spec_txt_example_position: 644
- source_specification: commonmark
-06_11__inlines__raw_html__014:
- spec_txt_example_position: 645
- source_specification: commonmark
-06_11__inlines__raw_html__015:
- spec_txt_example_position: 646
- source_specification: commonmark
-06_11__inlines__raw_html__016:
- spec_txt_example_position: 647
- source_specification: commonmark
-06_11__inlines__raw_html__017:
- spec_txt_example_position: 648
- source_specification: commonmark
-06_11__inlines__raw_html__018:
- spec_txt_example_position: 649
- source_specification: commonmark
-06_11__inlines__raw_html__019:
- spec_txt_example_position: 650
- source_specification: commonmark
-06_11__inlines__raw_html__020:
- spec_txt_example_position: 651
- source_specification: commonmark
-06_11__inlines__raw_html__021:
- spec_txt_example_position: 652
- source_specification: commonmark
-06_12__inlines__disallowed_raw_html_extension__001:
- spec_txt_example_position: 653
- source_specification: github
-06_13__inlines__hard_line_breaks__001:
- spec_txt_example_position: 654
- source_specification: commonmark
-06_13__inlines__hard_line_breaks__002:
- spec_txt_example_position: 655
- source_specification: commonmark
-06_13__inlines__hard_line_breaks__003:
- spec_txt_example_position: 656
- source_specification: commonmark
-06_13__inlines__hard_line_breaks__004:
- spec_txt_example_position: 657
- source_specification: commonmark
-06_13__inlines__hard_line_breaks__005:
- spec_txt_example_position: 658
- source_specification: commonmark
-06_13__inlines__hard_line_breaks__006:
- spec_txt_example_position: 659
- source_specification: commonmark
-06_13__inlines__hard_line_breaks__007:
- spec_txt_example_position: 660
- source_specification: commonmark
-06_13__inlines__hard_line_breaks__008:
- spec_txt_example_position: 661
- source_specification: commonmark
-06_13__inlines__hard_line_breaks__009:
- spec_txt_example_position: 662
- source_specification: commonmark
-06_13__inlines__hard_line_breaks__010:
- spec_txt_example_position: 663
- source_specification: commonmark
-06_13__inlines__hard_line_breaks__011:
- spec_txt_example_position: 664
- source_specification: commonmark
-06_13__inlines__hard_line_breaks__012:
- spec_txt_example_position: 665
- source_specification: commonmark
-06_13__inlines__hard_line_breaks__013:
- spec_txt_example_position: 666
- source_specification: commonmark
-06_13__inlines__hard_line_breaks__014:
- spec_txt_example_position: 667
- source_specification: commonmark
-06_13__inlines__hard_line_breaks__015:
- spec_txt_example_position: 668
- source_specification: commonmark
-06_14__inlines__soft_line_breaks__001:
- spec_txt_example_position: 669
- source_specification: commonmark
-06_14__inlines__soft_line_breaks__002:
- spec_txt_example_position: 670
- source_specification: commonmark
-06_15__inlines__textual_content__001:
- spec_txt_example_position: 671
- source_specification: commonmark
-06_15__inlines__textual_content__002:
- spec_txt_example_position: 672
- source_specification: commonmark
-06_15__inlines__textual_content__003:
- spec_txt_example_position: 673
- source_specification: commonmark
-07_01__gitlab_specific_markdown__footnotes__001:
- spec_txt_example_position: 674
- source_specification: gitlab
diff --git a/spec/fixtures/glfm/example_snapshots/html.yml b/spec/fixtures/glfm/example_snapshots/html.yml
deleted file mode 100644
index b9deadcb4cb..00000000000
--- a/spec/fixtures/glfm/example_snapshots/html.yml
+++ /dev/null
@@ -1,7479 +0,0 @@
----
-02_01__preliminaries__tabs__001:
- canonical: "<pre><code>foo\tbaz\t\tbim\n</code></pre>\n"
- static: "<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre
- data-sourcepos=\"1:2-1:13\" class=\"code highlight js-syntax-highlight language-plaintext\"
- lang=\"plaintext\" data-canonical-lang=\"\" v-pre=\"true\"><code><span id=\"LC1\"
- class=\"line\" lang=\"plaintext\">foo\tbaz\t\tbim</span></code></pre>\n<copy-code></copy-code>\n</div>"
- wysiwyg: "<pre class=\"content-editor-code-block undefined code highlight\"><code>foo\tbaz\t\tbim</code></pre>"
-02_01__preliminaries__tabs__002:
- canonical: "<pre><code>foo\tbaz\t\tbim\n</code></pre>\n"
- static: "<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre
- data-sourcepos=\"1:4-1:15\" class=\"code highlight js-syntax-highlight language-plaintext\"
- lang=\"plaintext\" data-canonical-lang=\"\" v-pre=\"true\"><code><span id=\"LC1\"
- class=\"line\" lang=\"plaintext\">foo\tbaz\t\tbim</span></code></pre>\n<copy-code></copy-code>\n</div>"
- wysiwyg: "<pre class=\"content-editor-code-block undefined code highlight\"><code>foo\tbaz\t\tbim</code></pre>"
-02_01__preliminaries__tabs__003:
- canonical: "<pre><code>a\ta\nὐ\ta\n</code></pre>\n"
- static: "<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre
- data-sourcepos=\"1:5-2:9\" class=\"code highlight js-syntax-highlight language-plaintext\"
- lang=\"plaintext\" data-canonical-lang=\"\" v-pre=\"true\"><code><span id=\"LC1\"
- class=\"line\" lang=\"plaintext\">a\ta</span>\n<span id=\"LC2\" class=\"line\"
- lang=\"plaintext\">ὐ\ta</span></code></pre>\n<copy-code></copy-code>\n</div>"
- wysiwyg: "<pre class=\"content-editor-code-block undefined code highlight\"><code>a\ta\nὐ\ta</code></pre>"
-02_01__preliminaries__tabs__004:
- canonical: |
- <ul>
- <li>
- <p>foo</p>
- <p>bar</p>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:3-3:4" dir="auto">
- <li data-sourcepos="1:3-3:4">
- <p data-sourcepos="1:5-1:7">foo</p>
- <p data-sourcepos="3:2-3:4">bar</p>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p><p>bar</p></li></ul>
-02_01__preliminaries__tabs__005:
- canonical: |
- <ul>
- <li>
- <p>foo</p>
- <pre><code> bar
- </code></pre>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-3:5" dir="auto">
- <li data-sourcepos="1:1-3:5">
- <p data-sourcepos="1:3-1:5">foo</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="3:2-3:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> bar</span></code></pre>
- <copy-code></copy-code>
- </div>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p><pre class="content-editor-code-block undefined code highlight"><code> bar</code></pre></li></ul>
-02_01__preliminaries__tabs__006:
- canonical: |
- <blockquote>
- <pre><code> foo
- </code></pre>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-1:6" dir="auto">
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:3-1:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> foo</span></code></pre>
- <copy-code></copy-code>
- </div>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><pre class="content-editor-code-block undefined code highlight"><code> foo</code></pre></blockquote>
-02_01__preliminaries__tabs__007:
- canonical: |
- <ul>
- <li>
- <pre><code> foo
- </code></pre>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-1:6" dir="auto">
- <li data-sourcepos="1:1-1:6">
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:3-1:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> foo</span></code></pre>
- <copy-code></copy-code>
- </div>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p></p><pre class="content-editor-code-block undefined code highlight"><code> foo</code></pre></li></ul>
-02_01__preliminaries__tabs__008:
- canonical: |
- <pre><code>foo
- bar
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-2:4" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span>
- <span id="LC2" class="line" lang="plaintext">bar</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>foo
- bar</code></pre>
-02_01__preliminaries__tabs__009:
- canonical: |
- <ul>
- <li>foo
- <ul>
- <li>bar
- <ul>
- <li>baz</li>
- </ul>
- </li>
- </ul>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:2-3:7" dir="auto">
- <li data-sourcepos="1:2-3:7">foo
- <ul data-sourcepos="2:4-3:7">
- <li data-sourcepos="2:4-3:7">bar
- <ul data-sourcepos="3:3-3:7">
- <li data-sourcepos="3:3-3:7">baz</li>
- </ul>
- </li>
- </ul>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p><ul bullet="*"><li><p>bar</p><ul bullet="*"><li><p>baz</p></li></ul></li></ul></li></ul>
-02_01__preliminaries__tabs__010:
- canonical: |
- <h1>Foo</h1>
- static: |-
- <h1 data-sourcepos="1:1-1:5" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>
- wysiwyg: |-
- <h1>Foo</h1>
-02_01__preliminaries__tabs__011:
- canonical: |
- <hr />
- static: |-
- <hr data-sourcepos="1:1-1:6">
- wysiwyg: |-
- <hr>
-03_01__blocks_and_inlines__precedence__001:
- canonical: |
- <ul>
- <li>`one</li>
- <li>two`</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-2:6" dir="auto">
- <li data-sourcepos="1:1-1:6">`one</li>
- <li data-sourcepos="2:1-2:6">two`</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>`one</p></li><li><p>two`</p></li></ul>
-04_01__leaf_blocks__thematic_breaks__001:
- canonical: |
- <hr />
- <hr />
- <hr />
- static: |-
- <hr data-sourcepos="1:1-1:3">
- <hr data-sourcepos="2:1-2:3">
- <hr data-sourcepos="3:1-3:3">
- wysiwyg: |-
- <hr>
-04_01__leaf_blocks__thematic_breaks__002:
- canonical: |
- <p>+++</p>
- static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">+++</p>
- wysiwyg: |-
- <p>+++</p>
-04_01__leaf_blocks__thematic_breaks__003:
- canonical: |
- <p>===</p>
- static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">===</p>
- wysiwyg: |-
- <p>===</p>
-04_01__leaf_blocks__thematic_breaks__004:
- canonical: |
- <p>--
- **
- __</p>
- static: |-
- <p data-sourcepos="1:1-3:2" dir="auto">--
- **
- __</p>
- wysiwyg: |-
- <p>--
- **
- __</p>
-04_01__leaf_blocks__thematic_breaks__005:
- canonical: |
- <hr />
- <hr />
- <hr />
- static: |-
- <hr data-sourcepos="1:2-1:4">
- <hr data-sourcepos="2:3-2:5">
- <hr data-sourcepos="3:4-3:6">
- wysiwyg: |-
- <hr>
-04_01__leaf_blocks__thematic_breaks__006:
- canonical: |
- <pre><code>***
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-1:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">***</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>***</code></pre>
-04_01__leaf_blocks__thematic_breaks__007:
- canonical: |
- <p>Foo
- ***</p>
- static: |-
- <p data-sourcepos="1:1-2:7" dir="auto">Foo
- ***</p>
- wysiwyg: |-
- <p>Foo
- ***</p>
-04_01__leaf_blocks__thematic_breaks__008:
- canonical: |
- <hr />
- static: |-
- <hr data-sourcepos="1:1-1:37">
- wysiwyg: |-
- <hr>
-04_01__leaf_blocks__thematic_breaks__009:
- canonical: |
- <hr />
- static: |-
- <hr data-sourcepos="1:2-1:6">
- wysiwyg: |-
- <hr>
-04_01__leaf_blocks__thematic_breaks__010:
- canonical: |
- <hr />
- static: |-
- <hr data-sourcepos="1:2-1:19">
- wysiwyg: |-
- <hr>
-04_01__leaf_blocks__thematic_breaks__011:
- canonical: |
- <hr />
- static: |-
- <hr data-sourcepos="1:1-1:21">
- wysiwyg: |-
- <hr>
-04_01__leaf_blocks__thematic_breaks__012:
- canonical: |
- <hr />
- static: |-
- <hr data-sourcepos="1:1-1:11">
- wysiwyg: |-
- <hr>
-04_01__leaf_blocks__thematic_breaks__013:
- canonical: |
- <p>_ _ _ _ a</p>
- <p>a------</p>
- <p>---a---</p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto">_ _ _ _ a</p>
- <p data-sourcepos="3:1-3:7" dir="auto">a------</p>
- <p data-sourcepos="5:1-5:7" dir="auto">---a---</p>
- wysiwyg: |-
- <p>_ _ _ _ a</p>
-04_01__leaf_blocks__thematic_breaks__014:
- canonical: |
- <p><em>-</em></p>
- static: |-
- <p data-sourcepos="1:2-1:4" dir="auto"><em>-</em></p>
- wysiwyg: |-
- <p><em>-</em></p>
-04_01__leaf_blocks__thematic_breaks__015:
- canonical: |
- <ul>
- <li>foo</li>
- </ul>
- <hr />
- <ul>
- <li>bar</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-1:5" dir="auto">
- <li data-sourcepos="1:1-1:5">foo</li>
- </ul>
- <hr data-sourcepos="2:1-2:3">
- <ul data-sourcepos="3:1-3:5" dir="auto">
- <li data-sourcepos="3:1-3:5">bar</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p></li></ul>
-04_01__leaf_blocks__thematic_breaks__016:
- canonical: |
- <p>Foo</p>
- <hr />
- <p>bar</p>
- static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">Foo</p>
- <hr data-sourcepos="2:1-2:3">
- <p data-sourcepos="3:1-3:3" dir="auto">bar</p>
- wysiwyg: |-
- <p>Foo</p>
-04_01__leaf_blocks__thematic_breaks__017:
- canonical: |
- <h2>Foo</h2>
- <p>bar</p>
- static: |-
- <h2 data-sourcepos="1:1-3:3" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h2>
- <p data-sourcepos="3:1-3:3" dir="auto">bar</p>
- wysiwyg: |-
- <h2>Foo</h2>
-04_01__leaf_blocks__thematic_breaks__018:
- canonical: |
- <ul>
- <li>Foo</li>
- </ul>
- <hr />
- <ul>
- <li>Bar</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-1:5" dir="auto">
- <li data-sourcepos="1:1-1:5">Foo</li>
- </ul>
- <hr data-sourcepos="2:1-2:5">
- <ul data-sourcepos="3:1-3:5" dir="auto">
- <li data-sourcepos="3:1-3:5">Bar</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>Foo</p></li></ul>
-04_01__leaf_blocks__thematic_breaks__019:
- canonical: |
- <ul>
- <li>Foo</li>
- <li>
- <hr />
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-2:7" dir="auto">
- <li data-sourcepos="1:1-1:5">Foo</li>
- <li data-sourcepos="2:1-2:7">
- <hr data-sourcepos="2:3-2:7">
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>Foo</p></li><li><p></p><hr></li></ul>
-04_02__leaf_blocks__atx_headings__001:
- canonical: |
- <h1>foo</h1>
- <h2>foo</h2>
- <h3>foo</h3>
- <h4>foo</h4>
- <h5>foo</h5>
- <h6>foo</h6>
- static: |-
- <h1 data-sourcepos="1:1-1:5" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h1>
- <h2 data-sourcepos="2:1-2:6" dir="auto">
- <a id="user-content-foo-1" class="anchor" href="#foo-1" aria-hidden="true"></a>foo</h2>
- <h3 data-sourcepos="3:1-3:7" dir="auto">
- <a id="user-content-foo-2" class="anchor" href="#foo-2" aria-hidden="true"></a>foo</h3>
- <h4 data-sourcepos="4:1-4:8" dir="auto">
- <a id="user-content-foo-3" class="anchor" href="#foo-3" aria-hidden="true"></a>foo</h4>
- <h5 data-sourcepos="5:1-5:9" dir="auto">
- <a id="user-content-foo-4" class="anchor" href="#foo-4" aria-hidden="true"></a>foo</h5>
- <h6 data-sourcepos="6:1-6:10" dir="auto">
- <a id="user-content-foo-5" class="anchor" href="#foo-5" aria-hidden="true"></a>foo</h6>
- wysiwyg: |-
- <h1>foo</h1>
-04_02__leaf_blocks__atx_headings__002:
- canonical: |
- <p>####### foo</p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto">####### foo</p>
- wysiwyg: |-
- <p>####### foo</p>
-04_02__leaf_blocks__atx_headings__003:
- canonical: |
- <p>#5 bolt</p>
- <p>#hashtag</p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto">#5 bolt</p>
- <p data-sourcepos="3:1-3:8" dir="auto">#hashtag</p>
- wysiwyg: |-
- <p>#5 bolt</p>
-04_02__leaf_blocks__atx_headings__004:
- canonical: |
- <p>## foo</p>
- static: |-
- <p data-sourcepos="1:1-1:27" dir="auto"><span>#</span># foo</p>
- wysiwyg: |-
- <p>## foo</p>
-04_02__leaf_blocks__atx_headings__005:
- canonical: |
- <h1>foo <em>bar</em> *baz*</h1>
- static: |-
- <h1 data-sourcepos="1:1-1:19" dir="auto">
- <a id="user-content-foo-bar-baz" class="anchor" href="#foo-bar-baz" aria-hidden="true"></a>foo <em>bar</em> *baz*</h1>
- wysiwyg: |-
- <h1>foo <em>bar</em> *baz*</h1>
-04_02__leaf_blocks__atx_headings__006:
- canonical: |
- <h1>foo</h1>
- static: |-
- <h1 data-sourcepos="1:1-1:22" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h1>
- wysiwyg: |-
- <h1>foo</h1>
-04_02__leaf_blocks__atx_headings__007:
- canonical: |
- <h3>foo</h3>
- <h2>foo</h2>
- <h1>foo</h1>
- static: |-
- <h3 data-sourcepos="1:2-1:8" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h3>
- <h2 data-sourcepos="2:3-2:8" dir="auto">
- <a id="user-content-foo-1" class="anchor" href="#foo-1" aria-hidden="true"></a>foo</h2>
- <h1 data-sourcepos="3:4-3:8" dir="auto">
- <a id="user-content-foo-2" class="anchor" href="#foo-2" aria-hidden="true"></a>foo</h1>
- wysiwyg: |-
- <h3>foo</h3>
-04_02__leaf_blocks__atx_headings__008:
- canonical: |
- <pre><code># foo
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-1:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"># foo</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code># foo</code></pre>
-04_02__leaf_blocks__atx_headings__009:
- canonical: |
- <p>foo
- # bar</p>
- static: |-
- <p data-sourcepos="1:1-2:9" dir="auto">foo
- # bar</p>
- wysiwyg: |-
- <p>foo
- # bar</p>
-04_02__leaf_blocks__atx_headings__010:
- canonical: |
- <h2>foo</h2>
- <h3>bar</h3>
- static: |-
- <h2 data-sourcepos="1:1-1:6" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h2>
- <h3 data-sourcepos="2:3-2:11" dir="auto">
- <a id="user-content-bar" class="anchor" href="#bar" aria-hidden="true"></a>bar</h3>
- wysiwyg: |-
- <h2>foo</h2>
-04_02__leaf_blocks__atx_headings__011:
- canonical: |
- <h1>foo</h1>
- <h5>foo</h5>
- static: |-
- <h1 data-sourcepos="1:1-1:5" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h1>
- <h5 data-sourcepos="2:1-2:9" dir="auto">
- <a id="user-content-foo-1" class="anchor" href="#foo-1" aria-hidden="true"></a>foo</h5>
- wysiwyg: |-
- <h1>foo</h1>
-04_02__leaf_blocks__atx_headings__012:
- canonical: |
- <h3>foo</h3>
- static: |-
- <h3 data-sourcepos="1:1-1:7" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h3>
- wysiwyg: |-
- <h3>foo</h3>
-04_02__leaf_blocks__atx_headings__013:
- canonical: |
- <h3>foo ### b</h3>
- static: |-
- <h3 data-sourcepos="1:1-1:13" dir="auto">
- <a id="user-content-foo-b" class="anchor" href="#foo-b" aria-hidden="true"></a>foo ### b</h3>
- wysiwyg: |-
- <h3>foo ### b</h3>
-04_02__leaf_blocks__atx_headings__014:
- canonical: |
- <h1>foo#</h1>
- static: |-
- <h1 data-sourcepos="1:1-1:6" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo#</h1>
- wysiwyg: |-
- <h1>foo#</h1>
-04_02__leaf_blocks__atx_headings__015:
- canonical: |
- <h3>foo ###</h3>
- <h2>foo ###</h2>
- <h1>foo #</h1>
- static: |-
- <h3 data-sourcepos="1:1-1:32" dir="auto">
- <a id="user-content-foo-" class="anchor" href="#foo-" aria-hidden="true"></a>foo <span>#</span>##</h3>
- <h2 data-sourcepos="2:1-2:31" dir="auto">
- <a id="user-content-foo--1" class="anchor" href="#foo--1" aria-hidden="true"></a>foo #<span>#</span>#</h2>
- <h1 data-sourcepos="3:1-3:28" dir="auto">
- <a id="user-content-foo--2" class="anchor" href="#foo--2" aria-hidden="true"></a>foo <span>#</span>
- </h1>
- wysiwyg: |-
- <h3>foo ###</h3>
-04_02__leaf_blocks__atx_headings__016:
- canonical: |
- <hr />
- <h2>foo</h2>
- <hr />
- static: |-
- <hr data-sourcepos="1:1-1:4">
- <h2 data-sourcepos="2:1-2:6" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h2>
- <hr data-sourcepos="3:1-3:4">
- wysiwyg: |-
- <hr>
-04_02__leaf_blocks__atx_headings__017:
- canonical: |
- <p>Foo bar</p>
- <h1>baz</h1>
- <p>Bar foo</p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto">Foo bar</p>
- <h1 data-sourcepos="2:1-2:5" dir="auto">
- <a id="user-content-baz" class="anchor" href="#baz" aria-hidden="true"></a>baz</h1>
- <p data-sourcepos="3:1-3:7" dir="auto">Bar foo</p>
- wysiwyg: |-
- <p>Foo bar</p>
-04_02__leaf_blocks__atx_headings__018:
- canonical: |
- <h2></h2>
- <h1></h1>
- <h3></h3>
- static: |-
- <h2 data-sourcepos="1:1-1:3" dir="auto"></h2>
- <h1 data-sourcepos="2:1-2:1" dir="auto"></h1>
- <h3 data-sourcepos="3:1-3:3" dir="auto"></h3>
- wysiwyg: |-
- <h2></h2>
-04_03__leaf_blocks__setext_headings__001:
- canonical: |
- <h1>Foo <em>bar</em></h1>
- <h2>Foo <em>bar</em></h2>
- static: |-
- <h1 data-sourcepos="1:1-3:0" dir="auto">
- <a id="user-content-foo-bar" class="anchor" href="#foo-bar" aria-hidden="true"></a>Foo <em>bar</em>
- </h1>
- <h2 data-sourcepos="4:1-5:9" dir="auto">
- <a id="user-content-foo-bar-1" class="anchor" href="#foo-bar-1" aria-hidden="true"></a>Foo <em>bar</em>
- </h2>
- wysiwyg: |-
- <h1>Foo <em>bar</em></h1>
-04_03__leaf_blocks__setext_headings__002:
- canonical: |
- <h1>Foo <em>bar
- baz</em></h1>
- static: |-
- <h1 data-sourcepos="1:1-3:4" dir="auto">
- <a id="user-content-foo-barbaz" class="anchor" href="#foo-barbaz" aria-hidden="true"></a>Foo <em>bar
- baz</em>
- </h1>
- wysiwyg: |-
- <h1>Foo <em>bar
- baz</em></h1>
-04_03__leaf_blocks__setext_headings__003:
- canonical: |
- <h1>Foo <em>bar
- baz</em></h1>
- static: |-
- <h1 data-sourcepos="1:3-3:4" dir="auto">
- <a id="user-content-foo-barbaz" class="anchor" href="#foo-barbaz" aria-hidden="true"></a>Foo <em>bar
- baz</em>
- </h1>
- wysiwyg: |-
- <h1>Foo <em>bar
- baz</em></h1>
-04_03__leaf_blocks__setext_headings__004:
- canonical: |
- <h2>Foo</h2>
- <h1>Foo</h1>
- static: |-
- <h2 data-sourcepos="1:1-3:0" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h2>
- <h1 data-sourcepos="4:1-5:1" dir="auto">
- <a id="user-content-foo-1" class="anchor" href="#foo-1" aria-hidden="true"></a>Foo</h1>
- wysiwyg: |-
- <h2>Foo</h2>
-04_03__leaf_blocks__setext_headings__005:
- canonical: |
- <h2>Foo</h2>
- <h2>Foo</h2>
- <h1>Foo</h1>
- static: |-
- <h2 data-sourcepos="1:4-3:0" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h2>
- <h2 data-sourcepos="4:3-6:0" dir="auto">
- <a id="user-content-foo-1" class="anchor" href="#foo-1" aria-hidden="true"></a>Foo</h2>
- <h1 data-sourcepos="7:3-8:5" dir="auto">
- <a id="user-content-foo-2" class="anchor" href="#foo-2" aria-hidden="true"></a>Foo</h1>
- wysiwyg: |-
- <h2>Foo</h2>
-04_03__leaf_blocks__setext_headings__006:
- canonical: |
- <pre><code>Foo
- ---
-
- Foo
- </code></pre>
- <hr />
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-4:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">Foo</span>
- <span id="LC2" class="line" lang="plaintext">---</span>
- <span id="LC3" class="line" lang="plaintext"></span>
- <span id="LC4" class="line" lang="plaintext">Foo</span></code></pre>
- <copy-code></copy-code>
- </div>
- <hr data-sourcepos="5:1-5:3">
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>Foo
- ---
-
- Foo</code></pre>
-04_03__leaf_blocks__setext_headings__007:
- canonical: |
- <h2>Foo</h2>
- static: |-
- <h2 data-sourcepos="1:1-2:13" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h2>
- wysiwyg: |-
- <h2>Foo</h2>
-04_03__leaf_blocks__setext_headings__008:
- canonical: |
- <p>Foo
- ---</p>
- static: |-
- <p data-sourcepos="1:1-2:7" dir="auto">Foo
- ---</p>
- wysiwyg: |-
- <p>Foo
- ---</p>
-04_03__leaf_blocks__setext_headings__009:
- canonical: |
- <p>Foo
- = =</p>
- <p>Foo</p>
- <hr />
- static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">Foo
- = =</p>
- <p data-sourcepos="4:1-4:3" dir="auto">Foo</p>
- <hr data-sourcepos="5:1-5:5">
- wysiwyg: |-
- <p>Foo
- = =</p>
-04_03__leaf_blocks__setext_headings__010:
- canonical: |
- <h2>Foo</h2>
- static: |-
- <h2 data-sourcepos="1:1-2:5" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h2>
- wysiwyg: |-
- <h2>Foo</h2>
-04_03__leaf_blocks__setext_headings__011:
- canonical: |
- <h2>Foo\</h2>
- static: |-
- <h2 data-sourcepos="1:1-2:4" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo\</h2>
- wysiwyg: |-
- <h2>Foo\</h2>
-04_03__leaf_blocks__setext_headings__012:
- canonical: |
- <h2>`Foo</h2>
- <p>`</p>
- <h2>&lt;a title=&quot;a lot</h2>
- <p>of dashes&quot;/&gt;</p>
- static: |-
- <h2 data-sourcepos="1:1-3:1" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>`Foo</h2>
- <p data-sourcepos="3:1-3:1" dir="auto">`</p>
- <h2 data-sourcepos="5:1-7:12" dir="auto">
- <a id="user-content-a-titlea-lot" class="anchor" href="#a-titlea-lot" aria-hidden="true"></a>&lt;a title="a lot</h2>
- <p data-sourcepos="7:1-7:12" dir="auto">of dashes"/&gt;</p>
- wysiwyg: |-
- <h2>`Foo</h2>
-04_03__leaf_blocks__setext_headings__013:
- canonical: |
- <blockquote>
- <p>Foo</p>
- </blockquote>
- <hr />
- static: |-
- <blockquote data-sourcepos="1:1-1:5" dir="auto">
- <p data-sourcepos="1:3-1:5">Foo</p>
- </blockquote>
- <hr data-sourcepos="2:1-2:3">
- wysiwyg: |-
- <blockquote multiline="false"><p>Foo</p></blockquote>
-04_03__leaf_blocks__setext_headings__014:
- canonical: |
- <blockquote>
- <p>foo
- bar
- ===</p>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-3:3" dir="auto">
- <p data-sourcepos="1:3-3:3">foo
- bar
- ===</p>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><p>foo
- bar
- ===</p></blockquote>
-04_03__leaf_blocks__setext_headings__015:
- canonical: |
- <ul>
- <li>Foo</li>
- </ul>
- <hr />
- static: |-
- <ul data-sourcepos="1:1-1:5" dir="auto">
- <li data-sourcepos="1:1-1:5">Foo</li>
- </ul>
- <hr data-sourcepos="2:1-2:3">
- wysiwyg: |-
- <ul bullet="*"><li><p>Foo</p></li></ul>
-04_03__leaf_blocks__setext_headings__016:
- canonical: |
- <h2>Foo
- Bar</h2>
- static: |-
- <h2 data-sourcepos="1:1-3:3" dir="auto">
- <a id="user-content-foobar" class="anchor" href="#foobar" aria-hidden="true"></a>Foo
- Bar</h2>
- wysiwyg: |-
- <h2>Foo
- Bar</h2>
-04_03__leaf_blocks__setext_headings__017:
- canonical: |
- <hr />
- <h2>Foo</h2>
- <h2>Bar</h2>
- <p>Baz</p>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-yaml" lang="yaml" data-lang-params="frontmatter" v-pre="true"><code><span id="LC1" class="line" lang="yaml"><span class="s">Foo</span></span></code></pre>
- <copy-code></copy-code>
- </div>
- <h2 data-sourcepos="4:1-6:3" dir="auto">
- <a id="user-content-bar" class="anchor" href="#bar" aria-hidden="true"></a>Bar</h2>
- <p data-sourcepos="6:1-6:3" dir="auto">Baz</p>
- wysiwyg: |-
- <hr>
-04_03__leaf_blocks__setext_headings__018:
- canonical: |
- <p>====</p>
- static: |-
- <p data-sourcepos="2:1-2:4" dir="auto">====</p>
- wysiwyg: |-
- <p>====</p>
-04_03__leaf_blocks__setext_headings__019:
- canonical: |
- <hr />
- <hr />
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-2:3" class="code highlight js-syntax-highlight language-yaml" lang="yaml" data-lang-params="frontmatter" v-pre="true"><code></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <hr>
-04_03__leaf_blocks__setext_headings__020:
- canonical: |
- <ul>
- <li>foo</li>
- </ul>
- <hr />
- static: |-
- <ul data-sourcepos="1:1-1:5" dir="auto">
- <li data-sourcepos="1:1-1:5">foo</li>
- </ul>
- <hr data-sourcepos="2:1-2:5">
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p></li></ul>
-04_03__leaf_blocks__setext_headings__021:
- canonical: |
- <pre><code>foo
- </code></pre>
- <hr />
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-1:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
- <copy-code></copy-code>
- </div>
- <hr data-sourcepos="2:1-2:3">
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>foo</code></pre>
-04_03__leaf_blocks__setext_headings__022:
- canonical: |
- <blockquote>
- <p>foo</p>
- </blockquote>
- <hr />
- static: |-
- <blockquote data-sourcepos="1:1-1:5" dir="auto">
- <p data-sourcepos="1:3-1:5">foo</p>
- </blockquote>
- <hr data-sourcepos="2:1-2:5">
- wysiwyg: |-
- <blockquote multiline="false"><p>foo</p></blockquote>
-04_03__leaf_blocks__setext_headings__023:
- canonical: |
- <h2>&gt; foo</h2>
- static: |-
- <h2 data-sourcepos="1:1-2:6" dir="auto">
- <a id="user-content--foo" class="anchor" href="#-foo" aria-hidden="true"></a>&gt; foo</h2>
- wysiwyg: |-
- <h2>&gt; foo</h2>
-04_03__leaf_blocks__setext_headings__024:
- canonical: |
- <p>Foo</p>
- <h2>bar</h2>
- <p>baz</p>
- static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">Foo</p>
- <h2 data-sourcepos="3:1-5:3" dir="auto">
- <a id="user-content-bar" class="anchor" href="#bar" aria-hidden="true"></a>bar</h2>
- <p data-sourcepos="5:1-5:3" dir="auto">baz</p>
- wysiwyg: |-
- <p>Foo</p>
-04_03__leaf_blocks__setext_headings__025:
- canonical: |
- <p>Foo
- bar</p>
- <hr />
- <p>baz</p>
- static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">Foo
- bar</p>
- <hr data-sourcepos="4:1-5:0">
- <p data-sourcepos="6:1-6:3" dir="auto">baz</p>
- wysiwyg: |-
- <p>Foo
- bar</p>
-04_03__leaf_blocks__setext_headings__026:
- canonical: |
- <p>Foo
- bar</p>
- <hr />
- <p>baz</p>
- static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">Foo
- bar</p>
- <hr data-sourcepos="3:1-3:5">
- <p data-sourcepos="4:1-4:3" dir="auto">baz</p>
- wysiwyg: |-
- <p>Foo
- bar</p>
-04_03__leaf_blocks__setext_headings__027:
- canonical: |
- <p>Foo
- bar
- ---
- baz</p>
- static: |-
- <p data-sourcepos="1:1-4:3" dir="auto">Foo
- bar
- ---
- baz</p>
- wysiwyg: |-
- <p>Foo
- bar
- ---
- baz</p>
-04_04__leaf_blocks__indented_code_blocks__001:
- canonical: |
- <pre><code>a simple
- indented code block
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-2:25" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">a simple</span>
- <span id="LC2" class="line" lang="plaintext"> indented code block</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>a simple
- indented code block</code></pre>
-04_04__leaf_blocks__indented_code_blocks__002:
- canonical: |
- <ul>
- <li>
- <p>foo</p>
- <p>bar</p>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:3-3:7" dir="auto">
- <li data-sourcepos="1:3-3:7">
- <p data-sourcepos="1:5-1:7">foo</p>
- <p data-sourcepos="3:5-3:7">bar</p>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p><p>bar</p></li></ul>
-04_04__leaf_blocks__indented_code_blocks__003:
- canonical: |
- <ol>
- <li>
- <p>foo</p>
- <ul>
- <li>bar</li>
- </ul>
- </li>
- </ol>
- static: |-
- <ol data-sourcepos="1:1-3:9" dir="auto">
- <li data-sourcepos="1:1-3:9">
- <p data-sourcepos="1:5-1:7">foo</p>
- <ul data-sourcepos="3:5-3:9">
- <li data-sourcepos="3:5-3:9">bar</li>
- </ul>
- </li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>foo</p><ul bullet="*"><li><p>bar</p></li></ul></li></ol>
-04_04__leaf_blocks__indented_code_blocks__004:
- canonical: |
- <pre><code>&lt;a/&gt;
- *hi*
-
- - one
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-4:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;a/&gt;</span>
- <span id="LC2" class="line" lang="plaintext">*hi*</span>
- <span id="LC3" class="line" lang="plaintext"></span>
- <span id="LC4" class="line" lang="plaintext">- one</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>&lt;a/&gt;
- *hi*
-
- - one</code></pre>
-04_04__leaf_blocks__indented_code_blocks__005:
- canonical: |
- <pre><code>chunk1
-
- chunk2
-
-
-
- chunk3
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-7:10" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">chunk1</span>
- <span id="LC2" class="line" lang="plaintext"></span>
- <span id="LC3" class="line" lang="plaintext">chunk2</span>
- <span id="LC4" class="line" lang="plaintext"></span>
- <span id="LC5" class="line" lang="plaintext"></span>
- <span id="LC6" class="line" lang="plaintext"></span>
- <span id="LC7" class="line" lang="plaintext">chunk3</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>chunk1
-
- chunk2
-
-
-
- chunk3</code></pre>
-04_04__leaf_blocks__indented_code_blocks__006:
- canonical: "<pre><code>chunk1\n \n chunk2\n</code></pre>\n"
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-3:12" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">chunk1</span>
- <span id="LC2" class="line" lang="plaintext"> </span>
- <span id="LC3" class="line" lang="plaintext"> chunk2</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: "<pre class=\"content-editor-code-block undefined code highlight\"><code>chunk1\n
- \ \n chunk2</code></pre>"
-04_04__leaf_blocks__indented_code_blocks__007:
- canonical: |
- <p>Foo
- bar</p>
- static: |-
- <p data-sourcepos="1:1-2:7" dir="auto">Foo
- bar</p>
- wysiwyg: |-
- <p>Foo
- bar</p>
-04_04__leaf_blocks__indented_code_blocks__008:
- canonical: |
- <pre><code>foo
- </code></pre>
- <p>bar</p>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-1:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
- <copy-code></copy-code>
- </div>
- <p data-sourcepos="2:1-2:3" dir="auto">bar</p>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>foo</code></pre>
-04_04__leaf_blocks__indented_code_blocks__009:
- canonical: |
- <h1>Heading</h1>
- <pre><code>foo
- </code></pre>
- <h2>Heading</h2>
- <pre><code>foo
- </code></pre>
- <hr />
- static: |-
- <h1 data-sourcepos="1:1-1:9" dir="auto">
- <a id="user-content-heading" class="anchor" href="#heading" aria-hidden="true"></a>Heading</h1>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="2:5-2:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
- <copy-code></copy-code>
- </div>
- <h2 data-sourcepos="3:1-5:7" dir="auto">
- <a id="user-content-heading-1" class="anchor" href="#heading-1" aria-hidden="true"></a>Heading</h2>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="5:5-5:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
- <copy-code></copy-code>
- </div>
- <hr data-sourcepos="6:1-6:4">
- wysiwyg: |-
- <h1>Heading</h1>
-04_04__leaf_blocks__indented_code_blocks__010:
- canonical: |
- <pre><code> foo
- bar
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-2:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> foo</span>
- <span id="LC2" class="line" lang="plaintext">bar</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code> foo
- bar</code></pre>
-04_04__leaf_blocks__indented_code_blocks__011:
- canonical: |
- <pre><code>foo
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="3:5-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>foo</code></pre>
-04_04__leaf_blocks__indented_code_blocks__012:
- canonical: "<pre><code>foo \n</code></pre>\n"
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-1:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo </span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>foo </code></pre>
-04_05__leaf_blocks__fenced_code_blocks__001:
- canonical: |
- <pre><code>&lt;
- &gt;
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;</span>
- <span id="LC2" class="line" lang="plaintext"> &gt;</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>&lt;
- &gt;</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__002:
- canonical: |
- <pre><code>&lt;
- &gt;
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;</span>
- <span id="LC2" class="line" lang="plaintext"> &gt;</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>&lt;
- &gt;</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__003:
- canonical: |
- <p><code>foo</code></p>
- static: |-
- <p data-sourcepos="1:1-3:2" dir="auto"><code>foo</code></p>
- wysiwyg: |-
- <p><code>foo</code></p>
-04_05__leaf_blocks__fenced_code_blocks__004:
- canonical: |
- <pre><code>aaa
- ~~~
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
- <span id="LC2" class="line" lang="plaintext">~~~</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>aaa
- ~~~</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__005:
- canonical: |
- <pre><code>aaa
- ```
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
- <span id="LC2" class="line" lang="plaintext">```</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>aaa
- ```</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__006:
- canonical: |
- <pre><code>aaa
- ```
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-4:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
- <span id="LC2" class="line" lang="plaintext">```</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>aaa
- ```</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__007:
- canonical: |
- <pre><code>aaa
- ~~~
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-4:4" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
- <span id="LC2" class="line" lang="plaintext">~~~</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>aaa
- ~~~</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__008:
- canonical: |
- <pre><code></code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-1:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code></code></pre>
-04_05__leaf_blocks__fenced_code_blocks__009:
- canonical: |
- <pre><code>
- ```
- aaa
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"></span>
- <span id="LC2" class="line" lang="plaintext">```</span>
- <span id="LC3" class="line" lang="plaintext">aaa</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>
- ```
- aaa</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__010:
- canonical: |
- <blockquote>
- <pre><code>aaa
- </code></pre>
- </blockquote>
- <p>bbb</p>
- static: |-
- <blockquote data-sourcepos="1:1-2:5" dir="auto">
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:3-3:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span></code></pre>
- <copy-code></copy-code>
- </div>
- </blockquote>
- <p data-sourcepos="4:1-4:3" dir="auto">bbb</p>
- wysiwyg: |-
- <blockquote multiline="false"><pre class="content-editor-code-block undefined code highlight"><code>aaa</code></pre></blockquote>
-04_05__leaf_blocks__fenced_code_blocks__011:
- canonical: "<pre><code>\n \n</code></pre>\n"
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"></span>
- <span id="LC2" class="line" lang="plaintext"> </span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>
- </code></pre>
-04_05__leaf_blocks__fenced_code_blocks__012:
- canonical: |
- <pre><code></code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-2:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code></code></pre>
-04_05__leaf_blocks__fenced_code_blocks__013:
- canonical: |
- <pre><code>aaa
- aaa
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:2-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
- <span id="LC2" class="line" lang="plaintext">aaa</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>aaa
- aaa</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__014:
- canonical: |
- <pre><code>aaa
- aaa
- aaa
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:3-5:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
- <span id="LC2" class="line" lang="plaintext">aaa</span>
- <span id="LC3" class="line" lang="plaintext">aaa</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>aaa
- aaa
- aaa</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__015:
- canonical: |
- <pre><code>aaa
- aaa
- aaa
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:4-5:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
- <span id="LC2" class="line" lang="plaintext"> aaa</span>
- <span id="LC3" class="line" lang="plaintext">aaa</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>aaa
- aaa
- aaa</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__016:
- canonical: |
- <pre><code>```
- aaa
- ```
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-3:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">```</span>
- <span id="LC2" class="line" lang="plaintext">aaa</span>
- <span id="LC3" class="line" lang="plaintext">```</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>```
- aaa
- ```</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__017:
- canonical: |
- <pre><code>aaa
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-3:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>aaa</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__018:
- canonical: |
- <pre><code>aaa
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:4-3:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>aaa</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__019:
- canonical: |
- <pre><code>aaa
- ```
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-3:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
- <span id="LC2" class="line" lang="plaintext"> ```</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>aaa
- ```</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__020:
- canonical: |
- <p><code> </code>
- aaa</p>
- static: |-
- <p data-sourcepos="1:1-2:3" dir="auto"><code> </code>
- aaa</p>
- wysiwyg: |-
- <p><code>
- aaa</code></p>
-04_05__leaf_blocks__fenced_code_blocks__021:
- canonical: |
- <pre><code>aaa
- ~~~ ~~
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-3:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
- <span id="LC2" class="line" lang="plaintext">~~~ ~~</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>aaa
- ~~~ ~~</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__022:
- canonical: |
- <p>foo</p>
- <pre><code>bar
- </code></pre>
- <p>baz</p>
- static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">foo</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="2:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>
- <copy-code></copy-code>
- </div>
- <p data-sourcepos="5:1-5:3" dir="auto">baz</p>
- wysiwyg: |-
- <p>foo</p>
-04_05__leaf_blocks__fenced_code_blocks__023:
- canonical: |
- <h2>foo</h2>
- <pre><code>bar
- </code></pre>
- <h1>baz</h1>
- static: |-
- <h2 data-sourcepos="1:1-3:3" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h2>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="3:1-5:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>
- <copy-code></copy-code>
- </div>
- <h1 data-sourcepos="6:1-6:5" dir="auto">
- <a id="user-content-baz" class="anchor" href="#baz" aria-hidden="true"></a>baz</h1>
- wysiwyg: |-
- <h2>foo</h2>
-04_05__leaf_blocks__fenced_code_blocks__024:
- canonical: |
- <pre><code class="language-ruby">def foo(x)
- return 3
- end
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-5:3" class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">foo</span><span class="p">(</span><span class="n">x</span><span class="p">)</span></span>
- <span id="LC2" class="line" lang="ruby"> <span class="k">return</span> <span class="mi">3</span></span>
- <span id="LC3" class="line" lang="ruby"><span class="k">end</span></span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre language="ruby" class="content-editor-code-block undefined code highlight"><code>def foo(x)
- return 3
- end</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__025:
- canonical: |
- <pre><code class="language-ruby">def foo(x)
- return 3
- end
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-5:7" class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">foo</span><span class="p">(</span><span class="n">x</span><span class="p">)</span></span>
- <span id="LC2" class="line" lang="ruby"> <span class="k">return</span> <span class="mi">3</span></span>
- <span id="LC3" class="line" lang="ruby"><span class="k">end</span></span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre language="ruby" class="content-editor-code-block undefined code highlight"><code>def foo(x)
- return 3
- end</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__026:
- canonical: |
- <pre><code class="language-;"></code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-2:4" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang=";" v-pre="true"><code></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre language=";" class="content-editor-code-block undefined code highlight"><code></code></pre>
-04_05__leaf_blocks__fenced_code_blocks__027:
- canonical: |
- <p><code>aa</code>
- foo</p>
- static: |-
- <p data-sourcepos="1:1-2:3" dir="auto"><code>aa</code>
- foo</p>
- wysiwyg: |-
- <p><code>aa</code>
- foo</p>
-04_05__leaf_blocks__fenced_code_blocks__028:
- canonical: |
- <pre><code class="language-aa">foo
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="aa" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre language="aa" class="content-editor-code-block undefined code highlight"><code>foo</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__029:
- canonical: |
- <pre><code>``` aaa
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">``` aaa</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>``` aaa</code></pre>
-04_06__leaf_blocks__html_blocks__001:
- canonical: |
- <table><tr><td>
- <pre>
- **Hello**,
- <p><em>world</em>.
- </pre></p>
- </td></tr></table>
- static: |-
- <table dir="auto"><tr><td>
- <pre>
- **Hello**,
- <p data-sourcepos="5:1-6:6"><em>world</em>.
- </p></pre>
- </td></tr></table>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__002:
- canonical: |
- <table>
- <tr>
- <td>
- hi
- </td>
- </tr>
- </table>
- <p>okay.</p>
- static: |-
- <table dir="auto">
- <tr>
- <td>
- hi
- </td>
- </tr>
- </table>
- <p data-sourcepos="9:1-9:5" dir="auto">okay.</p>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__003:
- canonical: |2
- <div>
- *hello*
- <foo><a>
- static: |2-
- <div>
- *hello*
- <a></a>
- </div>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__004:
- canonical: |
- </div>
- *foo*
- static: |2-
-
- *foo*
- wysiwyg: |-
- <p>
- *foo*</p>
-04_06__leaf_blocks__html_blocks__005:
- canonical: |
- <DIV CLASS="foo">
- <p><em>Markdown</em></p>
- </DIV>
- static: |-
- <div>
- <p data-sourcepos="3:1-3:10"><em>Markdown</em></p>
- </div>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__006:
- canonical: |
- <div id="foo"
- class="bar">
- </div>
- static: |-
- <div>
- </div>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__007:
- canonical: |
- <div id="foo" class="bar
- baz">
- </div>
- static: |-
- <div>
- </div>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__008:
- canonical: |
- <div>
- *foo*
- <p><em>bar</em></p>
- static: |-
- <div>
- *foo*
- <p data-sourcepos="4:1-4:5"><em>bar</em></p>
- </div>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__009:
- canonical: |
- <div id="foo"
- *hi*
- static: |-
- <div></div>
- wysiwyg: |-
- <p></p>
-04_06__leaf_blocks__html_blocks__010:
- canonical: |
- <div class
- foo
- static: |-
- <div></div>
- wysiwyg: |-
- <p></p>
-04_06__leaf_blocks__html_blocks__011:
- canonical: |
- <div *???-&&&-<---
- *foo*
- static: |-
- <div></div>
- wysiwyg: |-
- <p></p>
-04_06__leaf_blocks__html_blocks__012:
- canonical: |
- <div><a href="bar">*foo*</a></div>
- static: |-
- <div><a href="bar">*foo*</a></div>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__013:
- canonical: |
- <table><tr><td>
- foo
- </td></tr></table>
- static: |-
- <table dir="auto"><tr><td>
- foo
- </td></tr></table>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__014:
- canonical: |
- <div></div>
- ``` c
- int x = 33;
- ```
- static: |-
- <div></div>
- ``` c
- int x = 33;
- ```
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__015:
- canonical: |
- <a href="foo">
- *bar*
- </a>
- static: |-
- <a href="foo">
- *bar*
- </a>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="foo">
- *bar*
- </a></p>
-04_06__leaf_blocks__html_blocks__016:
- canonical: |
- <Warning>
- *bar*
- </Warning>
- static: |2
-
- *bar*
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "warning" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__017:
- canonical: |
- <i class="foo">
- *bar*
- </i>
- static: |-
- <i>
- *bar*
- </i>
- wysiwyg: |-
- <p><em>
- *bar*
- </em></p>
-04_06__leaf_blocks__html_blocks__018:
- canonical: |
- </ins>
- *bar*
- static: |2-
-
- *bar*
- wysiwyg: |-
- <p>
- *bar*</p>
-04_06__leaf_blocks__html_blocks__019:
- canonical: |
- <del>
- *foo*
- </del>
- static: |-
- <del>
- *foo*
- </del>
- wysiwyg: |-
- <p><s>
- *foo*
- </s></p>
-04_06__leaf_blocks__html_blocks__020:
- canonical: |
- <del>
- <p><em>foo</em></p>
- </del>
- static: |-
- <del>
- <p data-sourcepos="3:1-3:5"><em>foo</em></p>
- </del>
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-04_06__leaf_blocks__html_blocks__021:
- canonical: |
- <p><del><em>foo</em></del></p>
- static: |-
- <p data-sourcepos="1:1-1:16" dir="auto"><del><em>foo</em></del></p>
- wysiwyg: |-
- <p><em><s>foo</s></em></p>
-04_06__leaf_blocks__html_blocks__022:
- canonical: |
- <pre language="haskell"><code>
- import Text.HTML.TagSoup
-
- main :: IO ()
- main = print $ parseTags tags
- </code></pre>
- <p>okay</p>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"></span>
- <span id="LC2" class="line" lang="plaintext">import Text.HTML.TagSoup</span>
- <span id="LC3" class="line" lang="plaintext"></span>
- <span id="LC4" class="line" lang="plaintext">main :: IO ()</span>
- <span id="LC5" class="line" lang="plaintext">main = print $ parseTags tags</span></code></pre>
- <copy-code></copy-code>
- </div>
- <p data-sourcepos="7:1-7:4" dir="auto">okay</p>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>
- import Text.HTML.TagSoup
-
- main :: IO ()
- main = print $ parseTags tags</code></pre>
-04_06__leaf_blocks__html_blocks__023:
- canonical: |
- <script type="text/javascript">
- // JavaScript example
-
- document.getElementById("demo").innerHTML = "Hello JavaScript!";
- </script>
- <p>okay</p>
- static: |2-
-
- <p data-sourcepos="6:1-6:4" dir="auto">okay</p>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "script" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__024:
- canonical: |
- <style
- type="text/css">
- h1 {color:red;}
-
- p {color:blue;}
- </style>
- <p>okay</p>
- static: |2-
-
- h1 {color:red;}
-
- p {color:blue;}
-
- <p data-sourcepos="7:1-7:4" dir="auto">okay</p>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "style" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__025:
- canonical: |
- <style
- type="text/css">
-
- foo
- static: |2-
-
-
- foo
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "style" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__026:
- canonical: |
- <blockquote>
- <div>
- foo
- </blockquote>
- <p>bar</p>
- static: |-
- <blockquote data-sourcepos="1:1-2:5" dir="auto">
- <div>
- foo
-
- <p data-sourcepos="4:1-4:3">bar</p>
- </div>
- </blockquote>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__027:
- canonical: |
- <ul>
- <li>
- <div>
- </li>
- <li>foo</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-2:5" dir="auto">
- <li data-sourcepos="1:1-1:7">
- <div>
-
- <li data-sourcepos="2:1-2:5">foo</li>
- </div>
- </li>
- </ul>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__028:
- canonical: |
- <style>p{color:red;}</style>
- <p><em>foo</em></p>
- static: |-
- p{color:red;}
- <p data-sourcepos="2:1-2:5" dir="auto"><em>foo</em></p>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "style" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__029:
- canonical: |
- <!-- foo -->*bar*
- <p><em>baz</em></p>
- static: |-
- *bar*
- <p data-sourcepos="2:1-2:5" dir="auto"><em>baz</em></p>
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-04_06__leaf_blocks__html_blocks__030:
- canonical: |
- <script>
- foo
- </script>1. *bar*
- static: |-
- 1. *bar*
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "script" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__031:
- canonical: |
- <!-- Foo
-
- bar
- baz -->
- <p>okay</p>
- static: |2-
-
- <p data-sourcepos="5:1-5:4" dir="auto">okay</p>
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-04_06__leaf_blocks__html_blocks__032:
- canonical: |
- <?php
-
- echo '>';
-
- ?>
- <p>okay</p>
- static: |-
- <?php echo '>';
-
- ?&gt;
- <p data-sourcepos="6:1-6:4" dir="auto">okay</p>
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-04_06__leaf_blocks__html_blocks__033:
- canonical: |
- <!DOCTYPE html>
- static: ""
- wysiwyg: |-
- <p></p>
-04_06__leaf_blocks__html_blocks__034:
- canonical: |
- <![CDATA[
- function matchwo(a,b)
- {
- if (a < b && a < 0) then {
- return 1;
-
- } else {
-
- return 0;
- }
- }
- ]]>
- <p>okay</p>
- static: |2-
- &lt;![CDATA[
- function matchwo(a,b)
- {
- if (a &lt; b &amp;&amp; a &lt; 0) then {
- return 1;
-
- } else {
-
- return 0;
- }
- }
- ]]&gt;
- <p data-sourcepos="13:1-13:4" dir="auto">okay</p>
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-04_06__leaf_blocks__html_blocks__035:
- canonical: |2
- <!-- foo -->
- <pre><code>&lt;!-- foo --&gt;
- </code></pre>
- static: " \n<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre
- data-sourcepos=\"3:5-3:16\" class=\"code highlight js-syntax-highlight language-plaintext\"
- lang=\"plaintext\" data-canonical-lang=\"\" v-pre=\"true\"><code><span id=\"LC1\"
- class=\"line\" lang=\"plaintext\">&lt;!-- foo --&gt;</span></code></pre>\n<copy-code></copy-code>\n</div>"
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-04_06__leaf_blocks__html_blocks__036:
- canonical: |2
- <div>
- <pre><code>&lt;div&gt;
- </code></pre>
- static: |2-
- <div>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="3:5-3:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;div&gt;</span></code></pre>
- <copy-code></copy-code>
- </div>
- </div>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__037:
- canonical: |
- <p>Foo</p>
- <div>
- bar
- </div>
- static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">Foo</p>
- <div>
- bar
- </div>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__038:
- canonical: |
- <div>
- bar
- </div>
- *foo*
- static: |-
- <div>
- bar
- </div>
- *foo*
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__039:
- canonical: |
- <p>Foo
- <a href="bar">
- baz</p>
- static: |-
- <p data-sourcepos="1:1-3:3" dir="auto">Foo
- <a href="bar">
- baz</a></p>
- wysiwyg: |-
- <p>Foo
- <a target="_blank" rel="noopener noreferrer nofollow" href="bar">
- baz</a></p>
-04_06__leaf_blocks__html_blocks__040:
- canonical: |
- <div>
- <p><em>Emphasized</em> text.</p>
- </div>
- static: |-
- <div>
- <p data-sourcepos="3:1-3:18"><em>Emphasized</em> text.</p>
- </div>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__041:
- canonical: |
- <div>
- *Emphasized* text.
- </div>
- static: |-
- <div>
- *Emphasized* text.
- </div>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__042:
- canonical: |
- <table>
- <tr>
- <td>
- Hi
- </td>
- </tr>
- </table>
- static: |-
- <table dir="auto">
- <tr>
- <td>
- Hi
- </td>
- </tr>
- </table>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__043:
- canonical: |
- <table>
- <tr>
- <pre><code>&lt;td&gt;
- Hi
- &lt;/td&gt;
- </code></pre>
- </tr>
- </table>
- static: |-
- <table dir="auto">
- <tr>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="5:5-8:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;td&gt;</span>
- <span id="LC2" class="line" lang="plaintext"> Hi</span>
- <span id="LC3" class="line" lang="plaintext">&lt;/td&gt;</span></code></pre>
- <copy-code></copy-code>
- </div>
- </tr>
- </table>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_07__leaf_blocks__link_reference_definitions__001:
- canonical: |
- <p><a href="/url" title="title">foo</a></p>
- static: |-
- <p data-sourcepos="3:1-3:5" dir="auto"><a href="/url" title="title">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__002:
- canonical: |
- <p><a href="/url" title="the title">foo</a></p>
- static: |-
- <p data-sourcepos="5:1-5:5" dir="auto"><a href="/url" title="the title">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="the title">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__003:
- canonical: |
- <p><a href="my_(url)" title="title (with parens)">Foo*bar]</a></p>
- static: |-
- <p data-sourcepos="3:1-3:11" dir="auto"><a href="my_(url)" title="title (with parens)">Foo*bar]</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="my_(url)" title="title (with parens)">Foo*bar]</a></p>
-04_07__leaf_blocks__link_reference_definitions__004:
- canonical: |
- <p><a href="my%20url" title="title">Foo bar</a></p>
- static: |-
- <p data-sourcepos="5:1-5:9" dir="auto"><a href="my%20url" title="title">Foo bar</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="my%20url" title="title">Foo bar</a></p>
-04_07__leaf_blocks__link_reference_definitions__005:
- canonical: |
- <p><a href="/url" title="
- title
- line1
- line2
- ">foo</a></p>
- static: |-
- <p data-sourcepos="7:1-7:5" dir="auto"><a href="/url" title="
- title
- line1
- line2
- ">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="
- title
- line1
- line2
- ">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__006:
- canonical: |
- <p>[foo]: /url 'title</p>
- <p>with blank line'</p>
- <p>[foo]</p>
- static: |-
- <p data-sourcepos="1:1-1:18" dir="auto">[foo]: /url 'title</p>
- <p data-sourcepos="3:1-3:16" dir="auto">with blank line'</p>
- <p data-sourcepos="5:1-5:5" dir="auto">[foo]</p>
- wysiwyg: |-
- <p>[foo]: /url 'title</p>
-04_07__leaf_blocks__link_reference_definitions__007:
- canonical: |
- <p><a href="/url">foo</a></p>
- static: |-
- <p data-sourcepos="4:1-4:5" dir="auto"><a href="/url">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__008:
- canonical: |
- <p>[foo]:</p>
- <p>[foo]</p>
- static: |-
- <p data-sourcepos="1:1-1:6" dir="auto">[foo]:</p>
- <p data-sourcepos="3:1-3:5" dir="auto">[foo]</p>
- wysiwyg: |-
- <p>[foo]:</p>
-04_07__leaf_blocks__link_reference_definitions__009:
- canonical: |
- <p><a href="">foo</a></p>
- static: |-
- <p data-sourcepos="3:1-3:5" dir="auto"><a href="">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__010:
- canonical: |
- <p>[foo]: <bar>(baz)</p>
- <p>[foo]</p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto">[foo]: (baz)</p>
- <p data-sourcepos="3:1-3:5" dir="auto">[foo]</p>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "bar" not supported by this converter. Please, provide an specification.
-04_07__leaf_blocks__link_reference_definitions__011:
- canonical: |
- <p><a href="/url%5Cbar*baz" title="foo&quot;bar\baz">foo</a></p>
- static: |-
- <p data-sourcepos="3:1-3:5" dir="auto"><a href="/url%5Cbar*baz" title='foo"bar\baz'>foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url%5Cbar*baz" title="foo&quot;bar\baz">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__012:
- canonical: |
- <p><a href="url">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:5" dir="auto"><a href="url">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="url">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__013:
- canonical: |
- <p><a href="first">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:5" dir="auto"><a href="first">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="first">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__014:
- canonical: |
- <p><a href="/url">Foo</a></p>
- static: |-
- <p data-sourcepos="3:1-3:5" dir="auto"><a href="/url">Foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url">Foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__015:
- canonical: |
- <p><a href="/%CF%86%CE%BF%CF%85">αγω</a></p>
- static: |-
- <p data-sourcepos="3:1-3:8" dir="auto"><a href="/%CF%86%CE%BF%CF%85">αγω</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/%CF%86%CE%BF%CF%85">αγω</a></p>
-04_07__leaf_blocks__link_reference_definitions__016:
- canonical: ""
- static: ""
- wysiwyg: |-
- <p></p>
-04_07__leaf_blocks__link_reference_definitions__017:
- canonical: |
- <p>bar</p>
- static: |-
- <p data-sourcepos="1:1-4:3" dir="auto">bar</p>
- wysiwyg: |-
- <p>bar</p>
-04_07__leaf_blocks__link_reference_definitions__018:
- canonical: |
- <p>[foo]: /url &quot;title&quot; ok</p>
- static: |-
- <p data-sourcepos="1:1-1:22" dir="auto">[foo]: /url "title" ok</p>
- wysiwyg: |-
- <p>[foo]: /url "title" ok</p>
-04_07__leaf_blocks__link_reference_definitions__019:
- canonical: |
- <p>&quot;title&quot; ok</p>
- static: |-
- <p data-sourcepos="1:1-2:10" dir="auto">"title" ok</p>
- wysiwyg: |-
- <p>"title" ok</p>
-04_07__leaf_blocks__link_reference_definitions__020:
- canonical: |
- <pre><code>[foo]: /url &quot;title&quot;
- </code></pre>
- <p>[foo]</p>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-2:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">[foo]: /url "title"</span></code></pre>
- <copy-code></copy-code>
- </div>
- <p data-sourcepos="3:1-3:5" dir="auto">[foo]</p>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>[foo]: /url "title"</code></pre>
-04_07__leaf_blocks__link_reference_definitions__021:
- canonical: |
- <pre><code>[foo]: /url
- </code></pre>
- <p>[foo]</p>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">[foo]: /url</span></code></pre>
- <copy-code></copy-code>
- </div>
- <p data-sourcepos="5:1-5:5" dir="auto">[foo]</p>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>[foo]: /url</code></pre>
-04_07__leaf_blocks__link_reference_definitions__022:
- canonical: |
- <p>Foo
- [bar]: /baz</p>
- <p>[bar]</p>
- static: |-
- <p data-sourcepos="1:1-2:11" dir="auto">Foo
- [bar]: /baz</p>
- <p data-sourcepos="4:1-4:5" dir="auto">[bar]</p>
- wysiwyg: |-
- <p>Foo
- [bar]: /baz</p>
-04_07__leaf_blocks__link_reference_definitions__023:
- canonical: |
- <h1><a href="/url">Foo</a></h1>
- <blockquote>
- <p>bar</p>
- </blockquote>
- static: |-
- <h1 data-sourcepos="1:1-1:7" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a><a href="/url">Foo</a>
- </h1>
- <blockquote data-sourcepos="3:1-3:5" dir="auto">
- <p data-sourcepos="3:3-3:5">bar</p>
- </blockquote>
- wysiwyg: |-
- <h1><a target="_blank" rel="noopener noreferrer nofollow" href="/url">Foo</a></h1>
-04_07__leaf_blocks__link_reference_definitions__024:
- canonical: |
- <h1>bar</h1>
- <p><a href="/url">foo</a></p>
- static: |-
- <h1 data-sourcepos="1:1-4:5" dir="auto">
- <a id="user-content-bar" class="anchor" href="#bar" aria-hidden="true"></a>bar</h1>
- <p data-sourcepos="4:1-4:5" dir="auto"><a href="/url">foo</a></p>
- wysiwyg: |-
- <h1>bar</h1>
-04_07__leaf_blocks__link_reference_definitions__025:
- canonical: |
- <p>===
- <a href="/url">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-3:5" dir="auto">===
- <a href="/url">foo</a></p>
- wysiwyg: |-
- <p>===
- <a target="_blank" rel="noopener noreferrer nofollow" href="/url">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__026:
- canonical: |
- <p><a href="/foo-url" title="foo">foo</a>,
- <a href="/bar-url" title="bar">bar</a>,
- <a href="/baz-url">baz</a></p>
- static: |-
- <p data-sourcepos="6:1-8:5" dir="auto"><a href="/foo-url" title="foo">foo</a>,
- <a href="/bar-url" title="bar">bar</a>,
- <a href="/baz-url">baz</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/foo-url" title="foo">foo</a>,
- <a target="_blank" rel="noopener noreferrer nofollow" href="/bar-url" title="bar">bar</a>,
- <a target="_blank" rel="noopener noreferrer nofollow" href="/baz-url">baz</a></p>
-04_07__leaf_blocks__link_reference_definitions__027:
- canonical: |
- <p><a href="/url">foo</a></p>
- <blockquote>
- </blockquote>
- static: |-
- <p data-sourcepos="1:1-1:5" dir="auto"><a href="/url">foo</a></p>
- <blockquote data-sourcepos="3:1-3:13" dir="auto">
- </blockquote>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__028:
- canonical: ""
- static: ""
- wysiwyg: |-
- <p></p>
-04_08__leaf_blocks__paragraphs__001:
- canonical: |
- <p>aaa</p>
- <p>bbb</p>
- static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">aaa</p>
- <p data-sourcepos="3:1-3:3" dir="auto">bbb</p>
- wysiwyg: |-
- <p>aaa</p>
-04_08__leaf_blocks__paragraphs__002:
- canonical: |
- <p>aaa
- bbb</p>
- <p>ccc
- ddd</p>
- static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">aaa
- bbb</p>
- <p data-sourcepos="4:1-5:3" dir="auto">ccc
- ddd</p>
- wysiwyg: |-
- <p>aaa
- bbb</p>
-04_08__leaf_blocks__paragraphs__003:
- canonical: |
- <p>aaa</p>
- <p>bbb</p>
- static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">aaa</p>
- <p data-sourcepos="4:1-4:3" dir="auto">bbb</p>
- wysiwyg: |-
- <p>aaa</p>
-04_08__leaf_blocks__paragraphs__004:
- canonical: |
- <p>aaa
- bbb</p>
- static: |-
- <p data-sourcepos="1:3-2:4" dir="auto">aaa
- bbb</p>
- wysiwyg: |-
- <p>aaa
- bbb</p>
-04_08__leaf_blocks__paragraphs__005:
- canonical: |
- <p>aaa
- bbb
- ccc</p>
- static: |-
- <p data-sourcepos="1:1-3:42" dir="auto">aaa
- bbb
- ccc</p>
- wysiwyg: |-
- <p>aaa
- bbb
- ccc</p>
-04_08__leaf_blocks__paragraphs__006:
- canonical: |
- <p>aaa
- bbb</p>
- static: |-
- <p data-sourcepos="1:4-2:3" dir="auto">aaa
- bbb</p>
- wysiwyg: |-
- <p>aaa
- bbb</p>
-04_08__leaf_blocks__paragraphs__007:
- canonical: |
- <pre><code>aaa
- </code></pre>
- <p>bbb</p>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-1:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span></code></pre>
- <copy-code></copy-code>
- </div>
- <p data-sourcepos="2:1-2:3" dir="auto">bbb</p>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>aaa</code></pre>
-04_08__leaf_blocks__paragraphs__008:
- canonical: |
- <p>aaa<br />
- bbb</p>
- static: |-
- <p data-sourcepos="1:1-2:8" dir="auto">aaa<br>
- bbb</p>
- wysiwyg: |-
- <p>aaa<br>
- bbb</p>
-04_09__leaf_blocks__blank_lines__001:
- canonical: |
- <p>aaa</p>
- <h1>aaa</h1>
- static: |-
- <p data-sourcepos="3:1-3:3" dir="auto">aaa</p>
- <h1 data-sourcepos="6:1-6:5" dir="auto">
- <a id="user-content-aaa" class="anchor" href="#aaa" aria-hidden="true"></a>aaa</h1>
- wysiwyg: |-
- <p>aaa</p>
-04_10__leaf_blocks__tables_extension__001:
- canonical: |
- <table>
- <thead>
- <tr>
- <th>foo</th>
- <th>bar</th>
- </tr>
- </thead>
- <tbody>
- <tr>
- <td>baz</td>
- <td>bim</td>
- </tr>
- </tbody>
- </table>
- static: |-
- <table data-sourcepos="1:1-3:13" dir="auto">
- <thead>
- <tr data-sourcepos="1:1-1:13">
- <th data-sourcepos="1:2-1:6">foo</th>
- <th data-sourcepos="1:8-1:12">bar</th>
- </tr>
- </thead>
- <tbody>
- <tr data-sourcepos="3:1-3:13">
- <td data-sourcepos="3:2-3:6">baz</td>
- <td data-sourcepos="3:8-3:12">bim</td>
- </tr>
- </tbody>
- </table>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_10__leaf_blocks__tables_extension__002:
- canonical: |
- <table>
- <thead>
- <tr>
- <th align="center">abc</th>
- <th align="right">defghi</th>
- </tr>
- </thead>
- <tbody>
- <tr>
- <td align="center">bar</td>
- <td align="right">baz</td>
- </tr>
- </tbody>
- </table>
- static: |-
- <table data-sourcepos="1:1-3:9" dir="auto">
- <thead>
- <tr data-sourcepos="1:1-1:16">
- <th align="center" data-sourcepos="1:2-1:6">abc</th>
- <th align="right" data-sourcepos="1:8-1:15">defghi</th>
- </tr>
- </thead>
- <tbody>
- <tr data-sourcepos="3:1-3:9">
- <td align="center" data-sourcepos="3:1-3:4">bar</td>
- <td align="right" data-sourcepos="3:6-3:9">baz</td>
- </tr>
- </tbody>
- </table>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_10__leaf_blocks__tables_extension__003:
- canonical: |
- <table>
- <thead>
- <tr>
- <th>f|oo</th>
- </tr>
- </thead>
- <tbody>
- <tr>
- <td>b <code>|</code> az</td>
- </tr>
- <tr>
- <td>b <strong>|</strong> im</td>
- </tr>
- </tbody>
- </table>
- static: |-
- <table data-sourcepos="1:1-4:15" dir="auto">
- <thead>
- <tr data-sourcepos="1:1-1:10">
- <th data-sourcepos="1:2-1:9">f|oo</th>
- </tr>
- </thead>
- <tbody>
- <tr data-sourcepos="3:1-3:13">
- <td data-sourcepos="3:2-3:12">b <code>|</code> az</td>
- </tr>
- <tr data-sourcepos="4:1-4:15">
- <td data-sourcepos="4:2-4:14">b <strong>|</strong> im</td>
- </tr>
- </tbody>
- </table>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_10__leaf_blocks__tables_extension__004:
- canonical: |
- <table>
- <thead>
- <tr>
- <th>abc</th>
- <th>def</th>
- </tr>
- </thead>
- <tbody>
- <tr>
- <td>bar</td>
- <td>baz</td>
- </tr>
- </tbody>
- </table>
- <blockquote>
- <p>bar</p>
- </blockquote>
- static: |-
- <table data-sourcepos="1:1-3:13" dir="auto">
- <thead>
- <tr data-sourcepos="1:1-1:13">
- <th data-sourcepos="1:2-1:6">abc</th>
- <th data-sourcepos="1:8-1:12">def</th>
- </tr>
- </thead>
- <tbody>
- <tr data-sourcepos="3:1-3:13">
- <td data-sourcepos="3:2-3:6">bar</td>
- <td data-sourcepos="3:8-3:12">baz</td>
- </tr>
- </tbody>
- </table>
- <blockquote data-sourcepos="4:1-4:5" dir="auto">
- <p data-sourcepos="4:3-4:5">bar</p>
- </blockquote>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_10__leaf_blocks__tables_extension__005:
- canonical: |
- <table>
- <thead>
- <tr>
- <th>abc</th>
- <th>def</th>
- </tr>
- </thead>
- <tbody>
- <tr>
- <td>bar</td>
- <td>baz</td>
- </tr>
- <tr>
- <td>bar</td>
- <td></td>
- </tr>
- </tbody>
- </table>
- <p>bar</p>
- static: |-
- <table data-sourcepos="1:1-4:3" dir="auto">
- <thead>
- <tr data-sourcepos="1:1-1:13">
- <th data-sourcepos="1:2-1:6">abc</th>
- <th data-sourcepos="1:8-1:12">def</th>
- </tr>
- </thead>
- <tbody>
- <tr data-sourcepos="3:1-3:13">
- <td data-sourcepos="3:2-3:6">bar</td>
- <td data-sourcepos="3:8-3:12">baz</td>
- </tr>
- <tr data-sourcepos="4:1-4:3">
- <td data-sourcepos="4:1-4:3">bar</td>
- <td data-sourcepos="4:0-4:0"></td>
- </tr>
- </tbody>
- </table>
- <p data-sourcepos="6:1-6:3" dir="auto">bar</p>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_10__leaf_blocks__tables_extension__006:
- canonical: |
- <p>| abc | def |
- | --- |
- | bar |</p>
- static: |-
- <p data-sourcepos="1:1-3:7" dir="auto">| abc | def |
- | --- |
- | bar |</p>
- wysiwyg: |-
- <p>| abc | def |
- | --- |
- | bar |</p>
-04_10__leaf_blocks__tables_extension__007:
- canonical: |
- <table>
- <thead>
- <tr>
- <th>abc</th>
- <th>def</th>
- </tr>
- </thead>
- <tbody>
- <tr>
- <td>bar</td>
- <td></td>
- </tr>
- <tr>
- <td>bar</td>
- <td>baz</td>
- </tr>
- </tbody>
- </table>
- static: |-
- <table data-sourcepos="1:1-4:19" dir="auto">
- <thead>
- <tr data-sourcepos="1:1-1:13">
- <th data-sourcepos="1:2-1:6">abc</th>
- <th data-sourcepos="1:8-1:12">def</th>
- </tr>
- </thead>
- <tbody>
- <tr data-sourcepos="3:1-3:7">
- <td data-sourcepos="3:2-3:6">bar</td>
- <td data-sourcepos="3:0-3:0"></td>
- </tr>
- <tr data-sourcepos="4:1-4:19">
- <td data-sourcepos="4:2-4:6">bar</td>
- <td data-sourcepos="4:8-4:12">baz</td>
- </tr>
- </tbody>
- </table>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_10__leaf_blocks__tables_extension__008:
- canonical: |
- <table>
- <thead>
- <tr>
- <th>abc</th>
- <th>def</th>
- </tr>
- </thead>
- </table>
- static: |-
- <table data-sourcepos="1:1-2:13" dir="auto">
- <thead>
- <tr data-sourcepos="1:1-1:13">
- <th data-sourcepos="1:2-1:6">abc</th>
- <th data-sourcepos="1:8-1:12">def</th>
- </tr>
- </thead>
- </table>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-05_01__container_blocks__block_quotes__001:
- canonical: |
- <blockquote>
- <h1>Foo</h1>
- <p>bar
- baz</p>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-3:5" dir="auto">
- <h1 data-sourcepos="1:3-1:7">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>
- <p data-sourcepos="2:3-3:5">bar
- baz</p>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><h1>Foo</h1><p>bar
- baz</p></blockquote>
-05_01__container_blocks__block_quotes__002:
- canonical: |
- <blockquote>
- <h1>Foo</h1>
- <p>bar
- baz</p>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-3:5" dir="auto">
- <h1 data-sourcepos="1:2-1:6">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>
- <p data-sourcepos="2:2-3:5">bar
- baz</p>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><h1>Foo</h1><p>bar
- baz</p></blockquote>
-05_01__container_blocks__block_quotes__003:
- canonical: |
- <blockquote>
- <h1>Foo</h1>
- <p>bar
- baz</p>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:4-3:6" dir="auto">
- <h1 data-sourcepos="1:6-1:10">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>
- <p data-sourcepos="2:6-3:6">bar
- baz</p>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><h1>Foo</h1><p>bar
- baz</p></blockquote>
-05_01__container_blocks__block_quotes__004:
- canonical: |
- <pre><code>&gt; # Foo
- &gt; bar
- &gt; baz
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-3:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&gt; # Foo</span>
- <span id="LC2" class="line" lang="plaintext">&gt; bar</span>
- <span id="LC3" class="line" lang="plaintext">&gt; baz</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>&gt; # Foo
- &gt; bar
- &gt; baz</code></pre>
-05_01__container_blocks__block_quotes__005:
- canonical: |
- <blockquote>
- <h1>Foo</h1>
- <p>bar
- baz</p>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-3:3" dir="auto">
- <h1 data-sourcepos="1:3-1:7">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>
- <p data-sourcepos="2:3-3:3">bar
- baz</p>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><h1>Foo</h1><p>bar
- baz</p></blockquote>
-05_01__container_blocks__block_quotes__006:
- canonical: |
- <blockquote>
- <p>bar
- baz
- foo</p>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-3:5" dir="auto">
- <p data-sourcepos="1:3-3:5">bar
- baz
- foo</p>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><p>bar
- baz
- foo</p></blockquote>
-05_01__container_blocks__block_quotes__007:
- canonical: |
- <blockquote>
- <p>foo</p>
- </blockquote>
- <hr />
- static: |-
- <blockquote data-sourcepos="1:1-1:5" dir="auto">
- <p data-sourcepos="1:3-1:5">foo</p>
- </blockquote>
- <hr data-sourcepos="2:1-2:3">
- wysiwyg: |-
- <blockquote multiline="false"><p>foo</p></blockquote>
-05_01__container_blocks__block_quotes__008:
- canonical: |
- <blockquote>
- <ul>
- <li>foo</li>
- </ul>
- </blockquote>
- <ul>
- <li>bar</li>
- </ul>
- static: |-
- <blockquote data-sourcepos="1:1-1:7" dir="auto">
- <ul data-sourcepos="1:3-1:7">
- <li data-sourcepos="1:3-1:7">foo</li>
- </ul>
- </blockquote>
- <ul data-sourcepos="2:1-2:5" dir="auto">
- <li data-sourcepos="2:1-2:5">bar</li>
- </ul>
- wysiwyg: |-
- <blockquote multiline="false"><ul bullet="*"><li><p>foo</p></li></ul></blockquote>
-05_01__container_blocks__block_quotes__009:
- canonical: |
- <blockquote>
- <pre><code>foo
- </code></pre>
- </blockquote>
- <pre><code>bar
- </code></pre>
- static: |-
- <blockquote data-sourcepos="1:1-1:9" dir="auto">
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:7-1:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
- <copy-code></copy-code>
- </div>
- </blockquote>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="2:5-2:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <blockquote multiline="false"><pre class="content-editor-code-block undefined code highlight"><code>foo</code></pre></blockquote>
-05_01__container_blocks__block_quotes__010:
- canonical: |
- <blockquote>
- <pre><code></code></pre>
- </blockquote>
- <p>foo</p>
- <pre><code></code></pre>
- static: |-
- <blockquote data-sourcepos="1:1-1:5" dir="auto">
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:3-2:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code></code></pre>
- <copy-code></copy-code>
- </div>
- </blockquote>
- <p data-sourcepos="2:1-2:3" dir="auto">foo</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="3:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <blockquote multiline="false"><pre class="content-editor-code-block undefined code highlight"><code></code></pre></blockquote>
-05_01__container_blocks__block_quotes__011:
- canonical: |
- <blockquote>
- <p>foo
- - bar</p>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-2:9" dir="auto">
- <p data-sourcepos="1:3-2:9">foo
- - bar</p>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><p>foo
- - bar</p></blockquote>
-05_01__container_blocks__block_quotes__012:
- canonical: |
- <blockquote>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-1:1" dir="auto">
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><p></p></blockquote>
-05_01__container_blocks__block_quotes__013:
- canonical: |
- <blockquote>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-3:2" dir="auto">
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><p></p></blockquote>
-05_01__container_blocks__block_quotes__014:
- canonical: |
- <blockquote>
- <p>foo</p>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-3:3" dir="auto">
- <p data-sourcepos="2:3-2:5">foo</p>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><p>foo</p></blockquote>
-05_01__container_blocks__block_quotes__015:
- canonical: |
- <blockquote>
- <p>foo</p>
- </blockquote>
- <blockquote>
- <p>bar</p>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-1:5" dir="auto">
- <p data-sourcepos="1:3-1:5">foo</p>
- </blockquote>
- <blockquote data-sourcepos="3:1-3:5" dir="auto">
- <p data-sourcepos="3:3-3:5">bar</p>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><p>foo</p></blockquote>
-05_01__container_blocks__block_quotes__016:
- canonical: |
- <blockquote>
- <p>foo
- bar</p>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-2:5" dir="auto">
- <p data-sourcepos="1:3-2:5">foo
- bar</p>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><p>foo
- bar</p></blockquote>
-05_01__container_blocks__block_quotes__017:
- canonical: |
- <blockquote>
- <p>foo</p>
- <p>bar</p>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-3:5" dir="auto">
- <p data-sourcepos="1:3-1:5">foo</p>
- <p data-sourcepos="3:3-3:5">bar</p>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><p>foo</p><p>bar</p></blockquote>
-05_01__container_blocks__block_quotes__018:
- canonical: |
- <p>foo</p>
- <blockquote>
- <p>bar</p>
- </blockquote>
- static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">foo</p>
- <blockquote data-sourcepos="2:1-2:5" dir="auto">
- <p data-sourcepos="2:3-2:5">bar</p>
- </blockquote>
- wysiwyg: |-
- <p>foo</p>
-05_01__container_blocks__block_quotes__019:
- canonical: |
- <blockquote>
- <p>aaa</p>
- </blockquote>
- <hr />
- <blockquote>
- <p>bbb</p>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-1:5" dir="auto">
- <p data-sourcepos="1:3-1:5">aaa</p>
- </blockquote>
- <hr data-sourcepos="2:1-2:3">
- <blockquote data-sourcepos="3:1-3:5" dir="auto">
- <p data-sourcepos="3:3-3:5">bbb</p>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><p>aaa</p></blockquote>
-05_01__container_blocks__block_quotes__020:
- canonical: |
- <blockquote>
- <p>bar
- baz</p>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-2:3" dir="auto">
- <p data-sourcepos="1:3-2:3">bar
- baz</p>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><p>bar
- baz</p></blockquote>
-05_01__container_blocks__block_quotes__021:
- canonical: |
- <blockquote>
- <p>bar</p>
- </blockquote>
- <p>baz</p>
- static: |-
- <blockquote data-sourcepos="1:1-1:5" dir="auto">
- <p data-sourcepos="1:3-1:5">bar</p>
- </blockquote>
- <p data-sourcepos="3:1-3:3" dir="auto">baz</p>
- wysiwyg: |-
- <blockquote multiline="false"><p>bar</p></blockquote>
-05_01__container_blocks__block_quotes__022:
- canonical: |
- <blockquote>
- <p>bar</p>
- </blockquote>
- <p>baz</p>
- static: |-
- <blockquote data-sourcepos="1:1-2:1" dir="auto">
- <p data-sourcepos="1:3-1:5">bar</p>
- </blockquote>
- <p data-sourcepos="3:1-3:3" dir="auto">baz</p>
- wysiwyg: |-
- <blockquote multiline="false"><p>bar</p></blockquote>
-05_01__container_blocks__block_quotes__023:
- canonical: |
- <blockquote>
- <blockquote>
- <blockquote>
- <p>foo
- bar</p>
- </blockquote>
- </blockquote>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-2:3" dir="auto">
- <blockquote data-sourcepos="1:3-2:3">
- <blockquote data-sourcepos="1:5-2:3">
- <p data-sourcepos="1:7-2:3">foo
- bar</p>
- </blockquote>
- </blockquote>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><blockquote multiline="false"><blockquote multiline="false"><p>foo
- bar</p></blockquote></blockquote></blockquote>
-05_01__container_blocks__block_quotes__024:
- canonical: |
- <blockquote>
- <blockquote>
- <blockquote>
- <p>foo
- bar
- baz</p>
- </blockquote>
- </blockquote>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-3:5" dir="auto">
- <blockquote data-sourcepos="1:2-3:5">
- <blockquote data-sourcepos="1:3-3:5">
- <p data-sourcepos="1:5-3:5">foo
- bar
- baz</p>
- </blockquote>
- </blockquote>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><blockquote multiline="false"><blockquote multiline="false"><p>foo
- bar
- baz</p></blockquote></blockquote></blockquote>
-05_01__container_blocks__block_quotes__025:
- canonical: |
- <blockquote>
- <pre><code>code
- </code></pre>
- </blockquote>
- <blockquote>
- <p>not code</p>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-1:10" dir="auto">
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:7-1:10" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">code</span></code></pre>
- <copy-code></copy-code>
- </div>
- </blockquote>
- <blockquote data-sourcepos="3:1-3:13" dir="auto">
- <p data-sourcepos="3:6-3:13">not code</p>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><pre class="content-editor-code-block undefined code highlight"><code>code</code></pre></blockquote>
-05_02__container_blocks__list_items__001:
- canonical: |
- <p>A paragraph
- with two lines.</p>
- <pre><code>indented code
- </code></pre>
- <blockquote>
- <p>A block quote.</p>
- </blockquote>
- static: |-
- <p data-sourcepos="1:1-2:15" dir="auto">A paragraph
- with two lines.</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="4:5-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
- <copy-code></copy-code>
- </div>
- <blockquote data-sourcepos="6:1-6:16" dir="auto">
- <p data-sourcepos="6:3-6:16">A block quote.</p>
- </blockquote>
- wysiwyg: |-
- <p>A paragraph
- with two lines.</p>
-05_02__container_blocks__list_items__002:
- canonical: |
- <ol>
- <li>
- <p>A paragraph
- with two lines.</p>
- <pre><code>indented code
- </code></pre>
- <blockquote>
- <p>A block quote.</p>
- </blockquote>
- </li>
- </ol>
- static: |-
- <ol data-sourcepos="1:1-6:20" dir="auto">
- <li data-sourcepos="1:1-6:20">
- <p data-sourcepos="1:5-2:19">A paragraph
- with two lines.</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="4:9-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
- <copy-code></copy-code>
- </div>
- <blockquote data-sourcepos="6:5-6:20">
- <p data-sourcepos="6:7-6:20">A block quote.</p>
- </blockquote>
- </li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>A paragraph
- with two lines.</p><pre class="content-editor-code-block undefined code highlight"><code>indented code</code></pre><blockquote multiline="false"><p>A block quote.</p></blockquote></li></ol>
-05_02__container_blocks__list_items__003:
- canonical: |
- <ul>
- <li>one</li>
- </ul>
- <p>two</p>
- static: |-
- <ul data-sourcepos="1:1-2:0" dir="auto">
- <li data-sourcepos="1:1-2:0">one</li>
- </ul>
- <p data-sourcepos="3:2-3:4" dir="auto">two</p>
- wysiwyg: |-
- <ul bullet="*"><li><p>one</p></li></ul>
-05_02__container_blocks__list_items__004:
- canonical: |
- <ul>
- <li>
- <p>one</p>
- <p>two</p>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-3:5" dir="auto">
- <li data-sourcepos="1:1-3:5">
- <p data-sourcepos="1:3-1:5">one</p>
- <p data-sourcepos="3:3-3:5">two</p>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>one</p><p>two</p></li></ul>
-05_02__container_blocks__list_items__005:
- canonical: |
- <ul>
- <li>one</li>
- </ul>
- <pre><code> two
- </code></pre>
- static: |-
- <ul data-sourcepos="1:2-2:0" dir="auto">
- <li data-sourcepos="1:2-2:0">one</li>
- </ul>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="3:5-3:8" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> two</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <ul bullet="*"><li><p>one</p></li></ul>
-05_02__container_blocks__list_items__006:
- canonical: |
- <ul>
- <li>
- <p>one</p>
- <p>two</p>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:2-3:9" dir="auto">
- <li data-sourcepos="1:2-3:9">
- <p data-sourcepos="1:7-1:9">one</p>
- <p data-sourcepos="3:7-3:9">two</p>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>one</p><p>two</p></li></ul>
-05_02__container_blocks__list_items__007:
- canonical: |
- <blockquote>
- <blockquote>
- <ol>
- <li>
- <p>one</p>
- <p>two</p>
- </li>
- </ol>
- </blockquote>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:4-3:10" dir="auto">
- <blockquote data-sourcepos="1:6-3:10">
- <ol data-sourcepos="1:8-3:10">
- <li data-sourcepos="1:8-3:10">
- <p data-sourcepos="1:12-1:14">one</p>
- <p data-sourcepos="3:8-3:10">two</p>
- </li>
- </ol>
- </blockquote>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><blockquote multiline="false"><ol parens="false"><li><p>one</p><p>two</p></li></ol></blockquote></blockquote>
-05_02__container_blocks__list_items__008:
- canonical: |
- <blockquote>
- <blockquote>
- <ul>
- <li>one</li>
- </ul>
- <p>two</p>
- </blockquote>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-3:10" dir="auto">
- <blockquote data-sourcepos="1:2-3:10">
- <ul data-sourcepos="1:3-2:2">
- <li data-sourcepos="1:3-2:2">one</li>
- </ul>
- <p data-sourcepos="3:8-3:10">two</p>
- </blockquote>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><blockquote multiline="false"><ul bullet="*"><li><p>one</p></li></ul><p>two</p></blockquote></blockquote>
-05_02__container_blocks__list_items__009:
- canonical: |
- <p>-one</p>
- <p>2.two</p>
- static: |-
- <p data-sourcepos="1:1-1:4" dir="auto">-one</p>
- <p data-sourcepos="3:1-3:5" dir="auto">2.two</p>
- wysiwyg: |-
- <p>-one</p>
-05_02__container_blocks__list_items__010:
- canonical: |
- <ul>
- <li>
- <p>foo</p>
- <p>bar</p>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-4:5" dir="auto">
- <li data-sourcepos="1:1-4:5">
- <p data-sourcepos="1:3-1:5">foo</p>
- <p data-sourcepos="4:3-4:5">bar</p>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p><p>bar</p></li></ul>
-05_02__container_blocks__list_items__011:
- canonical: |
- <ol>
- <li>
- <p>foo</p>
- <pre><code>bar
- </code></pre>
- <p>baz</p>
- <blockquote>
- <p>bam</p>
- </blockquote>
- </li>
- </ol>
- static: |-
- <ol data-sourcepos="1:1-9:9" dir="auto">
- <li data-sourcepos="1:1-9:9">
- <p data-sourcepos="1:5-1:7">foo</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="3:5-5:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>
- <copy-code></copy-code>
- </div>
- <p data-sourcepos="7:5-7:7">baz</p>
- <blockquote data-sourcepos="9:5-9:9">
- <p data-sourcepos="9:7-9:9">bam</p>
- </blockquote>
- </li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>foo</p><pre class="content-editor-code-block undefined code highlight"><code>bar</code></pre><p>baz</p><blockquote multiline="false"><p>bam</p></blockquote></li></ol>
-05_02__container_blocks__list_items__012:
- canonical: |
- <ul>
- <li>
- <p>Foo</p>
- <pre><code>bar
-
-
- baz
- </code></pre>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-6:9" dir="auto">
- <li data-sourcepos="1:1-6:9">
- <p data-sourcepos="1:3-1:5">Foo</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="3:7-6:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span>
- <span id="LC2" class="line" lang="plaintext"></span>
- <span id="LC3" class="line" lang="plaintext"></span>
- <span id="LC4" class="line" lang="plaintext">baz</span></code></pre>
- <copy-code></copy-code>
- </div>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>Foo</p><pre class="content-editor-code-block undefined code highlight"><code>bar
-
-
- baz</code></pre></li></ul>
-05_02__container_blocks__list_items__013:
- canonical: |
- <ol start="123456789">
- <li>ok</li>
- </ol>
- static: |-
- <ol start="123456789" data-sourcepos="1:1-1:13" dir="auto">
- <li data-sourcepos="1:1-1:13">ok</li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>ok</p></li></ol>
-05_02__container_blocks__list_items__014:
- canonical: |
- <p>1234567890. not ok</p>
- static: |-
- <p data-sourcepos="1:1-1:18" dir="auto">1234567890. not ok</p>
- wysiwyg: |-
- <p>1234567890. not ok</p>
-05_02__container_blocks__list_items__015:
- canonical: |
- <ol start="0">
- <li>ok</li>
- </ol>
- static: |-
- <ol start="0" data-sourcepos="1:1-1:5" dir="auto">
- <li data-sourcepos="1:1-1:5">ok</li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>ok</p></li></ol>
-05_02__container_blocks__list_items__016:
- canonical: |
- <ol start="3">
- <li>ok</li>
- </ol>
- static: |-
- <ol start="3" data-sourcepos="1:1-1:7" dir="auto">
- <li data-sourcepos="1:1-1:7">ok</li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>ok</p></li></ol>
-05_02__container_blocks__list_items__017:
- canonical: |
- <p>-1. not ok</p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto">-1. not ok</p>
- wysiwyg: |-
- <p>-1. not ok</p>
-05_02__container_blocks__list_items__018:
- canonical: |
- <ul>
- <li>
- <p>foo</p>
- <pre><code>bar
- </code></pre>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-3:9" dir="auto">
- <li data-sourcepos="1:1-3:9">
- <p data-sourcepos="1:3-1:5">foo</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="3:7-3:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>
- <copy-code></copy-code>
- </div>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p><pre class="content-editor-code-block undefined code highlight"><code>bar</code></pre></li></ul>
-05_02__container_blocks__list_items__019:
- canonical: |
- <ol start="10">
- <li>
- <p>foo</p>
- <pre><code>bar
- </code></pre>
- </li>
- </ol>
- static: |-
- <ol start="10" data-sourcepos="1:3-3:14" dir="auto">
- <li data-sourcepos="1:3-3:14">
- <p data-sourcepos="1:8-1:10">foo</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="3:12-3:14" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>
- <copy-code></copy-code>
- </div>
- </li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>foo</p><pre class="content-editor-code-block undefined code highlight"><code>bar</code></pre></li></ol>
-05_02__container_blocks__list_items__020:
- canonical: |
- <pre><code>indented code
- </code></pre>
- <p>paragraph</p>
- <pre><code>more code
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-2:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
- <copy-code></copy-code>
- </div>
- <p data-sourcepos="3:1-3:9" dir="auto">paragraph</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="5:5-5:13" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">more code</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>indented code</code></pre>
-05_02__container_blocks__list_items__021:
- canonical: |
- <ol>
- <li>
- <pre><code>indented code
- </code></pre>
- <p>paragraph</p>
- <pre><code>more code
- </code></pre>
- </li>
- </ol>
- static: |-
- <ol data-sourcepos="1:1-5:16" dir="auto">
- <li data-sourcepos="1:1-5:16">
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:8-2:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
- <copy-code></copy-code>
- </div>
- <p data-sourcepos="3:4-3:12">paragraph</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="5:8-5:16" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">more code</span></code></pre>
- <copy-code></copy-code>
- </div>
- </li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p></p><pre class="content-editor-code-block undefined code highlight"><code>indented code</code></pre><p>paragraph</p><pre class="content-editor-code-block undefined code highlight"><code>more code</code></pre></li></ol>
-05_02__container_blocks__list_items__022:
- canonical: |
- <ol>
- <li>
- <pre><code> indented code
- </code></pre>
- <p>paragraph</p>
- <pre><code>more code
- </code></pre>
- </li>
- </ol>
- static: |-
- <ol data-sourcepos="1:1-5:16" dir="auto">
- <li data-sourcepos="1:1-5:16">
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:8-2:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> indented code</span></code></pre>
- <copy-code></copy-code>
- </div>
- <p data-sourcepos="3:4-3:12">paragraph</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="5:8-5:16" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">more code</span></code></pre>
- <copy-code></copy-code>
- </div>
- </li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p></p><pre class="content-editor-code-block undefined code highlight"><code> indented code</code></pre><p>paragraph</p><pre class="content-editor-code-block undefined code highlight"><code>more code</code></pre></li></ol>
-05_02__container_blocks__list_items__023:
- canonical: |
- <p>foo</p>
- <p>bar</p>
- static: |-
- <p data-sourcepos="1:4-1:6" dir="auto">foo</p>
- <p data-sourcepos="3:1-3:3" dir="auto">bar</p>
- wysiwyg: |-
- <p>foo</p>
-05_02__container_blocks__list_items__024:
- canonical: |
- <ul>
- <li>foo</li>
- </ul>
- <p>bar</p>
- static: |-
- <ul data-sourcepos="1:1-2:0" dir="auto">
- <li data-sourcepos="1:1-2:0">foo</li>
- </ul>
- <p data-sourcepos="3:3-3:5" dir="auto">bar</p>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p></li></ul>
-05_02__container_blocks__list_items__025:
- canonical: |
- <ul>
- <li>
- <p>foo</p>
- <p>bar</p>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-3:6" dir="auto">
- <li data-sourcepos="1:1-3:6">
- <p data-sourcepos="1:4-1:6">foo</p>
- <p data-sourcepos="3:4-3:6">bar</p>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p><p>bar</p></li></ul>
-05_02__container_blocks__list_items__026:
- canonical: |
- <ul>
- <li>foo</li>
- <li>
- <pre><code>bar
- </code></pre>
- </li>
- <li>
- <pre><code>baz
- </code></pre>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-8:9" dir="auto">
- <li data-sourcepos="1:1-2:5">foo</li>
- <li data-sourcepos="3:1-6:5">
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="4:3-6:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>
- <copy-code></copy-code>
- </div>
- </li>
- <li data-sourcepos="7:1-8:9">
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="8:7-8:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">baz</span></code></pre>
- <copy-code></copy-code>
- </div>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p></li><li><p></p><pre class="content-editor-code-block undefined code highlight"><code>bar</code></pre></li><li><p></p><pre class="content-editor-code-block undefined code highlight"><code>baz</code></pre></li></ul>
-05_02__container_blocks__list_items__027:
- canonical: |
- <ul>
- <li>foo</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-2:5" dir="auto">
- <li data-sourcepos="1:1-2:5">foo</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p></li></ul>
-05_02__container_blocks__list_items__028:
- canonical: |
- <ul>
- <li></li>
- </ul>
- <p>foo</p>
- static: |-
- <ul data-sourcepos="1:1-2:0" dir="auto">
- <li data-sourcepos="1:1-1:1">
- </li>
- </ul>
- <p data-sourcepos="3:3-3:5" dir="auto">foo</p>
- wysiwyg: |-
- <ul bullet="*"><li><p></p></li></ul>
-05_02__container_blocks__list_items__029:
- canonical: |
- <ul>
- <li>foo</li>
- <li></li>
- <li>bar</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-3:5" dir="auto">
- <li data-sourcepos="1:1-1:5">foo</li>
- <li data-sourcepos="2:1-2:1">
- </li>
- <li data-sourcepos="3:1-3:5">bar</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p></li><li><p></p></li><li><p>bar</p></li></ul>
-05_02__container_blocks__list_items__030:
- canonical: |
- <ul>
- <li>foo</li>
- <li></li>
- <li>bar</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-3:5" dir="auto">
- <li data-sourcepos="1:1-1:5">foo</li>
- <li data-sourcepos="2:1-2:4">
- </li>
- <li data-sourcepos="3:1-3:5">bar</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p></li><li><p></p></li><li><p>bar</p></li></ul>
-05_02__container_blocks__list_items__031:
- canonical: |
- <ol>
- <li>foo</li>
- <li></li>
- <li>bar</li>
- </ol>
- static: |-
- <ol data-sourcepos="1:1-3:6" dir="auto">
- <li data-sourcepos="1:1-1:6">foo</li>
- <li data-sourcepos="2:1-2:2">
- </li>
- <li data-sourcepos="3:1-3:6">bar</li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>foo</p></li><li><p></p></li><li><p>bar</p></li></ol>
-05_02__container_blocks__list_items__032:
- canonical: |
- <ul>
- <li></li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-1:1" dir="auto">
- <li data-sourcepos="1:1-1:1">
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p></p></li></ul>
-05_02__container_blocks__list_items__033:
- canonical: |
- <p>foo
- *</p>
- <p>foo
- 1.</p>
- static: |-
- <p data-sourcepos="1:1-2:1" dir="auto">foo
- *</p>
- <p data-sourcepos="4:1-5:2" dir="auto">foo
- 1.</p>
- wysiwyg: |-
- <p>foo
- *</p>
-05_02__container_blocks__list_items__034:
- canonical: |
- <ol>
- <li>
- <p>A paragraph
- with two lines.</p>
- <pre><code>indented code
- </code></pre>
- <blockquote>
- <p>A block quote.</p>
- </blockquote>
- </li>
- </ol>
- static: |-
- <ol data-sourcepos="1:2-6:21" dir="auto">
- <li data-sourcepos="1:2-6:21">
- <p data-sourcepos="1:6-2:20">A paragraph
- with two lines.</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="4:10-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
- <copy-code></copy-code>
- </div>
- <blockquote data-sourcepos="6:6-6:21">
- <p data-sourcepos="6:8-6:21">A block quote.</p>
- </blockquote>
- </li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>A paragraph
- with two lines.</p><pre class="content-editor-code-block undefined code highlight"><code>indented code</code></pre><blockquote multiline="false"><p>A block quote.</p></blockquote></li></ol>
-05_02__container_blocks__list_items__035:
- canonical: |
- <ol>
- <li>
- <p>A paragraph
- with two lines.</p>
- <pre><code>indented code
- </code></pre>
- <blockquote>
- <p>A block quote.</p>
- </blockquote>
- </li>
- </ol>
- static: |-
- <ol data-sourcepos="1:3-6:22" dir="auto">
- <li data-sourcepos="1:3-6:22">
- <p data-sourcepos="1:7-2:21">A paragraph
- with two lines.</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="4:11-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
- <copy-code></copy-code>
- </div>
- <blockquote data-sourcepos="6:7-6:22">
- <p data-sourcepos="6:9-6:22">A block quote.</p>
- </blockquote>
- </li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>A paragraph
- with two lines.</p><pre class="content-editor-code-block undefined code highlight"><code>indented code</code></pre><blockquote multiline="false"><p>A block quote.</p></blockquote></li></ol>
-05_02__container_blocks__list_items__036:
- canonical: |
- <ol>
- <li>
- <p>A paragraph
- with two lines.</p>
- <pre><code>indented code
- </code></pre>
- <blockquote>
- <p>A block quote.</p>
- </blockquote>
- </li>
- </ol>
- static: |-
- <ol data-sourcepos="1:4-6:23" dir="auto">
- <li data-sourcepos="1:4-6:23">
- <p data-sourcepos="1:8-2:22">A paragraph
- with two lines.</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="4:12-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
- <copy-code></copy-code>
- </div>
- <blockquote data-sourcepos="6:8-6:23">
- <p data-sourcepos="6:10-6:23">A block quote.</p>
- </blockquote>
- </li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>A paragraph
- with two lines.</p><pre class="content-editor-code-block undefined code highlight"><code>indented code</code></pre><blockquote multiline="false"><p>A block quote.</p></blockquote></li></ol>
-05_02__container_blocks__list_items__037:
- canonical: |
- <pre><code>1. A paragraph
- with two lines.
-
- indented code
-
- &gt; A block quote.
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-6:24" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">1. A paragraph</span>
- <span id="LC2" class="line" lang="plaintext"> with two lines.</span>
- <span id="LC3" class="line" lang="plaintext"></span>
- <span id="LC4" class="line" lang="plaintext"> indented code</span>
- <span id="LC5" class="line" lang="plaintext"></span>
- <span id="LC6" class="line" lang="plaintext"> &gt; A block quote.</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>1. A paragraph
- with two lines.
-
- indented code
-
- &gt; A block quote.</code></pre>
-05_02__container_blocks__list_items__038:
- canonical: |
- <ol>
- <li>
- <p>A paragraph
- with two lines.</p>
- <pre><code>indented code
- </code></pre>
- <blockquote>
- <p>A block quote.</p>
- </blockquote>
- </li>
- </ol>
- static: |-
- <ol data-sourcepos="1:3-6:22" dir="auto">
- <li data-sourcepos="1:3-6:22">
- <p data-sourcepos="1:7-2:15">A paragraph
- with two lines.</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="4:11-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
- <copy-code></copy-code>
- </div>
- <blockquote data-sourcepos="6:7-6:22">
- <p data-sourcepos="6:9-6:22">A block quote.</p>
- </blockquote>
- </li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>A paragraph
- with two lines.</p><pre class="content-editor-code-block undefined code highlight"><code>indented code</code></pre><blockquote multiline="false"><p>A block quote.</p></blockquote></li></ol>
-05_02__container_blocks__list_items__039:
- canonical: |
- <ol>
- <li>A paragraph
- with two lines.</li>
- </ol>
- static: |-
- <ol data-sourcepos="1:3-2:19" dir="auto">
- <li data-sourcepos="1:3-2:19">A paragraph
- with two lines.</li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>A paragraph
- with two lines.</p></li></ol>
-05_02__container_blocks__list_items__040:
- canonical: |
- <blockquote>
- <ol>
- <li>
- <blockquote>
- <p>Blockquote
- continued here.</p>
- </blockquote>
- </li>
- </ol>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-2:15" dir="auto">
- <ol data-sourcepos="1:3-2:15">
- <li data-sourcepos="1:3-2:15">
- <blockquote data-sourcepos="1:6-2:15">
- <p data-sourcepos="1:8-2:15">Blockquote
- continued here.</p>
- </blockquote>
- </li>
- </ol>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><ol parens="false"><li><p></p><blockquote multiline="false"><p>Blockquote
- continued here.</p></blockquote></li></ol></blockquote>
-05_02__container_blocks__list_items__041:
- canonical: |
- <blockquote>
- <ol>
- <li>
- <blockquote>
- <p>Blockquote
- continued here.</p>
- </blockquote>
- </li>
- </ol>
- </blockquote>
- static: |-
- <blockquote data-sourcepos="1:1-2:17" dir="auto">
- <ol data-sourcepos="1:3-2:17">
- <li data-sourcepos="1:3-2:17">
- <blockquote data-sourcepos="1:6-2:17">
- <p data-sourcepos="1:8-2:17">Blockquote
- continued here.</p>
- </blockquote>
- </li>
- </ol>
- </blockquote>
- wysiwyg: |-
- <blockquote multiline="false"><ol parens="false"><li><p></p><blockquote multiline="false"><p>Blockquote
- continued here.</p></blockquote></li></ol></blockquote>
-05_02__container_blocks__list_items__042:
- canonical: |
- <ul>
- <li>foo
- <ul>
- <li>bar
- <ul>
- <li>baz
- <ul>
- <li>boo</li>
- </ul>
- </li>
- </ul>
- </li>
- </ul>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-4:11" dir="auto">
- <li data-sourcepos="1:1-4:11">foo
- <ul data-sourcepos="2:3-4:11">
- <li data-sourcepos="2:3-4:11">bar
- <ul data-sourcepos="3:5-4:11">
- <li data-sourcepos="3:5-4:11">baz
- <ul data-sourcepos="4:7-4:11">
- <li data-sourcepos="4:7-4:11">boo</li>
- </ul>
- </li>
- </ul>
- </li>
- </ul>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p><ul bullet="*"><li><p>bar</p><ul bullet="*"><li><p>baz</p><ul bullet="*"><li><p>boo</p></li></ul></li></ul></li></ul></li></ul>
-05_02__container_blocks__list_items__043:
- canonical: |
- <ul>
- <li>foo</li>
- <li>bar</li>
- <li>baz</li>
- <li>boo</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-4:8" dir="auto">
- <li data-sourcepos="1:1-1:5">foo</li>
- <li data-sourcepos="2:2-2:6">bar</li>
- <li data-sourcepos="3:3-3:7">baz</li>
- <li data-sourcepos="4:4-4:8">boo</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p></li><li><p>bar</p></li><li><p>baz</p></li><li><p>boo</p></li></ul>
-05_02__container_blocks__list_items__044:
- canonical: |
- <ol start="10">
- <li>foo
- <ul>
- <li>bar</li>
- </ul>
- </li>
- </ol>
- static: |-
- <ol start="10" data-sourcepos="1:1-2:9" dir="auto">
- <li data-sourcepos="1:1-2:9">foo
- <ul data-sourcepos="2:5-2:9">
- <li data-sourcepos="2:5-2:9">bar</li>
- </ul>
- </li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>foo</p><ul bullet="*"><li><p>bar</p></li></ul></li></ol>
-05_02__container_blocks__list_items__045:
- canonical: |
- <ol start="10">
- <li>foo</li>
- </ol>
- <ul>
- <li>bar</li>
- </ul>
- static: |-
- <ol start="10" data-sourcepos="1:1-1:7" dir="auto">
- <li data-sourcepos="1:1-1:7">foo</li>
- </ol>
- <ul data-sourcepos="2:4-2:8" dir="auto">
- <li data-sourcepos="2:4-2:8">bar</li>
- </ul>
- wysiwyg: |-
- <ol parens="false"><li><p>foo</p></li></ol>
-05_02__container_blocks__list_items__046:
- canonical: |
- <ul>
- <li>
- <ul>
- <li>foo</li>
- </ul>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-1:7" dir="auto">
- <li data-sourcepos="1:1-1:7">
- <ul data-sourcepos="1:3-1:7">
- <li data-sourcepos="1:3-1:7">foo</li>
- </ul>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p></p><ul bullet="*"><li><p>foo</p></li></ul></li></ul>
-05_02__container_blocks__list_items__047:
- canonical: |
- <ol>
- <li>
- <ul>
- <li>
- <ol start="2">
- <li>foo</li>
- </ol>
- </li>
- </ul>
- </li>
- </ol>
- static: |-
- <ol data-sourcepos="1:1-1:11" dir="auto">
- <li data-sourcepos="1:1-1:11">
- <ul data-sourcepos="1:4-1:11">
- <li data-sourcepos="1:4-1:11">
- <ol start="2" data-sourcepos="1:6-1:11">
- <li data-sourcepos="1:6-1:11">foo</li>
- </ol>
- </li>
- </ul>
- </li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p></p><ul bullet="*"><li><p></p><ol parens="false"><li><p>foo</p></li></ol></li></ul></li></ol>
-05_02__container_blocks__list_items__048:
- canonical: |
- <ul>
- <li>
- <h1>Foo</h1>
- </li>
- <li>
- <h2>Bar</h2>
- baz</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-4:5" dir="auto">
- <li data-sourcepos="1:1-1:7">
- <h1 data-sourcepos="1:3-1:7">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>
- </li>
- <li data-sourcepos="2:1-4:5">
- <h2 data-sourcepos="2:3-4:5">
- <a id="user-content-bar" class="anchor" href="#bar" aria-hidden="true"></a>Bar</h2>
- baz</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p></p><h1>Foo</h1></li><li><p></p><h2>Bar
- baz</h2></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__049:
- canonical: |
- <ul>
- <li><input disabled="" type="checkbox"> foo</li>
- <li><input checked="" disabled="" type="checkbox"> bar</li>
- </ul>
- <ul>
- <li><input checked="" disabled="" type="checkbox"> foo
- <ul>
- <li><input disabled="" type="checkbox"> bar</li>
- <li><input checked="" disabled="" type="checkbox"> baz</li>
- </ul>
- </li>
- <li><input disabled="" type="checkbox"> bim</li>
- </ul>
- <ul>
- <li>foo</li>
- <li>bar</li>
- </ul>
- <ul>
- <li>baz</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-8:5" class="task-list" dir="auto">
- <li data-sourcepos="1:1-1:9" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> foo</li>
- <li data-sourcepos="2:1-2:9" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> bar</li>
- <li data-sourcepos="3:1-5:11" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> foo
- <ul data-sourcepos="4:3-5:11" class="task-list">
- <li data-sourcepos="4:3-4:11" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> bar</li>
- <li data-sourcepos="5:3-5:11" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> baz</li>
- </ul>
- </li>
- <li data-sourcepos="6:1-6:9" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> bim</li>
- <li data-sourcepos="7:1-7:5">foo</li>
- <li data-sourcepos="8:1-8:5">bar</li>
- </ul>
- <ul data-sourcepos="9:1-9:5" dir="auto">
- <li data-sourcepos="9:1-9:5">baz</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>baz</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__050:
- canonical: |
- <ol>
- <li>foo</li>
- <li>bar</li>
- </ol>
- <ol start="3">
- <li>baz</li>
- </ol>
- static: |-
- <ol data-sourcepos="1:1-2:6" dir="auto">
- <li data-sourcepos="1:1-1:6">foo</li>
- <li data-sourcepos="2:1-2:6">bar</li>
- </ol>
- <ol start="3" data-sourcepos="3:1-3:6" dir="auto">
- <li data-sourcepos="3:1-3:6">baz</li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>foo</p></li><li><p>bar</p></li></ol>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__051:
- canonical: |
- <p>Foo</p>
- <ul>
- <li>bar</li>
- <li>baz</li>
- </ul>
- static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">Foo</p>
- <ul data-sourcepos="2:1-3:5" dir="auto">
- <li data-sourcepos="2:1-2:5">bar</li>
- <li data-sourcepos="3:1-3:5">baz</li>
- </ul>
- wysiwyg: |-
- <p>Foo</p>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__052:
- canonical: |
- <p>The number of windows in my house is
- 14. The number of doors is 6.</p>
- static: |-
- <p data-sourcepos="1:1-2:30" dir="auto">The number of windows in my house is
- 14. The number of doors is 6.</p>
- wysiwyg: |-
- <p>The number of windows in my house is
- 14. The number of doors is 6.</p>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__053:
- canonical: |
- <p>The number of windows in my house is</p>
- <ol>
- <li>The number of doors is 6.</li>
- </ol>
- static: |-
- <p data-sourcepos="1:1-1:36" dir="auto">The number of windows in my house is</p>
- <ol data-sourcepos="2:1-2:29" dir="auto">
- <li data-sourcepos="2:1-2:29">The number of doors is 6.</li>
- </ol>
- wysiwyg: |-
- <p>The number of windows in my house is</p>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__054:
- canonical: |
- <ul>
- <li>
- <p>foo</p>
- </li>
- <li>
- <p>bar</p>
- </li>
- <li>
- <p>baz</p>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-6:5" dir="auto">
- <li data-sourcepos="1:1-2:0">
- <p data-sourcepos="1:3-1:5">foo</p>
- </li>
- <li data-sourcepos="3:1-5:0">
- <p data-sourcepos="3:3-3:5">bar</p>
- </li>
- <li data-sourcepos="6:1-6:5">
- <p data-sourcepos="6:3-6:5">baz</p>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p></li><li><p>bar</p></li><li><p>baz</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__055:
- canonical: |
- <ul>
- <li>foo
- <ul>
- <li>bar
- <ul>
- <li>
- <p>baz</p>
- <p>bim</p>
- </li>
- </ul>
- </li>
- </ul>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-6:9" dir="auto">
- <li data-sourcepos="1:1-6:9">foo
- <ul data-sourcepos="2:3-6:9">
- <li data-sourcepos="2:3-6:9">bar
- <ul data-sourcepos="3:5-6:9">
- <li data-sourcepos="3:5-6:9">
- <p data-sourcepos="3:7-3:9">baz</p>
- <p data-sourcepos="6:7-6:9">bim</p>
- </li>
- </ul>
- </li>
- </ul>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p><ul bullet="*"><li><p>bar</p><ul bullet="*"><li><p>baz</p><p>bim</p></li></ul></li></ul></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__056:
- canonical: |
- <ul>
- <li>foo</li>
- <li>bar</li>
- </ul>
- <!-- -->
- <ul>
- <li>baz</li>
- <li>bim</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-3:0" dir="auto">
- <li data-sourcepos="1:1-1:5">foo</li>
- <li data-sourcepos="2:1-3:0">bar</li>
- </ul>
-
- <ul data-sourcepos="6:1-7:5" dir="auto">
- <li data-sourcepos="6:1-6:5">baz</li>
- <li data-sourcepos="7:1-7:5">bim</li>
- </ul>
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__057:
- canonical: |
- <ul>
- <li>
- <p>foo</p>
- <p>notcode</p>
- </li>
- <li>
- <p>foo</p>
- </li>
- </ul>
- <!-- -->
- <pre><code>code
- </code></pre>
- static: |-
- <ul data-sourcepos="1:1-6:0" dir="auto">
- <li data-sourcepos="1:1-4:0">
- <p data-sourcepos="1:5-1:7">foo</p>
- <p data-sourcepos="3:5-3:11">notcode</p>
- </li>
- <li data-sourcepos="5:1-6:0">
- <p data-sourcepos="5:5-5:7">foo</p>
- </li>
- </ul>
-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="9:5-9:8" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">code</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__058:
- canonical: |
- <ul>
- <li>a</li>
- <li>b</li>
- <li>c</li>
- <li>d</li>
- <li>e</li>
- <li>f</li>
- <li>g</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-7:3" dir="auto">
- <li data-sourcepos="1:1-1:3">a</li>
- <li data-sourcepos="2:2-2:4">b</li>
- <li data-sourcepos="3:3-3:5">c</li>
- <li data-sourcepos="4:4-4:6">d</li>
- <li data-sourcepos="5:3-5:5">e</li>
- <li data-sourcepos="6:2-6:4">f</li>
- <li data-sourcepos="7:1-7:3">g</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>a</p></li><li><p>b</p></li><li><p>c</p></li><li><p>d</p></li><li><p>e</p></li><li><p>f</p></li><li><p>g</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__059:
- canonical: |
- <ol>
- <li>
- <p>a</p>
- </li>
- <li>
- <p>b</p>
- </li>
- <li>
- <p>c</p>
- </li>
- </ol>
- static: |-
- <ol data-sourcepos="1:1-5:7" dir="auto">
- <li data-sourcepos="1:1-2:0">
- <p data-sourcepos="1:4-1:4">a</p>
- </li>
- <li data-sourcepos="3:3-4:0">
- <p data-sourcepos="3:6-3:6">b</p>
- </li>
- <li data-sourcepos="5:4-5:7">
- <p data-sourcepos="5:7-5:7">c</p>
- </li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p>a</p></li><li><p>b</p></li><li><p>c</p></li></ol>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__060:
- canonical: |
- <ul>
- <li>a</li>
- <li>b</li>
- <li>c</li>
- <li>d
- - e</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-5:7" dir="auto">
- <li data-sourcepos="1:1-1:3">a</li>
- <li data-sourcepos="2:2-2:4">b</li>
- <li data-sourcepos="3:3-3:5">c</li>
- <li data-sourcepos="4:4-5:7">d
- - e</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>a</p></li><li><p>b</p></li><li><p>c</p></li><li><p>d
- - e</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__061:
- canonical: |
- <ol>
- <li>
- <p>a</p>
- </li>
- <li>
- <p>b</p>
- </li>
- </ol>
- <pre><code>3. c
- </code></pre>
- static: |-
- <ol data-sourcepos="1:1-4:0" dir="auto">
- <li data-sourcepos="1:1-2:0">
- <p data-sourcepos="1:4-1:4">a</p>
- </li>
- <li data-sourcepos="3:3-4:0">
- <p data-sourcepos="3:6-3:6">b</p>
- </li>
- </ol>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="5:5-5:8" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">3. c</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <ol parens="false"><li><p>a</p></li><li><p>b</p></li></ol>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__062:
- canonical: |
- <ul>
- <li>
- <p>a</p>
- </li>
- <li>
- <p>b</p>
- </li>
- <li>
- <p>c</p>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-4:3" dir="auto">
- <li data-sourcepos="1:1-1:3">
- <p data-sourcepos="1:3-1:3">a</p>
- </li>
- <li data-sourcepos="2:1-3:0">
- <p data-sourcepos="2:3-2:3">b</p>
- </li>
- <li data-sourcepos="4:1-4:3">
- <p data-sourcepos="4:3-4:3">c</p>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>a</p></li><li><p>b</p></li><li><p>c</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__063:
- canonical: |
- <ul>
- <li>
- <p>a</p>
- </li>
- <li></li>
- <li>
- <p>c</p>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-4:3" dir="auto">
- <li data-sourcepos="1:1-1:3">
- <p data-sourcepos="1:3-1:3">a</p>
- </li>
- <li data-sourcepos="2:1-2:1">
- </li>
- <li data-sourcepos="4:1-4:3">
- <p data-sourcepos="4:3-4:3">c</p>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>a</p></li><li><p></p></li><li><p>c</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__064:
- canonical: |
- <ul>
- <li>
- <p>a</p>
- </li>
- <li>
- <p>b</p>
- <p>c</p>
- </li>
- <li>
- <p>d</p>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-5:3" dir="auto">
- <li data-sourcepos="1:1-1:3">
- <p data-sourcepos="1:3-1:3">a</p>
- </li>
- <li data-sourcepos="2:1-4:3">
- <p data-sourcepos="2:3-2:3">b</p>
- <p data-sourcepos="4:3-4:3">c</p>
- </li>
- <li data-sourcepos="5:1-5:3">
- <p data-sourcepos="5:3-5:3">d</p>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>a</p></li><li><p>b</p><p>c</p></li><li><p>d</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__065:
- canonical: |
- <ul>
- <li>
- <p>a</p>
- </li>
- <li>
- <p>b</p>
- </li>
- <li>
- <p>d</p>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-5:3" dir="auto">
- <li data-sourcepos="1:1-1:3">
- <p data-sourcepos="1:3-1:3">a</p>
- </li>
- <li data-sourcepos="2:1-4:13">
- <p data-sourcepos="2:3-2:3">b</p>
- </li>
- <li data-sourcepos="5:1-5:3">
- <p data-sourcepos="5:3-5:3">d</p>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>a</p></li><li><p>b</p></li><li><p>d</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__066:
- canonical: |
- <ul>
- <li>a</li>
- <li>
- <pre><code>b
-
-
- </code></pre>
- </li>
- <li>c</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-7:3" dir="auto">
- <li data-sourcepos="1:1-1:3">a</li>
- <li data-sourcepos="2:1-6:5">
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="2:3-6:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">b</span>
- <span id="LC2" class="line" lang="plaintext"></span>
- <span id="LC3" class="line" lang="plaintext"></span></code></pre>
- <copy-code></copy-code>
- </div>
- </li>
- <li data-sourcepos="7:1-7:3">c</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>a</p></li><li><p></p><pre class="content-editor-code-block undefined code highlight"><code>b
-
- </code></pre></li><li><p>c</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__067:
- canonical: |
- <ul>
- <li>a
- <ul>
- <li>
- <p>b</p>
- <p>c</p>
- </li>
- </ul>
- </li>
- <li>d</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-5:3" dir="auto">
- <li data-sourcepos="1:1-4:5">a
- <ul data-sourcepos="2:3-4:5">
- <li data-sourcepos="2:3-4:5">
- <p data-sourcepos="2:5-2:5">b</p>
- <p data-sourcepos="4:5-4:5">c</p>
- </li>
- </ul>
- </li>
- <li data-sourcepos="5:1-5:3">d</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>a</p><ul bullet="*"><li><p>b</p><p>c</p></li></ul></li><li><p>d</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__068:
- canonical: |
- <ul>
- <li>a
- <blockquote>
- <p>b</p>
- </blockquote>
- </li>
- <li>c</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-4:3" dir="auto">
- <li data-sourcepos="1:1-3:3">a
- <blockquote data-sourcepos="2:3-3:3">
- <p data-sourcepos="2:5-2:5">b</p>
- </blockquote>
- </li>
- <li data-sourcepos="4:1-4:3">c</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>a</p><blockquote multiline="false"><p>b</p></blockquote></li><li><p>c</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__069:
- canonical: |
- <ul>
- <li>a
- <blockquote>
- <p>b</p>
- </blockquote>
- <pre><code>c
- </code></pre>
- </li>
- <li>d</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-6:3" dir="auto">
- <li data-sourcepos="1:1-5:5">a
- <blockquote data-sourcepos="2:3-2:5">
- <p data-sourcepos="2:5-2:5">b</p>
- </blockquote>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="3:3-5:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">c</span></code></pre>
- <copy-code></copy-code>
- </div>
- </li>
- <li data-sourcepos="6:1-6:3">d</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>a</p><blockquote multiline="false"><p>b</p></blockquote><pre class="content-editor-code-block undefined code highlight"><code>c</code></pre></li><li><p>d</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__070:
- canonical: |
- <ul>
- <li>a</li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-1:3" dir="auto">
- <li data-sourcepos="1:1-1:3">a</li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>a</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__071:
- canonical: |
- <ul>
- <li>a
- <ul>
- <li>b</li>
- </ul>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-2:5" dir="auto">
- <li data-sourcepos="1:1-2:5">a
- <ul data-sourcepos="2:3-2:5">
- <li data-sourcepos="2:3-2:5">b</li>
- </ul>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>a</p><ul bullet="*"><li><p>b</p></li></ul></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__072:
- canonical: |
- <ol>
- <li>
- <pre><code>foo
- </code></pre>
- <p>bar</p>
- </li>
- </ol>
- static: |-
- <ol data-sourcepos="1:1-5:6" dir="auto">
- <li data-sourcepos="1:1-5:6">
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:4-3:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
- <copy-code></copy-code>
- </div>
- <p data-sourcepos="5:4-5:6">bar</p>
- </li>
- </ol>
- wysiwyg: |-
- <ol parens="false"><li><p></p><pre class="content-editor-code-block undefined code highlight"><code>foo</code></pre><p>bar</p></li></ol>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__073:
- canonical: |
- <ul>
- <li>
- <p>foo</p>
- <ul>
- <li>bar</li>
- </ul>
- <p>baz</p>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-4:5" dir="auto">
- <li data-sourcepos="1:1-4:5">
- <p data-sourcepos="1:3-1:5">foo</p>
- <ul data-sourcepos="2:3-3:0">
- <li data-sourcepos="2:3-3:0">bar</li>
- </ul>
- <p data-sourcepos="4:3-4:5">baz</p>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>foo</p><ul bullet="*"><li><p>bar</p></li></ul><p>baz</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__074:
- canonical: |
- <ul>
- <li>
- <p>a</p>
- <ul>
- <li>b</li>
- <li>c</li>
- </ul>
- </li>
- <li>
- <p>d</p>
- <ul>
- <li>e</li>
- <li>f</li>
- </ul>
- </li>
- </ul>
- static: |-
- <ul data-sourcepos="1:1-7:5" dir="auto">
- <li data-sourcepos="1:1-4:0">
- <p data-sourcepos="1:3-1:3">a</p>
- <ul data-sourcepos="2:3-4:0">
- <li data-sourcepos="2:3-2:5">b</li>
- <li data-sourcepos="3:3-4:0">c</li>
- </ul>
- </li>
- <li data-sourcepos="5:1-7:5">
- <p data-sourcepos="5:3-5:3">d</p>
- <ul data-sourcepos="6:3-7:5">
- <li data-sourcepos="6:3-6:5">e</li>
- <li data-sourcepos="7:3-7:5">f</li>
- </ul>
- </li>
- </ul>
- wysiwyg: |-
- <ul bullet="*"><li><p>a</p><ul bullet="*"><li><p>b</p></li><li><p>c</p></li></ul></li><li><p>d</p><ul bullet="*"><li><p>e</p></li><li><p>f</p></li></ul></li></ul>
-06_01__inlines__001:
- canonical: |
- <p><code>hi</code>lo`</p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto"><code>hi</code>lo`</p>
- wysiwyg: |-
- <p><code>hi</code>lo`</p>
-06_02__inlines__backslash_escapes__001:
- canonical: |
- <p>!&quot;#$%&amp;'()*+,-./:;&lt;=&gt;?@[\]^_`{|}~</p>
- static: |-
- <p data-sourcepos="1:1-1:224" dir="auto"><span>!</span>"<span>#</span><span>$</span><span>%</span><span>&amp;</span>'()*+,-./:;&lt;=&gt;?<span>@</span>[\]<span>^</span>_`{|}<span>~</span></p>
- wysiwyg: |-
- <p>!"#$%&amp;'()*+,-./:;&lt;=&gt;?@[\]^_`{|}~</p>
-06_02__inlines__backslash_escapes__002:
- canonical: "<p>\\\t\\A\\a\\ \\3\\φ\\«</p>\n"
- static: "<p data-sourcepos=\"1:1-1:16\" dir=\"auto\">\\\t\\A\\a\\ \\3\\φ\\«</p>"
- wysiwyg: "<p>\\\t\\A\\a\\ \\3\\φ\\«</p>"
-06_02__inlines__backslash_escapes__003:
- canonical: |
- <p>*not emphasized*
- &lt;br/&gt; not a tag
- [not a link](/foo)
- `not code`
- 1. not a list
- * not a list
- # not a heading
- [foo]: /url &quot;not a reference&quot;
- &amp;ouml; not a character entity</p>
- static: |-
- <p data-sourcepos="1:1-9:50" dir="auto">*not emphasized*
- &lt;br/&gt; not a tag
- <a href="/foo">not a link</a>
- `not code`
- 1. not a list
- * not a list
- <span>#</span> not a heading
- [foo]: /url "not a reference"
- <span>&amp;</span>ouml; not a character entity</p>
- wysiwyg: |-
- <p>*not emphasized*
- &lt;br/&gt; not a tag
- [not a link](/foo)
- `not code`
- 1. not a list
- * not a list
- # not a heading
- [foo]: /url "not a reference"
- &amp;ouml; not a character entity</p>
-06_02__inlines__backslash_escapes__004:
- canonical: |
- <p>\<em>emphasis</em></p>
- static: |-
- <p data-sourcepos="1:1-1:12" dir="auto">\<em>emphasis</em></p>
- wysiwyg: |-
- <p>\<em>emphasis</em></p>
-06_02__inlines__backslash_escapes__005:
- canonical: |
- <p>foo<br />
- bar</p>
- static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">foo<br>
- bar</p>
- wysiwyg: |-
- <p>foo<br>
- bar</p>
-06_02__inlines__backslash_escapes__006:
- canonical: |
- <p><code>\[\`</code></p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto"><code>\[\`</code></p>
- wysiwyg: |-
- <p><code>\[\`</code></p>
-06_02__inlines__backslash_escapes__007:
- canonical: |
- <pre><code>\[\]
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-1:8" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">\[\]</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>\[\]</code></pre>
-06_02__inlines__backslash_escapes__008:
- canonical: |
- <pre><code>\[\]
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">\[\]</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>\[\]</code></pre>
-06_02__inlines__backslash_escapes__009:
- canonical: |
- <p><a href="http://example.com?find=%5C*">http://example.com?find=\*</a></p>
- static: |-
- <p data-sourcepos="1:1-1:28" dir="auto"><a href="http://example.com?find=%5C*" rel="nofollow noreferrer noopener" target="_blank">http://example.com?find=\*</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://example.com?find=%5C*">http://example.com?find=\*</a></p>
-06_02__inlines__backslash_escapes__010:
- canonical: |
- <a href="/bar\/)">
- static: |-
- <a href="/bar%5C/)" rel="nofollow noreferrer noopener" target="_blank"></a>
- wysiwyg: |-
- <p></p>
-06_02__inlines__backslash_escapes__011:
- canonical: |
- <p><a href="/bar*" title="ti*tle">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:23" dir="auto"><a href="/bar*" title="ti*tle">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/bar*" title="ti*tle">foo</a></p>
-06_02__inlines__backslash_escapes__012:
- canonical: |
- <p><a href="/bar*" title="ti*tle">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:5" dir="auto"><a href="/bar*" title="ti*tle">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/bar*" title="ti*tle">foo</a></p>
-06_02__inlines__backslash_escapes__013:
- canonical: |
- <pre><code class="language-foo+bar">foo
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="foo+bar" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre language="foo+bar" class="content-editor-code-block undefined code highlight"><code>foo</code></pre>
-06_03__inlines__entity_and_numeric_character_references__001:
- canonical: |
- <p>  &amp; © Æ Ď
- ¾ ℋ ⅆ
- ∲ ≧̸</p>
- static: |-
- <p data-sourcepos="1:1-3:32" dir="auto">  &amp; © Æ Ď
- ¾ ℋ ⅆ
- ∲ ≧̸</p>
- wysiwyg: |-
- <p>&nbsp; &amp; © Æ Ď
- ¾ ℋ ⅆ
- ∲ ≧̸</p>
-06_03__inlines__entity_and_numeric_character_references__002:
- canonical: |
- <p># Ӓ Ϡ �</p>
- static: |-
- <p data-sourcepos="1:1-1:25" dir="auto"># Ӓ Ϡ �</p>
- wysiwyg: |-
- <p># Ӓ Ϡ �</p>
-06_03__inlines__entity_and_numeric_character_references__003:
- canonical: |
- <p>&quot; ആ ಫ</p>
- static: |-
- <p data-sourcepos="1:1-1:22" dir="auto">" ആ ಫ</p>
- wysiwyg: |-
- <p>" ആ ಫ</p>
-06_03__inlines__entity_and_numeric_character_references__004:
- canonical: |
- <p>&amp;nbsp &amp;x; &amp;#; &amp;#x;
- &amp;#987654321;
- &amp;#abcdef0;
- &amp;ThisIsNotDefined; &amp;hi?;</p>
- static: |-
- <p data-sourcepos="1:1-4:24" dir="auto">&amp;nbsp &amp;x; &amp;#; &amp;#x;
- &amp;#987654321;
- &amp;#abcdef0;
- &amp;ThisIsNotDefined; &amp;hi?;</p>
- wysiwyg: |-
- <p>&amp;nbsp &amp;x; &amp;#; &amp;#x;
- &amp;#987654321;
- &amp;#abcdef0;
- &amp;ThisIsNotDefined; &amp;hi?;</p>
-06_03__inlines__entity_and_numeric_character_references__005:
- canonical: |
- <p>&amp;copy</p>
- static: |-
- <p data-sourcepos="1:1-1:5" dir="auto">&amp;copy</p>
- wysiwyg: |-
- <p>&amp;copy</p>
-06_03__inlines__entity_and_numeric_character_references__006:
- canonical: |
- <p>&amp;MadeUpEntity;</p>
- static: |-
- <p data-sourcepos="1:1-1:14" dir="auto">&amp;MadeUpEntity;</p>
- wysiwyg: |-
- <p>&amp;MadeUpEntity;</p>
-06_03__inlines__entity_and_numeric_character_references__007:
- canonical: |
- <a href="&ouml;&ouml;.html">
- static: |-
- <a href="%C3%B6%C3%B6.html" rel="nofollow noreferrer noopener" target="_blank"></a>
- wysiwyg: |-
- <p></p>
-06_03__inlines__entity_and_numeric_character_references__008:
- canonical: |
- <p><a href="/f%C3%B6%C3%B6" title="föö">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:37" dir="auto"><a href="/f%C3%B6%C3%B6" title="föö">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/f%C3%B6%C3%B6" title="föö">foo</a></p>
-06_03__inlines__entity_and_numeric_character_references__009:
- canonical: |
- <p><a href="/f%C3%B6%C3%B6" title="föö">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:5" dir="auto"><a href="/f%C3%B6%C3%B6" title="föö">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/f%C3%B6%C3%B6" title="föö">foo</a></p>
-06_03__inlines__entity_and_numeric_character_references__010:
- canonical: |
- <pre><code class="language-föö">foo
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="föö" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre language="föö" class="content-editor-code-block undefined code highlight"><code>foo</code></pre>
-06_03__inlines__entity_and_numeric_character_references__011:
- canonical: |
- <p><code>f&amp;ouml;&amp;ouml;</code></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><code>f&amp;ouml;&amp;ouml;</code></p>
- wysiwyg: |-
- <p><code>f&amp;ouml;&amp;ouml;</code></p>
-06_03__inlines__entity_and_numeric_character_references__012:
- canonical: |
- <pre><code>f&amp;ouml;f&amp;ouml;
- </code></pre>
- static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:5-1:18" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">f&amp;ouml;f&amp;ouml;</span></code></pre>
- <copy-code></copy-code>
- </div>
- wysiwyg: |-
- <pre class="content-editor-code-block undefined code highlight"><code>f&amp;ouml;f&amp;ouml;</code></pre>
-06_03__inlines__entity_and_numeric_character_references__013:
- canonical: |
- <p>*foo*
- <em>foo</em></p>
- static: |-
- <p data-sourcepos="1:1-2:5" dir="auto">*foo*
- <em>foo</em></p>
- wysiwyg: |-
- <p>*foo*
- <em>foo</em></p>
-06_03__inlines__entity_and_numeric_character_references__014:
- canonical: |
- <p>* foo</p>
- <ul>
- <li>foo</li>
- </ul>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto">* foo</p>
- <ul data-sourcepos="3:1-3:5" dir="auto">
- <li data-sourcepos="3:1-3:5">foo</li>
- </ul>
- wysiwyg: |-
- <p>* foo</p>
-06_03__inlines__entity_and_numeric_character_references__015:
- canonical: |
- <p>foo
-
- bar</p>
- static: |-
- <p data-sourcepos="1:1-1:16" dir="auto">foo
-
- bar</p>
- wysiwyg: |-
- <p>foo
-
- bar</p>
-06_03__inlines__entity_and_numeric_character_references__016:
- canonical: "<p>\tfoo</p>\n"
- static: "<p data-sourcepos=\"1:1-1:7\" dir=\"auto\">\tfoo</p>"
- wysiwyg: "<p>\tfoo</p>"
-06_03__inlines__entity_and_numeric_character_references__017:
- canonical: |
- <p>[a](url &quot;tit&quot;)</p>
- static: |-
- <p data-sourcepos="1:1-1:24" dir="auto"><a href="url" title="tit">a</a></p>
- wysiwyg: |-
- <p>[a](url "tit")</p>
-06_04__inlines__code_spans__001:
- canonical: |
- <p><code>foo</code></p>
- static: |-
- <p data-sourcepos="1:1-1:5" dir="auto"><code>foo</code></p>
- wysiwyg: |-
- <p><code>foo</code></p>
-06_04__inlines__code_spans__002:
- canonical: |
- <p><code>foo ` bar</code></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><code>foo ` bar</code></p>
- wysiwyg: |-
- <p><code>foo ` bar</code></p>
-06_04__inlines__code_spans__003:
- canonical: |
- <p><code>``</code></p>
- static: |-
- <p data-sourcepos="1:1-1:6" dir="auto"><code>``</code></p>
- wysiwyg: |-
- <p><code>``</code></p>
-06_04__inlines__code_spans__004:
- canonical: |
- <p><code> `` </code></p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto"><code> `` </code></p>
- wysiwyg: |-
- <p><code> `` </code></p>
-06_04__inlines__code_spans__005:
- canonical: |
- <p><code> a</code></p>
- static: |-
- <p data-sourcepos="1:1-1:4" dir="auto"><code> a</code></p>
- wysiwyg: |-
- <p><code> a</code></p>
-06_04__inlines__code_spans__006:
- canonical: |
- <p><code> b </code></p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto"><code> b </code></p>
- wysiwyg: |-
- <p><code>&nbsp;b&nbsp;</code></p>
-06_04__inlines__code_spans__007:
- canonical: |
- <p><code> </code>
- <code> </code></p>
- static: |-
- <p data-sourcepos="1:1-2:4" dir="auto"><code> </code>
- <code> </code></p>
- wysiwyg: |-
- <p></p>
-06_04__inlines__code_spans__008:
- canonical: |
- <p><code>foo bar baz</code></p>
- static: |-
- <p data-sourcepos="1:1-5:2" dir="auto"><code>foo bar baz</code></p>
- wysiwyg: |-
- <p><code>foo bar baz</code></p>
-06_04__inlines__code_spans__009:
- canonical: |
- <p><code>foo </code></p>
- static: |-
- <p data-sourcepos="1:1-3:2" dir="auto"><code>foo </code></p>
- wysiwyg: |-
- <p><code>foo </code></p>
-06_04__inlines__code_spans__010:
- canonical: |
- <p><code>foo bar baz</code></p>
- static: |-
- <p data-sourcepos="1:1-2:4" dir="auto"><code>foo bar baz</code></p>
- wysiwyg: |-
- <p><code>foo bar baz</code></p>
-06_04__inlines__code_spans__011:
- canonical: |
- <p><code>foo\</code>bar`</p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto"><code>foo\</code>bar`</p>
- wysiwyg: |-
- <p><code>foo\</code>bar`</p>
-06_04__inlines__code_spans__012:
- canonical: |
- <p><code>foo`bar</code></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"><code>foo`bar</code></p>
- wysiwyg: |-
- <p><code>foo`bar</code></p>
-06_04__inlines__code_spans__013:
- canonical: |
- <p><code>foo `` bar</code></p>
- static: |-
- <p data-sourcepos="1:1-1:14" dir="auto"><code>foo `` bar</code></p>
- wysiwyg: |-
- <p><code>foo `` bar</code></p>
-06_04__inlines__code_spans__014:
- canonical: |
- <p>*foo<code>*</code></p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto">*foo<code>*</code></p>
- wysiwyg: |-
- <p>*foo<code>*</code></p>
-06_04__inlines__code_spans__015:
- canonical: |
- <p>[not a <code>link](/foo</code>)</p>
- static: |-
- <p data-sourcepos="1:1-1:20" dir="auto">[not a <code>link](/foo</code>)</p>
- wysiwyg: |-
- <p>[not a <code>link](/foo</code>)</p>
-06_04__inlines__code_spans__016:
- canonical: |
- <p><code>&lt;a href=&quot;</code>&quot;&gt;`</p>
- static: |-
- <p data-sourcepos="1:1-1:14" dir="auto"><code>&lt;a href="</code>"&gt;`</p>
- wysiwyg: |-
- <p><code>&lt;a href="</code>"&gt;`</p>
-06_04__inlines__code_spans__017:
- canonical: |
- <p><a href="`">`</p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto"><a href="%60" rel="nofollow noreferrer noopener" target="_blank">`</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="`">`</a></p>
-06_04__inlines__code_spans__018:
- canonical: |
- <p><code>&lt;http://foo.bar.</code>baz&gt;`</p>
- static: |-
- <p data-sourcepos="1:1-1:23" dir="auto"><code>&lt;http://foo.bar.</code>baz&gt;`</p>
- wysiwyg: |-
- <p><code>&lt;http://foo.bar.</code>baz&gt;`</p>
-06_04__inlines__code_spans__019:
- canonical: |
- <p><a href="http://foo.bar.%60baz">http://foo.bar.`baz</a>`</p>
- static: |-
- <p data-sourcepos="1:1-1:22" dir="auto"><a href="http://foo.bar.%60baz" rel="nofollow noreferrer noopener" target="_blank">http://foo.bar.`baz</a>`</p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://foo.bar.%60baz">http://foo.bar.`baz</a>`</p>
-06_04__inlines__code_spans__020:
- canonical: |
- <p>```foo``</p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto">```foo``</p>
- wysiwyg: |-
- <p>```foo``</p>
-06_04__inlines__code_spans__021:
- canonical: |
- <p>`foo</p>
- static: |-
- <p data-sourcepos="1:1-1:4" dir="auto">`foo</p>
- wysiwyg: |-
- <p>`foo</p>
-06_04__inlines__code_spans__022:
- canonical: |
- <p>`foo<code>bar</code></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto">`foo<code>bar</code></p>
- wysiwyg: |-
- <p>`foo<code>bar</code></p>
-06_05__inlines__emphasis_and_strong_emphasis__001:
- canonical: |
- <p><em>foo bar</em></p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto"><em>foo bar</em></p>
- wysiwyg: |-
- <p><em>foo bar</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__002:
- canonical: |
- <p>a * foo bar*</p>
- static: |-
- <p data-sourcepos="1:1-1:12" dir="auto">a * foo bar*</p>
- wysiwyg: |-
- <p>a * foo bar*</p>
-06_05__inlines__emphasis_and_strong_emphasis__003:
- canonical: |
- <p>a*&quot;foo&quot;*</p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto">a*"foo"*</p>
- wysiwyg: |-
- <p>a*"foo"*</p>
-06_05__inlines__emphasis_and_strong_emphasis__004:
- canonical: |
- <p>* a *</p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto">* a *</p>
- wysiwyg: |-
- <p>*&nbsp;a&nbsp;*</p>
-06_05__inlines__emphasis_and_strong_emphasis__005:
- canonical: |
- <p>foo<em>bar</em></p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto">foo<em>bar</em></p>
- wysiwyg: |-
- <p>foo<em>bar</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__006:
- canonical: |
- <p>5<em>6</em>78</p>
- static: |-
- <p data-sourcepos="1:1-1:6" dir="auto">5<em>6</em>78</p>
- wysiwyg: |-
- <p>5<em>6</em>78</p>
-06_05__inlines__emphasis_and_strong_emphasis__007:
- canonical: |
- <p><em>foo bar</em></p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto"><em>foo bar</em></p>
- wysiwyg: |-
- <p><em>foo bar</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__008:
- canonical: |
- <p>_ foo bar_</p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto">_ foo bar_</p>
- wysiwyg: |-
- <p>_ foo bar_</p>
-06_05__inlines__emphasis_and_strong_emphasis__009:
- canonical: |
- <p>a_&quot;foo&quot;_</p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto">a_"foo"_</p>
- wysiwyg: |-
- <p>a_"foo"_</p>
-06_05__inlines__emphasis_and_strong_emphasis__010:
- canonical: |
- <p>foo_bar_</p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto">foo_bar_</p>
- wysiwyg: |-
- <p>foo_bar_</p>
-06_05__inlines__emphasis_and_strong_emphasis__011:
- canonical: |
- <p>5_6_78</p>
- static: |-
- <p data-sourcepos="1:1-1:6" dir="auto">5_6_78</p>
- wysiwyg: |-
- <p>5_6_78</p>
-06_05__inlines__emphasis_and_strong_emphasis__012:
- canonical: |
- <p>пристаням_стремятся_</p>
- static: |-
- <p data-sourcepos="1:1-1:38" dir="auto">пристаням_стремятся_</p>
- wysiwyg: |-
- <p>пристаням_стремятся_</p>
-06_05__inlines__emphasis_and_strong_emphasis__013:
- canonical: |
- <p>aa_&quot;bb&quot;_cc</p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto">aa_"bb"_cc</p>
- wysiwyg: |-
- <p>aa_"bb"_cc</p>
-06_05__inlines__emphasis_and_strong_emphasis__014:
- canonical: |
- <p>foo-<em>(bar)</em></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto">foo-<em>(bar)</em></p>
- wysiwyg: |-
- <p>foo-<em>(bar)</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__015:
- canonical: |
- <p>_foo*</p>
- static: |-
- <p data-sourcepos="1:1-1:5" dir="auto">_foo*</p>
- wysiwyg: |-
- <p>_foo*</p>
-06_05__inlines__emphasis_and_strong_emphasis__016:
- canonical: |
- <p>*foo bar *</p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto">*foo bar *</p>
- wysiwyg: |-
- <p>*foo bar *</p>
-06_05__inlines__emphasis_and_strong_emphasis__017:
- canonical: |
- <p>*foo bar
- *</p>
- static: |-
- <p data-sourcepos="1:1-2:1" dir="auto">*foo bar
- *</p>
- wysiwyg: |-
- <p>*foo bar
- *</p>
-06_05__inlines__emphasis_and_strong_emphasis__018:
- canonical: |
- <p>*(*foo)</p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto">*(*foo)</p>
- wysiwyg: |-
- <p>*(*foo)</p>
-06_05__inlines__emphasis_and_strong_emphasis__019:
- canonical: |
- <p><em>(<em>foo</em>)</em></p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto"><em>(<em>foo</em>)</em></p>
- wysiwyg: |-
- <p><em>(foo</em>)</p>
-06_05__inlines__emphasis_and_strong_emphasis__020:
- canonical: |
- <p><em>foo</em>bar</p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto"><em>foo</em>bar</p>
- wysiwyg: |-
- <p><em>foo</em>bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__021:
- canonical: |
- <p>_foo bar _</p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto">_foo bar _</p>
- wysiwyg: |-
- <p>_foo bar _</p>
-06_05__inlines__emphasis_and_strong_emphasis__022:
- canonical: |
- <p>_(_foo)</p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto">_(_foo)</p>
- wysiwyg: |-
- <p>_(_foo)</p>
-06_05__inlines__emphasis_and_strong_emphasis__023:
- canonical: |
- <p><em>(<em>foo</em>)</em></p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto"><em>(<em>foo</em>)</em></p>
- wysiwyg: |-
- <p><em>(foo</em>)</p>
-06_05__inlines__emphasis_and_strong_emphasis__024:
- canonical: |
- <p>_foo_bar</p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto">_foo_bar</p>
- wysiwyg: |-
- <p>_foo_bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__025:
- canonical: |
- <p>_пристаням_стремятся</p>
- static: |-
- <p data-sourcepos="1:1-1:38" dir="auto">_пристаням_стремятся</p>
- wysiwyg: |-
- <p>_пристаням_стремятся</p>
-06_05__inlines__emphasis_and_strong_emphasis__026:
- canonical: |
- <p><em>foo_bar_baz</em></p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto"><em>foo_bar_baz</em></p>
- wysiwyg: |-
- <p><em>foo_bar_baz</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__027:
- canonical: |
- <p><em>(bar)</em>.</p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto"><em>(bar)</em>.</p>
- wysiwyg: |-
- <p><em>(bar)</em>.</p>
-06_05__inlines__emphasis_and_strong_emphasis__028:
- canonical: |
- <p><strong>foo bar</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"><strong>foo bar</strong></p>
- wysiwyg: |-
- <p><strong>foo bar</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__029:
- canonical: |
- <p>** foo bar**</p>
- static: |-
- <p data-sourcepos="1:1-1:12" dir="auto">** foo bar**</p>
- wysiwyg: |-
- <p>** foo bar**</p>
-06_05__inlines__emphasis_and_strong_emphasis__030:
- canonical: |
- <p>a**&quot;foo&quot;**</p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto">a**"foo"**</p>
- wysiwyg: |-
- <p>a**"foo"**</p>
-06_05__inlines__emphasis_and_strong_emphasis__031:
- canonical: |
- <p>foo<strong>bar</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto">foo<strong>bar</strong></p>
- wysiwyg: |-
- <p>foo<strong>bar</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__032:
- canonical: |
- <p><strong>foo bar</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"><strong>foo bar</strong></p>
- wysiwyg: |-
- <p><strong>foo bar</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__033:
- canonical: |
- <p>__ foo bar__</p>
- static: |-
- <p data-sourcepos="1:1-1:12" dir="auto">__ foo bar__</p>
- wysiwyg: |-
- <p>__ foo bar__</p>
-06_05__inlines__emphasis_and_strong_emphasis__034:
- canonical: |
- <p>__
- foo bar__</p>
- static: |-
- <p data-sourcepos="1:1-2:9" dir="auto">__
- foo bar__</p>
- wysiwyg: |-
- <p>__
- foo bar__</p>
-06_05__inlines__emphasis_and_strong_emphasis__035:
- canonical: |
- <p>a__&quot;foo&quot;__</p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto">a__"foo"__</p>
- wysiwyg: |-
- <p>a__"foo"__</p>
-06_05__inlines__emphasis_and_strong_emphasis__036:
- canonical: |
- <p>foo__bar__</p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto">foo__bar__</p>
- wysiwyg: |-
- <p>foo__bar__</p>
-06_05__inlines__emphasis_and_strong_emphasis__037:
- canonical: |
- <p>5__6__78</p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto">5__6__78</p>
- wysiwyg: |-
- <p>5__6__78</p>
-06_05__inlines__emphasis_and_strong_emphasis__038:
- canonical: |
- <p>пристаням__стремятся__</p>
- static: |-
- <p data-sourcepos="1:1-1:40" dir="auto">пристаням__стремятся__</p>
- wysiwyg: |-
- <p>пристаням__стремятся__</p>
-06_05__inlines__emphasis_and_strong_emphasis__039:
- canonical: |
- <p><strong>foo, <strong>bar</strong>, baz</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:21" dir="auto"><strong>foo, <strong>bar</strong>, baz</strong></p>
- wysiwyg: |-
- <p><strong>foo, bar</strong>, baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__040:
- canonical: |
- <p>foo-<strong>(bar)</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto">foo-<strong>(bar)</strong></p>
- wysiwyg: |-
- <p>foo-<strong>(bar)</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__041:
- canonical: |
- <p>**foo bar **</p>
- static: |-
- <p data-sourcepos="1:1-1:12" dir="auto">**foo bar **</p>
- wysiwyg: |-
- <p>**foo bar **</p>
-06_05__inlines__emphasis_and_strong_emphasis__042:
- canonical: |
- <p>**(**foo)</p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto">**(**foo)</p>
- wysiwyg: |-
- <p>**(**foo)</p>
-06_05__inlines__emphasis_and_strong_emphasis__043:
- canonical: |
- <p><em>(<strong>foo</strong>)</em></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"><em>(<strong>foo</strong>)</em></p>
- wysiwyg: |-
- <p><em>(</em><strong>foo</strong>)</p>
-06_05__inlines__emphasis_and_strong_emphasis__044:
- canonical: |
- <p><strong>Gomphocarpus (<em>Gomphocarpus physocarpus</em>, syn.
- <em>Asclepias physocarpa</em>)</strong></p>
- static: |-
- <p data-sourcepos="1:1-2:25" dir="auto"><strong>Gomphocarpus (<em>Gomphocarpus physocarpus</em>, syn.
- <em>Asclepias physocarpa</em>)</strong></p>
- wysiwyg: |-
- <p><strong>Gomphocarpus (</strong><em>Gomphocarpus physocarpus</em>, syn.
- <em>Asclepias physocarpa</em>)</p>
-06_05__inlines__emphasis_and_strong_emphasis__045:
- canonical: |
- <p><strong>foo &quot;<em>bar</em>&quot; foo</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:19" dir="auto"><strong>foo "<em>bar</em>" foo</strong></p>
- wysiwyg: |-
- <p><strong>foo "</strong><em>bar</em>" foo</p>
-06_05__inlines__emphasis_and_strong_emphasis__046:
- canonical: |
- <p><strong>foo</strong>bar</p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto"><strong>foo</strong>bar</p>
- wysiwyg: |-
- <p><strong>foo</strong>bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__047:
- canonical: |
- <p>__foo bar __</p>
- static: |-
- <p data-sourcepos="1:1-1:12" dir="auto">__foo bar __</p>
- wysiwyg: |-
- <p>__foo bar __</p>
-06_05__inlines__emphasis_and_strong_emphasis__048:
- canonical: |
- <p>__(__foo)</p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto">__(__foo)</p>
- wysiwyg: |-
- <p>__(__foo)</p>
-06_05__inlines__emphasis_and_strong_emphasis__049:
- canonical: |
- <p><em>(<strong>foo</strong>)</em></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"><em>(<strong>foo</strong>)</em></p>
- wysiwyg: |-
- <p><em>(</em><strong>foo</strong>)</p>
-06_05__inlines__emphasis_and_strong_emphasis__050:
- canonical: |
- <p>__foo__bar</p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto">__foo__bar</p>
- wysiwyg: |-
- <p>__foo__bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__051:
- canonical: |
- <p>__пристаням__стремятся</p>
- static: |-
- <p data-sourcepos="1:1-1:40" dir="auto">__пристаням__стремятся</p>
- wysiwyg: |-
- <p>__пристаням__стремятся</p>
-06_05__inlines__emphasis_and_strong_emphasis__052:
- canonical: |
- <p><strong>foo__bar__baz</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto"><strong>foo__bar__baz</strong></p>
- wysiwyg: |-
- <p><strong>foo__bar__baz</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__053:
- canonical: |
- <p><strong>(bar)</strong>.</p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto"><strong>(bar)</strong>.</p>
- wysiwyg: |-
- <p><strong>(bar)</strong>.</p>
-06_05__inlines__emphasis_and_strong_emphasis__054:
- canonical: |
- <p><em>foo <a href="/url">bar</a></em></p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto"><em>foo <a href="/url">bar</a></em></p>
- wysiwyg: |-
- <p><em>foo </em><a target="_blank" rel="noopener noreferrer nofollow" href="/url">bar</a></p>
-06_05__inlines__emphasis_and_strong_emphasis__055:
- canonical: |
- <p><em>foo
- bar</em></p>
- static: |-
- <p data-sourcepos="1:1-2:4" dir="auto"><em>foo
- bar</em></p>
- wysiwyg: |-
- <p><em>foo
- bar</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__056:
- canonical: |
- <p><em>foo <strong>bar</strong> baz</em></p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto"><em>foo <strong>bar</strong> baz</em></p>
- wysiwyg: |-
- <p><em>foo </em><strong>bar</strong> baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__057:
- canonical: |
- <p><em>foo <em>bar</em> baz</em></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><em>foo <em>bar</em> baz</em></p>
- wysiwyg: |-
- <p><em>foo bar</em> baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__058:
- canonical: |
- <p><em><em>foo</em> bar</em></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"><em><em>foo</em> bar</em></p>
- wysiwyg: |-
- <p><em>foo</em> bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__059:
- canonical: |
- <p><em>foo <em>bar</em></em></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"><em>foo <em>bar</em></em></p>
- wysiwyg: |-
- <p><em>foo bar</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__060:
- canonical: |
- <p><em>foo <strong>bar</strong> baz</em></p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto"><em>foo <strong>bar</strong> baz</em></p>
- wysiwyg: |-
- <p><em>foo </em><strong>bar</strong> baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__061:
- canonical: |
- <p><em>foo<strong>bar</strong>baz</em></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><em>foo<strong>bar</strong>baz</em></p>
- wysiwyg: |-
- <p><em>foo</em><strong>bar</strong>baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__062:
- canonical: |
- <p><em>foo**bar</em></p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto"><em>foo**bar</em></p>
- wysiwyg: |-
- <p><em>foo**bar</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__063:
- canonical: |
- <p><em><strong>foo</strong> bar</em></p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto"><em><strong>foo</strong> bar</em></p>
- wysiwyg: |-
- <p><strong><em>foo</em></strong> bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__064:
- canonical: |
- <p><em>foo <strong>bar</strong></em></p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto"><em>foo <strong>bar</strong></em></p>
- wysiwyg: |-
- <p><em>foo </em><strong>bar</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__065:
- canonical: |
- <p><em>foo<strong>bar</strong></em></p>
- static: |-
- <p data-sourcepos="1:1-1:12" dir="auto"><em>foo<strong>bar</strong></em></p>
- wysiwyg: |-
- <p><em>foo</em><strong>bar</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__066:
- canonical: |
- <p>foo<em><strong>bar</strong></em>baz</p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto">foo<em><strong>bar</strong></em>baz</p>
- wysiwyg: |-
- <p>foo<strong><em>bar</em></strong>baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__067:
- canonical: |
- <p>foo<strong><strong><strong>bar</strong></strong></strong>***baz</p>
- static: |-
- <p data-sourcepos="1:1-1:24" dir="auto">foo<strong><strong><strong>bar</strong></strong></strong>***baz</p>
- wysiwyg: |-
- <p>foo<strong>bar</strong>***baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__068:
- canonical: |
- <p><em>foo <strong>bar <em>baz</em> bim</strong> bop</em></p>
- static: |-
- <p data-sourcepos="1:1-1:27" dir="auto"><em>foo <strong>bar <em>baz</em> bim</strong> bop</em></p>
- wysiwyg: |-
- <p><em>foo </em><strong>bar </strong><em>baz</em> bim bop</p>
-06_05__inlines__emphasis_and_strong_emphasis__069:
- canonical: |
- <p><em>foo <a href="/url"><em>bar</em></a></em></p>
- static: |-
- <p data-sourcepos="1:1-1:19" dir="auto"><em>foo <a href="/url"><em>bar</em></a></em></p>
- wysiwyg: |-
- <p><em>foo </em><a target="_blank" rel="noopener noreferrer nofollow" href="/url"><em>bar</em></a></p>
-06_05__inlines__emphasis_and_strong_emphasis__070:
- canonical: |
- <p>** is not an empty emphasis</p>
- static: |-
- <p data-sourcepos="1:1-1:27" dir="auto">** is not an empty emphasis</p>
- wysiwyg: |-
- <p>** is not an empty emphasis</p>
-06_05__inlines__emphasis_and_strong_emphasis__071:
- canonical: |
- <p>**** is not an empty strong emphasis</p>
- static: |-
- <p data-sourcepos="1:1-1:36" dir="auto">**** is not an empty strong emphasis</p>
- wysiwyg: |-
- <p>**** is not an empty strong emphasis</p>
-06_05__inlines__emphasis_and_strong_emphasis__072:
- canonical: |
- <p><strong>foo <a href="/url">bar</a></strong></p>
- static: |-
- <p data-sourcepos="1:1-1:19" dir="auto"><strong>foo <a href="/url">bar</a></strong></p>
- wysiwyg: |-
- <p><strong>foo </strong><a target="_blank" rel="noopener noreferrer nofollow" href="/url">bar</a></p>
-06_05__inlines__emphasis_and_strong_emphasis__073:
- canonical: |
- <p><strong>foo
- bar</strong></p>
- static: |-
- <p data-sourcepos="1:1-2:5" dir="auto"><strong>foo
- bar</strong></p>
- wysiwyg: |-
- <p><strong>foo
- bar</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__074:
- canonical: |
- <p><strong>foo <em>bar</em> baz</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto"><strong>foo <em>bar</em> baz</strong></p>
- wysiwyg: |-
- <p><strong>foo </strong><em>bar</em> baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__075:
- canonical: |
- <p><strong>foo <strong>bar</strong> baz</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:19" dir="auto"><strong>foo <strong>bar</strong> baz</strong></p>
- wysiwyg: |-
- <p><strong>foo bar</strong> baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__076:
- canonical: |
- <p><strong><strong>foo</strong> bar</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><strong><strong>foo</strong> bar</strong></p>
- wysiwyg: |-
- <p><strong>foo</strong> bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__077:
- canonical: |
- <p><strong>foo <strong>bar</strong></strong></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><strong>foo <strong>bar</strong></strong></p>
- wysiwyg: |-
- <p><strong>foo bar</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__078:
- canonical: |
- <p><strong>foo <em>bar</em> baz</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto"><strong>foo <em>bar</em> baz</strong></p>
- wysiwyg: |-
- <p><strong>foo </strong><em>bar</em> baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__079:
- canonical: |
- <p><strong>foo<em>bar</em>baz</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><strong>foo<em>bar</em>baz</strong></p>
- wysiwyg: |-
- <p><strong>foo</strong><em>bar</em>baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__080:
- canonical: |
- <p><strong><em>foo</em> bar</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto"><strong><em>foo</em> bar</strong></p>
- wysiwyg: |-
- <p><strong><em>foo</em></strong> bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__081:
- canonical: |
- <p><strong>foo <em>bar</em></strong></p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto"><strong>foo <em>bar</em></strong></p>
- wysiwyg: |-
- <p><strong>foo </strong><em>bar</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__082:
- canonical: |
- <p><strong>foo <em>bar <strong>baz</strong>
- bim</em> bop</strong></p>
- static: |-
- <p data-sourcepos="1:1-2:10" dir="auto"><strong>foo <em>bar <strong>baz</strong>
- bim</em> bop</strong></p>
- wysiwyg: |-
- <p><strong>foo </strong><em>bar </em><strong>baz</strong>
- bim bop</p>
-06_05__inlines__emphasis_and_strong_emphasis__083:
- canonical: |
- <p><strong>foo <a href="/url"><em>bar</em></a></strong></p>
- static: |-
- <p data-sourcepos="1:1-1:21" dir="auto"><strong>foo <a href="/url"><em>bar</em></a></strong></p>
- wysiwyg: |-
- <p><strong>foo </strong><a target="_blank" rel="noopener noreferrer nofollow" href="/url"><em>bar</em></a></p>
-06_05__inlines__emphasis_and_strong_emphasis__084:
- canonical: |
- <p>__ is not an empty emphasis</p>
- static: |-
- <p data-sourcepos="1:1-1:27" dir="auto">__ is not an empty emphasis</p>
- wysiwyg: |-
- <p>__ is not an empty emphasis</p>
-06_05__inlines__emphasis_and_strong_emphasis__085:
- canonical: |
- <p>____ is not an empty strong emphasis</p>
- static: |-
- <p data-sourcepos="1:1-1:36" dir="auto">____ is not an empty strong emphasis</p>
- wysiwyg: |-
- <p>____ is not an empty strong emphasis</p>
-06_05__inlines__emphasis_and_strong_emphasis__086:
- canonical: |
- <p>foo ***</p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto">foo ***</p>
- wysiwyg: |-
- <p>foo ***</p>
-06_05__inlines__emphasis_and_strong_emphasis__087:
- canonical: |
- <p>foo <em>*</em></p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto">foo <em>*</em></p>
- wysiwyg: |-
- <p>foo <em>*</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__088:
- canonical: |
- <p>foo <em>_</em></p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto">foo <em>_</em></p>
- wysiwyg: |-
- <p>foo <em>_</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__089:
- canonical: |
- <p>foo *****</p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto">foo *****</p>
- wysiwyg: |-
- <p>foo *****</p>
-06_05__inlines__emphasis_and_strong_emphasis__090:
- canonical: |
- <p>foo <strong>*</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto">foo <strong>*</strong></p>
- wysiwyg: |-
- <p>foo <strong>*</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__091:
- canonical: |
- <p>foo <strong>_</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto">foo <strong>_</strong></p>
- wysiwyg: |-
- <p>foo <strong>_</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__092:
- canonical: |
- <p>*<em>foo</em></p>
- static: |-
- <p data-sourcepos="1:1-1:6" dir="auto">*<em>foo</em></p>
- wysiwyg: |-
- <p>*<em>foo</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__093:
- canonical: |
- <p><em>foo</em>*</p>
- static: |-
- <p data-sourcepos="1:1-1:6" dir="auto"><em>foo</em>*</p>
- wysiwyg: |-
- <p><em>foo</em>*</p>
-06_05__inlines__emphasis_and_strong_emphasis__094:
- canonical: |
- <p>*<strong>foo</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto">*<strong>foo</strong></p>
- wysiwyg: |-
- <p>*<strong>foo</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__095:
- canonical: |
- <p>***<em>foo</em></p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto">***<em>foo</em></p>
- wysiwyg: |-
- <p>***<em>foo</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__096:
- canonical: |
- <p><strong>foo</strong>*</p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto"><strong>foo</strong>*</p>
- wysiwyg: |-
- <p><strong>foo</strong>*</p>
-06_05__inlines__emphasis_and_strong_emphasis__097:
- canonical: |
- <p><em>foo</em>***</p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto"><em>foo</em>***</p>
- wysiwyg: |-
- <p><em>foo</em>***</p>
-06_05__inlines__emphasis_and_strong_emphasis__098:
- canonical: |
- <p>foo ___</p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto">foo ___</p>
- wysiwyg: |-
- <p>foo ___</p>
-06_05__inlines__emphasis_and_strong_emphasis__099:
- canonical: |
- <p>foo <em>_</em></p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto">foo <em>_</em></p>
- wysiwyg: |-
- <p>foo <em>_</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__100:
- canonical: |
- <p>foo <em>*</em></p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto">foo <em>*</em></p>
- wysiwyg: |-
- <p>foo <em>*</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__101:
- canonical: |
- <p>foo _____</p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto">foo _____</p>
- wysiwyg: |-
- <p>foo _____</p>
-06_05__inlines__emphasis_and_strong_emphasis__102:
- canonical: |
- <p>foo <strong>_</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto">foo <strong>_</strong></p>
- wysiwyg: |-
- <p>foo <strong>_</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__103:
- canonical: |
- <p>foo <strong>*</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto">foo <strong>*</strong></p>
- wysiwyg: |-
- <p>foo <strong>*</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__104:
- canonical: |
- <p>_<em>foo</em></p>
- static: |-
- <p data-sourcepos="1:1-1:6" dir="auto">_<em>foo</em></p>
- wysiwyg: |-
- <p>_<em>foo</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__105:
- canonical: |
- <p><em>foo</em>_</p>
- static: |-
- <p data-sourcepos="1:1-1:6" dir="auto"><em>foo</em>_</p>
- wysiwyg: |-
- <p><em>foo</em>_</p>
-06_05__inlines__emphasis_and_strong_emphasis__106:
- canonical: |
- <p>_<strong>foo</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto">_<strong>foo</strong></p>
- wysiwyg: |-
- <p>_<strong>foo</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__107:
- canonical: |
- <p>___<em>foo</em></p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto">___<em>foo</em></p>
- wysiwyg: |-
- <p>___<em>foo</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__108:
- canonical: |
- <p><strong>foo</strong>_</p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto"><strong>foo</strong>_</p>
- wysiwyg: |-
- <p><strong>foo</strong>_</p>
-06_05__inlines__emphasis_and_strong_emphasis__109:
- canonical: |
- <p><em>foo</em>___</p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto"><em>foo</em>___</p>
- wysiwyg: |-
- <p><em>foo</em>___</p>
-06_05__inlines__emphasis_and_strong_emphasis__110:
- canonical: |
- <p><strong>foo</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto"><strong>foo</strong></p>
- wysiwyg: |-
- <p><strong>foo</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__111:
- canonical: |
- <p><em><em>foo</em></em></p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto"><em><em>foo</em></em></p>
- wysiwyg: |-
- <p><em>foo</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__112:
- canonical: |
- <p><strong>foo</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto"><strong>foo</strong></p>
- wysiwyg: |-
- <p><strong>foo</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__113:
- canonical: |
- <p><em><em>foo</em></em></p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto"><em><em>foo</em></em></p>
- wysiwyg: |-
- <p><em>foo</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__114:
- canonical: |
- <p><strong><strong>foo</strong></strong></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"><strong><strong>foo</strong></strong></p>
- wysiwyg: |-
- <p><strong>foo</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__115:
- canonical: |
- <p><strong><strong>foo</strong></strong></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"><strong><strong>foo</strong></strong></p>
- wysiwyg: |-
- <p><strong>foo</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__116:
- canonical: |
- <p><strong><strong><strong>foo</strong></strong></strong></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><strong><strong><strong>foo</strong></strong></strong></p>
- wysiwyg: |-
- <p><strong>foo</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__117:
- canonical: |
- <p><em><strong>foo</strong></em></p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto"><em><strong>foo</strong></em></p>
- wysiwyg: |-
- <p><strong><em>foo</em></strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__118:
- canonical: |
- <p><em><strong><strong>foo</strong></strong></em></p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto"><em><strong><strong>foo</strong></strong></em></p>
- wysiwyg: |-
- <p><strong><em>foo</em></strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__119:
- canonical: |
- <p><em>foo _bar</em> baz_</p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><em>foo _bar</em> baz_</p>
- wysiwyg: |-
- <p><em>foo _bar</em> baz_</p>
-06_05__inlines__emphasis_and_strong_emphasis__120:
- canonical: |
- <p><em>foo <strong>bar *baz bim</strong> bam</em></p>
- static: |-
- <p data-sourcepos="1:1-1:26" dir="auto"><em>foo <strong>bar *baz bim</strong> bam</em></p>
- wysiwyg: |-
- <p><em>foo </em><strong>bar *baz bim</strong> bam</p>
-06_05__inlines__emphasis_and_strong_emphasis__121:
- canonical: |
- <p>**foo <strong>bar baz</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto">**foo <strong>bar baz</strong></p>
- wysiwyg: |-
- <p>**foo <strong>bar baz</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__122:
- canonical: |
- <p>*foo <em>bar baz</em></p>
- static: |-
- <p data-sourcepos="1:1-1:14" dir="auto">*foo <em>bar baz</em></p>
- wysiwyg: |-
- <p>*foo <em>bar baz</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__123:
- canonical: |
- <p>*<a href="/url">bar*</a></p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto">*<a href="/url">bar*</a></p>
- wysiwyg: |-
- <p>*<a target="_blank" rel="noopener noreferrer nofollow" href="/url">bar*</a></p>
-06_05__inlines__emphasis_and_strong_emphasis__124:
- canonical: |
- <p>_foo <a href="/url">bar_</a></p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto">_foo <a href="/url">bar_</a></p>
- wysiwyg: |-
- <p>_foo <a target="_blank" rel="noopener noreferrer nofollow" href="/url">bar_</a></p>
-06_05__inlines__emphasis_and_strong_emphasis__125:
- canonical: |
- <p>*<img src="foo" title="*"/></p>
- static: |-
- <p data-sourcepos="1:1-1:27" dir="auto">*<a class="no-attachment-icon" href="foo" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" title="*" decoding="async" class="lazy" data-src="foo"></a></p>
- wysiwyg: |-
- <p>*<img src="foo" title="*"></p>
-06_05__inlines__emphasis_and_strong_emphasis__126:
- canonical: |
- <p>**<a href="**"></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto">**<a href="**"></a></p>
- wysiwyg: |-
- <p>**</p>
-06_05__inlines__emphasis_and_strong_emphasis__127:
- canonical: |
- <p>__<a href="__"></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto">__<a href="__"></a></p>
- wysiwyg: |-
- <p>__</p>
-06_05__inlines__emphasis_and_strong_emphasis__128:
- canonical: |
- <p><em>a <code>*</code></em></p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto"><em>a <code>*</code></em></p>
- wysiwyg: |-
- <p><em>a </em><code>*</code></p>
-06_05__inlines__emphasis_and_strong_emphasis__129:
- canonical: |
- <p><em>a <code>_</code></em></p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto"><em>a <code>_</code></em></p>
- wysiwyg: |-
- <p><em>a </em><code>_</code></p>
-06_05__inlines__emphasis_and_strong_emphasis__130:
- canonical: |
- <p>**a<a href="http://foo.bar/?q=**">http://foo.bar/?q=**</a></p>
- static: |-
- <p data-sourcepos="1:1-1:25" dir="auto">**a<a href="http://foo.bar/?q=**" rel="nofollow noreferrer noopener" target="_blank">http://foo.bar/?q=**</a></p>
- wysiwyg: |-
- <p>**a<a target="_blank" rel="noopener noreferrer nofollow" href="http://foo.bar/?q=**">http://foo.bar/?q=**</a></p>
-06_05__inlines__emphasis_and_strong_emphasis__131:
- canonical: |
- <p>__a<a href="http://foo.bar/?q=__">http://foo.bar/?q=__</a></p>
- static: |-
- <p data-sourcepos="1:1-1:25" dir="auto">__a<a href="http://foo.bar/?q=__" rel="nofollow noreferrer noopener" target="_blank">http://foo.bar/?q=__</a></p>
- wysiwyg: |-
- <p>__a<a target="_blank" rel="noopener noreferrer nofollow" href="http://foo.bar/?q=__">http://foo.bar/?q=__</a></p>
-06_06__inlines__strikethrough_extension__001:
- canonical: |
- <p><del>Hi</del> Hello, world!</p>
- static: |-
- <p data-sourcepos="1:1-1:20" dir="auto"><del>Hi</del> Hello, world!</p>
- wysiwyg: |-
- <p><s>Hi</s> Hello, world!</p>
-06_06__inlines__strikethrough_extension__002:
- canonical: |
- <p>This ~~has a</p>
- <p>new paragraph~~.</p>
- static: |-
- <p data-sourcepos="1:1-1:12" dir="auto">This ~~has a</p>
- <p data-sourcepos="3:1-3:16" dir="auto">new paragraph~~.</p>
- wysiwyg: |-
- <p>This ~~has a</p>
-06_07__inlines__links__001:
- canonical: |
- <p><a href="/uri" title="title">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:20" dir="auto"><a href="/uri" title="title">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri" title="title">link</a></p>
-06_07__inlines__links__002:
- canonical: |
- <p><a href="/uri">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:12" dir="auto"><a href="/uri">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link</a></p>
-06_07__inlines__links__003:
- canonical: |
- <p><a href="">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto"><a href="">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="">link</a></p>
-06_07__inlines__links__004:
- canonical: |
- <p><a href="">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto"><a href="">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="">link</a></p>
-06_07__inlines__links__005:
- canonical: |
- <p>[link](/my uri)</p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><a href="/my%20uri">link</a></p>
- wysiwyg: |-
- <p>[link](/my uri)</p>
-06_07__inlines__links__006:
- canonical: |
- <p><a href="/my%20uri">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto"><a href="/my%20uri">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/my%20uri">link</a></p>
-06_07__inlines__links__007:
- canonical: |
- <p>[link](foo
- bar)</p>
- static: |-
- <p data-sourcepos="1:1-2:4" dir="auto">[link](foo
- bar)</p>
- wysiwyg: |-
- <p>[link](foo
- bar)</p>
-06_07__inlines__links__008:
- canonical: |
- <p>[link](<foo
- bar>)</p>
- static: |-
- <p data-sourcepos="1:1-2:5" dir="auto">[link]()</p>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "foo" not supported by this converter. Please, provide an specification.
-06_07__inlines__links__009:
- canonical: |
- <p><a href="b)c">a</a></p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto"><a href="b)c">a</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="b)c">a</a></p>
-06_07__inlines__links__010:
- canonical: |
- <p>[link](&lt;foo&gt;)</p>
- static: |-
- <p data-sourcepos="1:1-1:14" dir="auto"><a href="%3Cfoo%3E">link</a></p>
- wysiwyg: |-
- <p>[link](&lt;foo&gt;)</p>
-06_07__inlines__links__011:
- canonical: |
- <p>[a](&lt;b)c
- [a](&lt;b)c&gt;
- [a](<b>c)</p>
- static: |-
- <p data-sourcepos="1:1-3:9" dir="auto"><a href="%3Cb">a</a>c
- <a href="%3Cb">a</a>c&gt;
- [a](<b>c)</b></p>
- wysiwyg: |-
- <p>[a](&lt;b)c
- [a](&lt;b)c&gt;
- [a](<strong>c)</strong></p>
-06_07__inlines__links__012:
- canonical: |
- <p><a href="(foo)">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><a href="(foo)">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="(foo)">link</a></p>
-06_07__inlines__links__013:
- canonical: |
- <p><a href="foo(and(bar))">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:21" dir="auto"><a href="foo(and(bar))">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="foo(and(bar))">link</a></p>
-06_07__inlines__links__014:
- canonical: |
- <p><a href="foo(and(bar)">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:23" dir="auto"><a href="foo(and(bar)">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="foo(and(bar)">link</a></p>
-06_07__inlines__links__015:
- canonical: |
- <p><a href="foo(and(bar)">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:22" dir="auto"><a href="foo(and(bar)">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="foo(and(bar)">link</a></p>
-06_07__inlines__links__016:
- canonical: |
- <p><a href="foo):">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><a>link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="foo):">link</a></p>
-06_07__inlines__links__017:
- canonical: |
- <p><a href="#fragment">link</a></p>
- <p><a href="http://example.com#fragment">link</a></p>
- <p><a href="http://example.com?foo=3#frag">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto"><a href="#fragment">link</a></p>
- <p data-sourcepos="3:1-3:35" dir="auto"><a href="http://example.com#fragment" rel="nofollow noreferrer noopener" target="_blank">link</a></p>
- <p data-sourcepos="5:1-5:37" dir="auto"><a href="http://example.com?foo=3#frag" rel="nofollow noreferrer noopener" target="_blank">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="#fragment">link</a></p>
-06_07__inlines__links__018:
- canonical: |
- <p><a href="foo%5Cbar">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><a href="foo%5Cbar">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="foo%5Cbar">link</a></p>
-06_07__inlines__links__019:
- canonical: |
- <p><a href="foo%20b%C3%A4">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:21" dir="auto"><a href="foo%20b%C3%A4">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="foo%20b%C3%A4">link</a></p>
-06_07__inlines__links__020:
- canonical: |
- <p><a href="%22title%22">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><a href="%22title%22">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="%22title%22">link</a></p>
-06_07__inlines__links__021:
- canonical: |
- <p><a href="/url" title="title">link</a>
- <a href="/url" title="title">link</a>
- <a href="/url" title="title">link</a></p>
- static: |-
- <p data-sourcepos="1:1-3:20" dir="auto"><a href="/url" title="title">link</a>
- <a href="/url" title="title">link</a>
- <a href="/url" title="title">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">linklinklink</a></p>
-06_07__inlines__links__022:
- canonical: |
- <p><a href="/url" title="title &quot;&quot;">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:29" dir="auto"><a href="/url" title='title ""'>link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title &quot;&quot;">link</a></p>
-06_07__inlines__links__023:
- canonical: |
- <p><a href="/url%C2%A0%22title%22">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:21" dir="auto"><a href="/url%C2%A0%22title%22">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url%C2%A0%22title%22">link</a></p>
-06_07__inlines__links__024:
- canonical: |
- <p>[link](/url &quot;title &quot;and&quot; title&quot;)</p>
- static: |-
- <p data-sourcepos="1:1-1:32" dir="auto">[link](/url "title "and" title")</p>
- wysiwyg: |-
- <p>[link](/url "title "and" title")</p>
-06_07__inlines__links__025:
- canonical: |
- <p><a href="/url" title="title &quot;and&quot; title">link</a></p>
- static: |-
- <p data-sourcepos="1:1-1:32" dir="auto"><a href="/url" title='title "and" title'>link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title &quot;and&quot; title">link</a></p>
-06_07__inlines__links__026:
- canonical: |
- <p><a href="/uri" title="title">link</a></p>
- static: |-
- <p data-sourcepos="1:1-2:12" dir="auto"><a href="/uri" title="title">link</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri" title="title">link</a></p>
-06_07__inlines__links__027:
- canonical: |
- <p>[link] (/uri)</p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto">[link] (/uri)</p>
- wysiwyg: |-
- <p>[link] (/uri)</p>
-06_07__inlines__links__028:
- canonical: |
- <p><a href="/uri">link [foo [bar]]</a></p>
- static: |-
- <p data-sourcepos="1:1-1:24" dir="auto"><a href="/uri">link [foo [bar]]</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link [foo [bar]]</a></p>
-06_07__inlines__links__029:
- canonical: |
- <p>[link] bar](/uri)</p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto">[link] bar](/uri)</p>
- wysiwyg: |-
- <p>[link] bar](/uri)</p>
-06_07__inlines__links__030:
- canonical: |
- <p>[link <a href="/uri">bar</a></p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto">[link <a href="/uri">bar</a></p>
- wysiwyg: |-
- <p>[link <a target="_blank" rel="noopener noreferrer nofollow" href="/uri">bar</a></p>
-06_07__inlines__links__031:
- canonical: |
- <p><a href="/uri">link [bar</a></p>
- static: |-
- <p data-sourcepos="1:1-1:18" dir="auto"><a href="/uri">link [bar</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link [bar</a></p>
-06_07__inlines__links__032:
- canonical: |
- <p><a href="/uri">link <em>foo <strong>bar</strong> <code>#</code></em></a></p>
- static: |-
- <p data-sourcepos="1:1-1:30" dir="auto"><a href="/uri">link <em>foo <strong>bar</strong> <code>#</code></em></a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link </a><em>foo </em><strong>bar</strong><code>#</code></p>
-06_07__inlines__links__033:
- canonical: |
- <p><a href="/uri"><img src="moon.jpg" alt="moon" /></a></p>
- static: |-
- <p data-sourcepos="1:1-1:25" dir="auto"><a href="/uri"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="moon" decoding="async" class="lazy" data-src="moon.jpg"></a></p>
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_07__inlines__links__034:
- canonical: |
- <p>[foo <a href="/uri">bar</a>](/uri)</p>
- static: |-
- <p data-sourcepos="1:1-1:23" dir="auto">[foo <a href="/uri">bar</a>](/uri)</p>
- wysiwyg: |-
- <p>[foo <a target="_blank" rel="noopener noreferrer nofollow" href="/uri">bar</a>](/uri)</p>
-06_07__inlines__links__035:
- canonical: |
- <p>[foo <em>[bar <a href="/uri">baz</a>](/uri)</em>](/uri)</p>
- static: |-
- <p data-sourcepos="1:1-1:37" dir="auto">[foo <em>[bar <a href="/uri">baz</a>](/uri)</em>](/uri)</p>
- wysiwyg: |-
- <p>[foo <em>[bar </em><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">baz</a>](/uri)](/uri)</p>
-06_07__inlines__links__036:
- canonical: |
- <p><img src="uri3" alt="[foo](uri2)" /></p>
- static: |-
- <p data-sourcepos="1:1-1:28" dir="auto"><a class="no-attachment-icon" href="uri3" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="[foo](uri2)" decoding="async" class="lazy" data-src="uri3"></a></p>
- wysiwyg: |-
- <p><img src="uri3" alt="[foo](uri2)"></p>
-06_07__inlines__links__037:
- canonical: |
- <p>*<a href="/uri">foo*</a></p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto">*<a href="/uri">foo*</a></p>
- wysiwyg: |-
- <p>*<a target="_blank" rel="noopener noreferrer nofollow" href="/uri">foo*</a></p>
-06_07__inlines__links__038:
- canonical: |
- <p><a href="baz*">foo *bar</a></p>
- static: |-
- <p data-sourcepos="1:1-1:16" dir="auto"><a href="baz*">foo *bar</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="baz*">foo *bar</a></p>
-06_07__inlines__links__039:
- canonical: |
- <p><em>foo [bar</em> baz]</p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><em>foo [bar</em> baz]</p>
- wysiwyg: |-
- <p><em>foo [bar</em> baz]</p>
-06_07__inlines__links__040:
- canonical: |
- <p>[foo <bar attr="](baz)"></p>
- static: |-
- <p data-sourcepos="1:1-1:24" dir="auto">[foo </p>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "bar" not supported by this converter. Please, provide an specification.
-06_07__inlines__links__041:
- canonical: |
- <p>[foo<code>](/uri)</code></p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto">[foo<code>](/uri)</code></p>
- wysiwyg: |-
- <p>[foo<code>](/uri)</code></p>
-06_07__inlines__links__042:
- canonical: |
- <p>[foo<a href="http://example.com/?search=%5D(uri)">http://example.com/?search=](uri)</a></p>
- static: |-
- <p data-sourcepos="1:1-1:39" dir="auto">[foo<a href="http://example.com/?search=%5D(uri)" rel="nofollow noreferrer noopener" target="_blank">http://example.com/?search=](uri)</a></p>
- wysiwyg: |-
- <p>[foo<a target="_blank" rel="noopener noreferrer nofollow" href="http://example.com/?search=%5D(uri)">http://example.com/?search=](uri)</a></p>
-06_07__inlines__links__043:
- canonical: |
- <p><a href="/url" title="title">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto"><a href="/url" title="title">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">foo</a></p>
-06_07__inlines__links__044:
- canonical: |
- <p><a href="/uri">link [foo [bar]]</a></p>
- static: |-
- <p data-sourcepos="1:1-1:23" dir="auto"><a href="/uri">link [foo [bar]]</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link [foo [bar]]</a></p>
-06_07__inlines__links__045:
- canonical: |
- <p><a href="/uri">link [bar</a></p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto"><a href="/uri">link [bar</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link [bar</a></p>
-06_07__inlines__links__046:
- canonical: |
- <p><a href="/uri">link <em>foo <strong>bar</strong> <code>#</code></em></a></p>
- static: |-
- <p data-sourcepos="1:1-1:29" dir="auto"><a href="/uri">link <em>foo <strong>bar</strong> <code>#</code></em></a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link </a><em>foo </em><strong>bar</strong><code>#</code></p>
-06_07__inlines__links__047:
- canonical: |
- <p><a href="/uri"><img src="moon.jpg" alt="moon" /></a></p>
- static: |-
- <p data-sourcepos="1:1-1:24" dir="auto"><a href="/uri"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="moon" decoding="async" class="lazy" data-src="moon.jpg"></a></p>
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_07__inlines__links__048:
- canonical: |
- <p>[foo <a href="/uri">bar</a>]<a href="/uri">ref</a></p>
- static: |-
- <p data-sourcepos="1:1-1:22" dir="auto">[foo <a href="/uri">bar</a>]<a href="/uri">ref</a></p>
- wysiwyg: |-
- <p>[foo <a target="_blank" rel="noopener noreferrer nofollow" href="/uri">bar</a>]<a target="_blank" rel="noopener noreferrer nofollow" href="/uri">ref</a></p>
-06_07__inlines__links__049:
- canonical: |
- <p>[foo <em>bar <a href="/uri">baz</a></em>]<a href="/uri">ref</a></p>
- static: |-
- <p data-sourcepos="1:1-1:27" dir="auto">[foo <em>bar <a href="/uri">baz</a></em>]<a href="/uri">ref</a></p>
- wysiwyg: |-
- <p>[foo <em>bar </em><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">baz</a>]<a target="_blank" rel="noopener noreferrer nofollow" href="/uri">ref</a></p>
-06_07__inlines__links__050:
- canonical: |
- <p>*<a href="/uri">foo*</a></p>
- static: |-
- <p data-sourcepos="1:1-1:12" dir="auto">*<a href="/uri">foo*</a></p>
- wysiwyg: |-
- <p>*<a target="_blank" rel="noopener noreferrer nofollow" href="/uri">foo*</a></p>
-06_07__inlines__links__051:
- canonical: |
- <p><a href="/uri">foo *bar</a></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><a href="/uri">foo *bar</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">foo *bar</a></p>
-06_07__inlines__links__052:
- canonical: |
- <p>[foo <bar attr="][ref]"></p>
- static: |-
- <p data-sourcepos="1:1-1:24" dir="auto">[foo </p>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "bar" not supported by this converter. Please, provide an specification.
-06_07__inlines__links__053:
- canonical: |
- <p>[foo<code>][ref]</code></p>
- static: |-
- <p data-sourcepos="1:1-1:12" dir="auto">[foo<code>][ref]</code></p>
- wysiwyg: |-
- <p>[foo<code>][ref]</code></p>
-06_07__inlines__links__054:
- canonical: |
- <p>[foo<a href="http://example.com/?search=%5D%5Bref%5D">http://example.com/?search=][ref]</a></p>
- static: |-
- <p data-sourcepos="1:1-1:39" dir="auto">[foo<a href="http://example.com/?search=%5D%5Bref%5D" rel="nofollow noreferrer noopener" target="_blank">http://example.com/?search=][ref]</a></p>
- wysiwyg: |-
- <p>[foo<a target="_blank" rel="noopener noreferrer nofollow" href="http://example.com/?search=%5D%5Bref%5D">http://example.com/?search=][ref]</a></p>
-06_07__inlines__links__055:
- canonical: |
- <p><a href="/url" title="title">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto"><a href="/url" title="title">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">foo</a></p>
-06_07__inlines__links__056:
- canonical: |
- <p><a href="/url">Толпой</a> is a Russian word.</p>
- static: |-
- <p data-sourcepos="1:1-1:47" dir="auto"><a href="/url">Толпой</a> is a Russian word.</p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url">Толпой</a> is a Russian word.</p>
-06_07__inlines__links__057:
- canonical: |
- <p><a href="/url">Baz</a></p>
- static: |-
- <p data-sourcepos="4:1-4:14" dir="auto"><a href="/url">Baz</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url">Baz</a></p>
-06_07__inlines__links__058:
- canonical: |
- <p>[foo] <a href="/url" title="title">bar</a></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto">[foo] <a href="/url" title="title">bar</a></p>
- wysiwyg: |-
- <p>[foo] <a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">bar</a></p>
-06_07__inlines__links__059:
- canonical: |
- <p>[foo]
- <a href="/url" title="title">bar</a></p>
- static: |-
- <p data-sourcepos="1:1-2:5" dir="auto">[foo]
- <a href="/url" title="title">bar</a></p>
- wysiwyg: |-
- <p>[foo]
- <a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">bar</a></p>
-06_07__inlines__links__060:
- canonical: |
- <p><a href="/url1">bar</a></p>
- static: |-
- <p data-sourcepos="5:1-5:10" dir="auto"><a href="/url1">bar</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url1">bar</a></p>
-06_07__inlines__links__061:
- canonical: |
- <p>[bar][foo!]</p>
- static: |-
- <p data-sourcepos="1:1-1:32" dir="auto">[bar][foo<span>!</span>]</p>
- wysiwyg: |-
- <p>[bar][foo!]</p>
-06_07__inlines__links__062:
- canonical: |
- <p>[foo][ref[]</p>
- <p>[ref[]: /uri</p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto">[foo][ref[]</p>
- <p data-sourcepos="3:1-3:12" dir="auto">[ref[]: /uri</p>
- wysiwyg: |-
- <p>[foo][ref[]</p>
-06_07__inlines__links__063:
- canonical: |
- <p>[foo][ref[bar]]</p>
- <p>[ref[bar]]: /uri</p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto">[foo][ref[bar]]</p>
- <p data-sourcepos="3:1-3:16" dir="auto">[ref[bar]]: /uri</p>
- wysiwyg: |-
- <p>[foo][ref[bar]]</p>
-06_07__inlines__links__064:
- canonical: |
- <p>[[[foo]]]</p>
- <p>[[[foo]]]: /url</p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto">[[[foo]]]</p>
- <p data-sourcepos="3:1-3:15" dir="auto">[[[foo]]]: /url</p>
- wysiwyg: |-
- <p>[[[foo]]]</p>
-06_07__inlines__links__065:
- canonical: |
- <p><a href="/uri">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:12" dir="auto"><a href="/uri">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">foo</a></p>
-06_07__inlines__links__066:
- canonical: |
- <p><a href="/uri">bar\</a></p>
- static: |-
- <p data-sourcepos="3:1-3:7" dir="auto"><a href="/uri">bar\</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">bar\</a></p>
-06_07__inlines__links__067:
- canonical: |
- <p>[]</p>
- <p>[]: /uri</p>
- static: |-
- <p data-sourcepos="1:1-1:2" dir="auto">[]</p>
- <p data-sourcepos="3:1-3:8" dir="auto">[]: /uri</p>
- wysiwyg: |-
- <p>[]</p>
-06_07__inlines__links__068:
- canonical: |
- <p>[
- ]</p>
- <p>[
- ]: /uri</p>
- static: |-
- <p data-sourcepos="1:1-2:2" dir="auto">[
- ]</p>
- <p data-sourcepos="4:1-5:8" dir="auto">[
- ]: /uri</p>
- wysiwyg: |-
- <p>[
- ]</p>
-06_07__inlines__links__069:
- canonical: |
- <p><a href="/url" title="title">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto"><a href="/url" title="title">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">foo</a></p>
-06_07__inlines__links__070:
- canonical: |
- <p><a href="/url" title="title"><em>foo</em> bar</a></p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto"><a href="/url" title="title"><em>foo</em> bar</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title"><em>foo</em></a> bar</p>
-06_07__inlines__links__071:
- canonical: |
- <p><a href="/url" title="title">Foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto"><a href="/url" title="title">Foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">Foo</a></p>
-06_07__inlines__links__072:
- canonical: |
- <p><a href="/url" title="title">foo</a>
- []</p>
- static: |-
- <p data-sourcepos="1:1-2:2" dir="auto"><a href="/url" title="title">foo</a>
- []</p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">foo</a>
- []</p>
-06_07__inlines__links__073:
- canonical: |
- <p><a href="/url" title="title">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:5" dir="auto"><a href="/url" title="title">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">foo</a></p>
-06_07__inlines__links__074:
- canonical: |
- <p><a href="/url" title="title"><em>foo</em> bar</a></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"><a href="/url" title="title"><em>foo</em> bar</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title"><em>foo</em></a> bar</p>
-06_07__inlines__links__075:
- canonical: |
- <p>[<a href="/url" title="title"><em>foo</em> bar</a>]</p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto">[<a href="/url" title="title"><em>foo</em> bar</a>]</p>
- wysiwyg: |-
- <p>[<a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title"><em>foo</em></a> bar]</p>
-06_07__inlines__links__076:
- canonical: |
- <p>[[bar <a href="/url">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto">[[bar <a href="/url">foo</a></p>
- wysiwyg: |-
- <p>[[bar <a target="_blank" rel="noopener noreferrer nofollow" href="/url">foo</a></p>
-06_07__inlines__links__077:
- canonical: |
- <p><a href="/url" title="title">Foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:5" dir="auto"><a href="/url" title="title">Foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">Foo</a></p>
-06_07__inlines__links__078:
- canonical: |
- <p><a href="/url">foo</a> bar</p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto"><a href="/url">foo</a> bar</p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url">foo</a> bar</p>
-06_07__inlines__links__079:
- canonical: |
- <p>[foo]</p>
- static: |-
- <p data-sourcepos="1:1-1:6" dir="auto">[foo]</p>
- wysiwyg: |-
- <p>[foo]</p>
-06_07__inlines__links__080:
- canonical: |
- <p>*<a href="/url">foo*</a></p>
- static: |-
- <p data-sourcepos="3:1-3:7" dir="auto">*<a href="/url">foo*</a></p>
- wysiwyg: |-
- <p>*<a target="_blank" rel="noopener noreferrer nofollow" href="/url">foo*</a></p>
-06_07__inlines__links__081:
- canonical: |
- <p><a href="/url2">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:10" dir="auto"><a href="/url2">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url2">foo</a></p>
-06_07__inlines__links__082:
- canonical: |
- <p><a href="/url1">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto"><a href="/url1">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url1">foo</a></p>
-06_07__inlines__links__083:
- canonical: |
- <p><a href="">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto"><a href="">foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="">foo</a></p>
-06_07__inlines__links__084:
- canonical: |
- <p><a href="/url1">foo</a>(not a link)</p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto"><a href="/url1">foo</a>(not a link)</p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url1">foo</a>(not a link)</p>
-06_07__inlines__links__085:
- canonical: |
- <p>[foo]<a href="/url">bar</a></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto">[foo]<a href="/url">bar</a></p>
- wysiwyg: |-
- <p>[foo]<a target="_blank" rel="noopener noreferrer nofollow" href="/url">bar</a></p>
-06_07__inlines__links__086:
- canonical: |
- <p><a href="/url2">foo</a><a href="/url1">baz</a></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto"><a href="/url2">foo</a><a href="/url1">baz</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/url2">foo</a><a target="_blank" rel="noopener noreferrer nofollow" href="/url1">baz</a></p>
-06_07__inlines__links__087:
- canonical: |
- <p>[foo]<a href="/url1">bar</a></p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto">[foo]<a href="/url1">bar</a></p>
- wysiwyg: |-
- <p>[foo]<a target="_blank" rel="noopener noreferrer nofollow" href="/url1">bar</a></p>
-06_08__inlines__images__001:
- canonical: |
- <p><img src="/url" alt="foo" title="title" /></p>
- static: |-
- <p data-sourcepos="1:1-1:20" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" title="title" decoding="async" class="lazy" data-src="/url"></a></p>
- wysiwyg: |-
- <p><img src="/url" alt="foo" title="title"></p>
-06_08__inlines__images__002:
- canonical: |
- <p><img src="train.jpg" alt="foo bar" title="train &amp; tracks" /></p>
- static: |-
- <p data-sourcepos="1:1-1:12" dir="auto"><a class="no-attachment-icon" href="train.jpg" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" title="train &amp; tracks" decoding="async" class="lazy" data-src="train.jpg"></a></p>
- wysiwyg: |-
- <p><img src="train.jpg" alt="foo bar" title="train &amp; tracks"></p>
-06_08__inlines__images__003:
- canonical: |
- <p><img src="/url2" alt="foo bar" /></p>
- static: |-
- <p data-sourcepos="1:1-1:26" dir="auto"><a class="no-attachment-icon" href="/url2" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" decoding="async" class="lazy" data-src="/url2"></a></p>
- wysiwyg: |-
- <p><img src="/url2" alt="foo bar"></p>
-06_08__inlines__images__004:
- canonical: |
- <p><img src="/url2" alt="foo bar" /></p>
- static: |-
- <p data-sourcepos="1:1-1:25" dir="auto"><a class="no-attachment-icon" href="/url2" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" decoding="async" class="lazy" data-src="/url2"></a></p>
- wysiwyg: |-
- <p><img src="/url2" alt="foo bar"></p>
-06_08__inlines__images__005:
- canonical: |
- <p><img src="train.jpg" alt="foo bar" title="train &amp; tracks" /></p>
- static: |-
- <p data-sourcepos="1:1-1:14" dir="auto"><a class="no-attachment-icon" href="train.jpg" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" title="train &amp; tracks" decoding="async" class="lazy" data-src="train.jpg"></a></p>
- wysiwyg: |-
- <p><img src="train.jpg" alt="foo bar" title="train &amp; tracks"></p>
-06_08__inlines__images__006:
- canonical: |
- <p><img src="train.jpg" alt="foo bar" title="train &amp; tracks" /></p>
- static: |-
- <p data-sourcepos="1:1-1:20" dir="auto"><a class="no-attachment-icon" href="train.jpg" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" title="train &amp; tracks" decoding="async" class="lazy" data-src="train.jpg"></a></p>
- wysiwyg: |-
- <p><img src="train.jpg" alt="foo bar" title="train &amp; tracks"></p>
-06_08__inlines__images__007:
- canonical: |
- <p><img src="train.jpg" alt="foo" /></p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto"><a class="no-attachment-icon" href="train.jpg" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" decoding="async" class="lazy" data-src="train.jpg"></a></p>
- wysiwyg: |-
- <p><img src="train.jpg" alt="foo"></p>
-06_08__inlines__images__008:
- canonical: |
- <p>My <img src="/path/to/train.jpg" alt="foo bar" title="title" /></p>
- static: |-
- <p data-sourcepos="1:1-1:45" dir="auto">My <a class="no-attachment-icon" href="/path/to/train.jpg" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" title="title" decoding="async" class="lazy" data-src="/path/to/train.jpg"></a></p>
- wysiwyg: |-
- <p>My <img src="/path/to/train.jpg" alt="foo bar" title="title"></p>
-06_08__inlines__images__009:
- canonical: |
- <p><img src="url" alt="foo" /></p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto"><a class="no-attachment-icon" href="url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" decoding="async" class="lazy" data-src="url"></a></p>
- wysiwyg: |-
- <p><img src="url" alt="foo"></p>
-06_08__inlines__images__010:
- canonical: |
- <p><img src="/url" alt="" /></p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="" decoding="async" class="lazy" data-src="/url"></a></p>
- wysiwyg: |-
- <p><img src="/url" alt=""></p>
-06_08__inlines__images__011:
- canonical: |
- <p><img src="/url" alt="foo" /></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" decoding="async" class="lazy" data-src="/url"></a></p>
- wysiwyg: |-
- <p><img src="/url" alt="foo"></p>
-06_08__inlines__images__012:
- canonical: |
- <p><img src="/url" alt="foo" /></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" decoding="async" class="lazy" data-src="/url"></a></p>
- wysiwyg: |-
- <p><img src="/url" alt="foo"></p>
-06_08__inlines__images__013:
- canonical: |
- <p><img src="/url" alt="foo" title="title" /></p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" title="title" decoding="async" class="lazy" data-src="/url"></a></p>
- wysiwyg: |-
- <p><img src="/url" alt="foo" title="title"></p>
-06_08__inlines__images__014:
- canonical: |
- <p><img src="/url" alt="foo bar" title="title" /></p>
- static: |-
- <p data-sourcepos="1:1-1:14" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" title="title" decoding="async" class="lazy" data-src="/url"></a></p>
- wysiwyg: |-
- <p><img src="/url" alt="foo bar" title="title"></p>
-06_08__inlines__images__015:
- canonical: |
- <p><img src="/url" alt="Foo" title="title" /></p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="Foo" title="title" decoding="async" class="lazy" data-src="/url"></a></p>
- wysiwyg: |-
- <p><img src="/url" alt="Foo" title="title"></p>
-06_08__inlines__images__016:
- canonical: |
- <p><img src="/url" alt="foo" title="title" />
- []</p>
- static: |-
- <p data-sourcepos="1:1-2:2" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" title="title" decoding="async" class="lazy" data-src="/url"></a>
- []</p>
- wysiwyg: |-
- <p><img src="/url" alt="foo" title="title">
- []</p>
-06_08__inlines__images__017:
- canonical: |
- <p><img src="/url" alt="foo" title="title" /></p>
- static: |-
- <p data-sourcepos="1:1-1:6" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" title="title" decoding="async" class="lazy" data-src="/url"></a></p>
- wysiwyg: |-
- <p><img src="/url" alt="foo" title="title"></p>
-06_08__inlines__images__018:
- canonical: |
- <p><img src="/url" alt="foo bar" title="title" /></p>
- static: |-
- <p data-sourcepos="1:1-1:12" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" title="title" decoding="async" class="lazy" data-src="/url"></a></p>
- wysiwyg: |-
- <p><img src="/url" alt="foo bar" title="title"></p>
-06_08__inlines__images__019:
- canonical: |
- <p>![[foo]]</p>
- <p>[[foo]]: /url &quot;title&quot;</p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto">![[foo]]</p>
- <p data-sourcepos="3:1-3:21" dir="auto">[[foo]]: /url "title"</p>
- wysiwyg: |-
- <p>![[foo]]</p>
-06_08__inlines__images__020:
- canonical: |
- <p><img src="/url" alt="Foo" title="title" /></p>
- static: |-
- <p data-sourcepos="1:1-1:6" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="Foo" title="title" decoding="async" class="lazy" data-src="/url"></a></p>
- wysiwyg: |-
- <p><img src="/url" alt="Foo" title="title"></p>
-06_08__inlines__images__021:
- canonical: |
- <p>![foo]</p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto">![foo]</p>
- wysiwyg: |-
- <p>![foo]</p>
-06_08__inlines__images__022:
- canonical: |
- <p>!<a href="/url" title="title">foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:27" dir="auto"><span>!</span><a href="/url" title="title">foo</a></p>
- wysiwyg: |-
- <p>!<a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">foo</a></p>
-06_09__inlines__autolinks__001:
- canonical: |
- <p><a href="http://foo.bar.baz">http://foo.bar.baz</a></p>
- static: |-
- <p data-sourcepos="1:1-1:20" dir="auto"><a href="http://foo.bar.baz" rel="nofollow noreferrer noopener" target="_blank">http://foo.bar.baz</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://foo.bar.baz">http://foo.bar.baz</a></p>
-06_09__inlines__autolinks__002:
- canonical: |
- <p><a href="http://foo.bar.baz/test?q=hello&amp;id=22&amp;boolean">http://foo.bar.baz/test?q=hello&amp;id=22&amp;boolean</a></p>
- static: |-
- <p data-sourcepos="1:1-1:47" dir="auto"><a href="http://foo.bar.baz/test?q=hello&amp;id=22&amp;boolean" rel="nofollow noreferrer noopener" target="_blank">http://foo.bar.baz/test?q=hello&amp;id=22&amp;boolean</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://foo.bar.baz/test?q=hello&amp;id=22&amp;boolean">http://foo.bar.baz/test?q=hello&amp;id=22&amp;boolean</a></p>
-06_09__inlines__autolinks__003:
- canonical: |
- <p><a href="irc://foo.bar:2233/baz">irc://foo.bar:2233/baz</a></p>
- static: |-
- <p data-sourcepos="1:1-1:24" dir="auto"><a href="irc://foo.bar:2233/baz">irc://foo.bar:2233/baz</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="irc://foo.bar:2233/baz">irc://foo.bar:2233/baz</a></p>
-06_09__inlines__autolinks__004:
- canonical: |
- <p><a href="MAILTO:FOO@BAR.BAZ">MAILTO:FOO@BAR.BAZ</a></p>
- static: |-
- <p data-sourcepos="1:1-1:20" dir="auto"><a href="mailto:FOO@BAR.BAZ">MAILTO:FOO@BAR.BAZ</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="MAILTO:FOO@BAR.BAZ">MAILTO:FOO@BAR.BAZ</a></p>
-06_09__inlines__autolinks__005:
- canonical: |
- <p><a href="a+b+c:d">a+b+c:d</a></p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto"><a href="a+b+c:d">a+b+c:d</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="a+b+c:d">a+b+c:d</a></p>
-06_09__inlines__autolinks__006:
- canonical: |
- <p><a href="made-up-scheme://foo,bar">made-up-scheme://foo,bar</a></p>
- static: |-
- <p data-sourcepos="1:1-1:26" dir="auto"><a href="made-up-scheme://foo,bar">made-up-scheme://foo,bar</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="made-up-scheme://foo,bar">made-up-scheme://foo,bar</a></p>
-06_09__inlines__autolinks__007:
- canonical: |
- <p><a href="http://../">http://../</a></p>
- static: |-
- <p data-sourcepos="1:1-1:12" dir="auto"><a href="http://../" rel="nofollow noreferrer noopener" target="_blank">http://../</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://../">http://../</a></p>
-06_09__inlines__autolinks__008:
- canonical: |
- <p><a href="localhost:5001/foo">localhost:5001/foo</a></p>
- static: |-
- <p data-sourcepos="1:1-1:20" dir="auto"><a href="localhost:5001/foo">localhost:5001/foo</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="localhost:5001/foo">localhost:5001/foo</a></p>
-06_09__inlines__autolinks__009:
- canonical: |
- <p>&lt;http://foo.bar/baz bim&gt;</p>
- static: |-
- <p data-sourcepos="1:1-1:24" dir="auto">&lt;<a href="http://foo.bar/baz" rel="nofollow noreferrer noopener" target="_blank">http://foo.bar/baz</a> bim&gt;</p>
- wysiwyg: |-
- <p>&lt;<a target="_blank" rel="noopener noreferrer nofollow" href="http://foo.bar/baz">http://foo.bar/baz</a> bim&gt;</p>
-06_09__inlines__autolinks__010:
- canonical: |
- <p><a href="http://example.com/%5C%5B%5C">http://example.com/\[\</a></p>
- static: |-
- <p data-sourcepos="1:1-1:24" dir="auto"><a href="http://example.com/%5C%5B%5C" rel="nofollow noreferrer noopener" target="_blank">http://example.com/\[\</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://example.com/%5C%5B%5C">http://example.com/\[\</a></p>
-06_09__inlines__autolinks__011:
- canonical: |
- <p><a href="mailto:foo@bar.example.com">foo@bar.example.com</a></p>
- static: |-
- <p data-sourcepos="1:1-1:21" dir="auto"><a href="mailto:foo@bar.example.com">foo@bar.example.com</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="mailto:foo@bar.example.com">foo@bar.example.com</a></p>
-06_09__inlines__autolinks__012:
- canonical: |
- <p><a href="mailto:foo+special@Bar.baz-bar0.com">foo+special@Bar.baz-bar0.com</a></p>
- static: |-
- <p data-sourcepos="1:1-1:30" dir="auto"><a href="mailto:foo+special@Bar.baz-bar0.com">foo+special@Bar.baz-bar0.com</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="mailto:foo+special@Bar.baz-bar0.com">foo+special@Bar.baz-bar0.com</a></p>
-06_09__inlines__autolinks__013:
- canonical: |
- <p>&lt;foo+@bar.example.com&gt;</p>
- static: |-
- <p data-sourcepos="1:1-1:23" dir="auto">&lt;<a href="mailto:foo+@bar.example.com">foo+@bar.example.com</a>&gt;</p>
- wysiwyg: |-
- Error - check implementation:
- Cannot read properties of undefined (reading 'end')
-06_09__inlines__autolinks__014:
- canonical: |
- <p>&lt;&gt;</p>
- static: |-
- <p data-sourcepos="1:1-1:2" dir="auto">&lt;&gt;</p>
- wysiwyg: |-
- <p>&lt;&gt;</p>
-06_09__inlines__autolinks__015:
- canonical: |
- <p>&lt; http://foo.bar &gt;</p>
- static: |-
- <p data-sourcepos="1:1-1:18" dir="auto">&lt; <a href="http://foo.bar" rel="nofollow noreferrer noopener" target="_blank">http://foo.bar</a> &gt;</p>
- wysiwyg: |-
- <p>&lt; <a target="_blank" rel="noopener noreferrer nofollow" href="http://foo.bar">http://foo.bar</a> &gt;</p>
-06_09__inlines__autolinks__016:
- canonical: |
- <p>&lt;m:abc&gt;</p>
- static: |-
- <p data-sourcepos="1:1-1:7" dir="auto">&lt;m:abc&gt;</p>
- wysiwyg: |-
- <p>&lt;m:abc&gt;</p>
-06_09__inlines__autolinks__017:
- canonical: |
- <p>&lt;foo.bar.baz&gt;</p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto">&lt;foo.bar.baz&gt;</p>
- wysiwyg: |-
- <p>&lt;foo.bar.baz&gt;</p>
-06_09__inlines__autolinks__018:
- canonical: |
- <p>http://example.com</p>
- static: |-
- <p data-sourcepos="1:1-1:18" dir="auto"><a href="http://example.com" rel="nofollow noreferrer noopener" target="_blank">http://example.com</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://example.com">http://example.com</a></p>
-06_09__inlines__autolinks__019:
- canonical: |
- <p>foo@bar.example.com</p>
- static: |-
- <p data-sourcepos="1:1-1:19" dir="auto"><a href="mailto:foo@bar.example.com">foo@bar.example.com</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="mailto:foo@bar.example.com">foo@bar.example.com</a></p>
-06_10__inlines__autolinks_extension__001:
- canonical: |
- <p><a href="http://www.commonmark.org">www.commonmark.org</a></p>
- static: |-
- <p data-sourcepos="1:1-1:18" dir="auto"><a href="http://www.commonmark.org" rel="nofollow noreferrer noopener" target="_blank">www.commonmark.org</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://www.commonmark.org">www.commonmark.org</a></p>
-06_10__inlines__autolinks_extension__002:
- canonical: |
- <p>Visit <a href="http://www.commonmark.org/help">www.commonmark.org/help</a> for more information.</p>
- static: |-
- <p data-sourcepos="1:1-1:51" dir="auto">Visit <a href="http://www.commonmark.org/help" rel="nofollow noreferrer noopener" target="_blank">www.commonmark.org/help</a> for more information.</p>
- wysiwyg: |-
- <p>Visit <a target="_blank" rel="noopener noreferrer nofollow" href="http://www.commonmark.org/help">www.commonmark.org/help</a> for more information.</p>
-06_10__inlines__autolinks_extension__003:
- canonical: |
- <p>Visit <a href="http://www.commonmark.org">www.commonmark.org</a>.</p>
- <p>Visit <a href="http://www.commonmark.org/a.b">www.commonmark.org/a.b</a>.</p>
- static: |-
- <p data-sourcepos="1:1-1:25" dir="auto">Visit <a href="http://www.commonmark.org" rel="nofollow noreferrer noopener" target="_blank">www.commonmark.org</a>.</p>
- <p data-sourcepos="3:1-3:29" dir="auto">Visit <a href="http://www.commonmark.org/a.b" rel="nofollow noreferrer noopener" target="_blank">www.commonmark.org/a.b</a>.</p>
- wysiwyg: |-
- <p>Visit <a target="_blank" rel="noopener noreferrer nofollow" href="http://www.commonmark.org">www.commonmark.org</a>.</p>
-06_10__inlines__autolinks_extension__004:
- canonical: |
- <p><a href="http://www.google.com/search?q=Markup+(business)">www.google.com/search?q=Markup+(business)</a></p>
- <p><a href="http://www.google.com/search?q=Markup+(business)">www.google.com/search?q=Markup+(business)</a>))</p>
- <p>(<a href="http://www.google.com/search?q=Markup+(business)">www.google.com/search?q=Markup+(business)</a>)</p>
- <p>(<a href="http://www.google.com/search?q=Markup+(business)">www.google.com/search?q=Markup+(business)</a></p>
- static: |-
- <p data-sourcepos="1:1-1:41" dir="auto"><a href="http://www.google.com/search?q=Markup+(business)" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=Markup+(business)</a></p>
- <p data-sourcepos="3:1-3:43" dir="auto"><a href="http://www.google.com/search?q=Markup+(business)" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=Markup+(business)</a>))</p>
- <p data-sourcepos="5:1-5:43" dir="auto">(<a href="http://www.google.com/search?q=Markup+(business)" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=Markup+(business)</a>)</p>
- <p data-sourcepos="7:1-7:42" dir="auto">(<a href="http://www.google.com/search?q=Markup+(business)" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=Markup+(business)</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://www.google.com/search?q=Markup+(business)">www.google.com/search?q=Markup+(business)</a></p>
-06_10__inlines__autolinks_extension__005:
- canonical: |
- <p><a href="http://www.google.com/search?q=(business))+ok">www.google.com/search?q=(business))+ok</a></p>
- static: |-
- <p data-sourcepos="1:1-1:38" dir="auto"><a href="http://www.google.com/search?q=(business))+ok" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=(business))+ok</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://www.google.com/search?q=(business))+ok">www.google.com/search?q=(business))+ok</a></p>
-06_10__inlines__autolinks_extension__006:
- canonical: |
- <p><a href="http://www.google.com/search?q=commonmark&amp;hl=en">www.google.com/search?q=commonmark&amp;hl=en</a></p>
- <p><a href="http://www.google.com/search?q=commonmark">www.google.com/search?q=commonmark</a>&amp;hl;</p>
- static: |-
- <p data-sourcepos="1:1-1:40" dir="auto"><a href="http://www.google.com/search?q=commonmark&amp;hl=en" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=commonmark&amp;hl=en</a></p>
- <p data-sourcepos="3:1-3:38" dir="auto"><a href="http://www.google.com/search?q=commonmark" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=commonmark</a>&amp;hl;</p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://www.google.com/search?q=commonmark&amp;hl=en">www.google.com/search?q=commonmark&amp;hl=en</a></p>
-06_10__inlines__autolinks_extension__007:
- canonical: |
- <p><a href="http://www.commonmark.org/he">www.commonmark.org/he</a>&lt;lp</p>
- static: |-
- <p data-sourcepos="1:1-1:24" dir="auto"><a href="http://www.commonmark.org/he" rel="nofollow noreferrer noopener" target="_blank">www.commonmark.org/he</a>&lt;lp</p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://www.commonmark.org/he">www.commonmark.org/he</a>&lt;lp</p>
-06_10__inlines__autolinks_extension__008:
- canonical: |
- <p><a href="http://commonmark.org">http://commonmark.org</a></p>
- <p>(Visit <a href="https://encrypted.google.com/search?q=Markup+(business)">https://encrypted.google.com/search?q=Markup+(business)</a>)</p>
- <p>Anonymous FTP is available at <a href="ftp://foo.bar.baz">ftp://foo.bar.baz</a>.</p>
- static: |-
- <p data-sourcepos="1:1-1:21" dir="auto"><a href="http://commonmark.org" rel="nofollow noreferrer noopener" target="_blank">http://commonmark.org</a></p>
- <p data-sourcepos="3:1-3:63" dir="auto">(Visit <a href="https://encrypted.google.com/search?q=Markup+(business)" rel="nofollow noreferrer noopener" target="_blank">https://encrypted.google.com/search?q=Markup+(business)</a>)</p>
- <p data-sourcepos="5:1-5:48" dir="auto">Anonymous FTP is available at <a href="ftp://foo.bar.baz/">ftp://foo.bar.baz</a>.</p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://commonmark.org">http://commonmark.org</a></p>
-06_10__inlines__autolinks_extension__009:
- canonical: |
- <p><a href="mailto:foo@bar.baz">foo@bar.baz</a></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"><a href="mailto:foo@bar.baz">foo@bar.baz</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="mailto:foo@bar.baz">foo@bar.baz</a></p>
-06_10__inlines__autolinks_extension__010:
- canonical: |
- <p>hello@mail+xyz.example isn't valid, but <a href="mailto:hello+xyz@mail.example">hello+xyz@mail.example</a> is.</p>
- static: |-
- <p data-sourcepos="1:1-1:66" dir="auto">hello@mail+xyz.example isn't valid, but <a href="mailto:hello+xyz@mail.example">hello+xyz@mail.example</a> is.</p>
- wysiwyg: |-
- <p>hello@mail+xyz.example isn't valid, but <a target="_blank" rel="noopener noreferrer nofollow" href="mailto:hello+xyz@mail.example">hello+xyz@mail.example</a> is.</p>
-06_10__inlines__autolinks_extension__011:
- canonical: |
- <p><a href="mailto:a.b-c_d@a.b">a.b-c_d@a.b</a></p>
- <p><a href="mailto:a.b-c_d@a.b">a.b-c_d@a.b</a>.</p>
- <p>a.b-c_d@a.b-</p>
- <p>a.b-c_d@a.b_</p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"><a href="mailto:a.b-c_d@a.b">a.b-c_d@a.b</a></p>
- <p data-sourcepos="3:1-3:12" dir="auto"><a href="mailto:a.b-c_d@a.b">a.b-c_d@a.b</a>.</p>
- <p data-sourcepos="5:1-5:12" dir="auto">a.b-c_d@a.b-</p>
- <p data-sourcepos="7:1-7:12" dir="auto">a.b-c_d@a.b_</p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="mailto:a.b-c_d@a.b">a.b-c_d@a.b</a></p>
-06_11__inlines__raw_html__001:
- canonical: |
- <p><a><bab><c2c></p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto"><a></a></p>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "bab" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__002:
- canonical: |
- <p><a/><b2/></p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto"><a></a></p>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "b2" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__003:
- canonical: |
- <p><a /><b2
- data="foo" ></p>
- static: |-
- <p data-sourcepos="1:1-2:12" dir="auto"><a></a></p>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "b2" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__004:
- canonical: |
- <p><a foo="bar" bam = 'baz <em>"</em>'
- _boolean zoop:33=zoop:33 /></p>
- static: |-
- <p data-sourcepos="1:1-2:27" dir="auto"><a></a></p>
- wysiwyg: |-
- <p></p>
-06_11__inlines__raw_html__005:
- canonical: |
- <p>Foo <responsive-image src="foo.jpg" /></p>
- static: |-
- <p data-sourcepos="1:1-1:38" dir="auto">Foo </p>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "responsive-image" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__006:
- canonical: |
- <p>&lt;33&gt; &lt;__&gt;</p>
- static: |-
- <p data-sourcepos="1:1-1:9" dir="auto">&lt;33&gt; &lt;__&gt;</p>
- wysiwyg: |-
- <p>&lt;33&gt; &lt;__&gt;</p>
-06_11__inlines__raw_html__007:
- canonical: |
- <p>&lt;a h*#ref=&quot;hi&quot;&gt;</p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto">&lt;a h*#ref="hi"&gt;</p>
- wysiwyg: |-
- <p>&lt;a h*#ref="hi"&gt;</p>
-06_11__inlines__raw_html__008:
- canonical: |
- <p>&lt;a href=&quot;hi'&gt; &lt;a href=hi'&gt;</p>
- static: |-
- <p data-sourcepos="1:1-1:26" dir="auto">&lt;a href="hi'&gt; &lt;a href=hi'&gt;</p>
- wysiwyg: |-
- <p>&lt;a href="hi'&gt; &lt;a href=hi'&gt;</p>
-06_11__inlines__raw_html__009:
- canonical: |
- <p>&lt; a&gt;&lt;
- foo&gt;&lt;bar/ &gt;
- &lt;foo bar=baz
- bim!bop /&gt;</p>
- static: |-
- <p data-sourcepos="1:1-4:10" dir="auto">&lt; a&gt;&lt;
- foo&gt;&lt;bar/ &gt;
- &lt;foo bar=baz
- bim!bop /&gt;</p>
- wysiwyg: |-
- <p>&lt; a&gt;&lt;
- foo&gt;&lt;bar/ &gt;
- &lt;foo bar=baz
- bim!bop /&gt;</p>
-06_11__inlines__raw_html__010:
- canonical: |
- <p>&lt;a href='bar'title=title&gt;</p>
- static: |-
- <p data-sourcepos="1:1-1:25" dir="auto">&lt;a href='bar'title=title&gt;</p>
- wysiwyg: |-
- <p>&lt;a href='bar'title=title&gt;</p>
-06_11__inlines__raw_html__011:
- canonical: |
- <p></a></foo ></p>
- static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"></p>
- wysiwyg: |-
- <p></p>
-06_11__inlines__raw_html__012:
- canonical: |
- <p>&lt;/a href=&quot;foo&quot;&gt;</p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto">&lt;/a href="foo"&gt;</p>
- wysiwyg: |-
- <p>&lt;/a href="foo"&gt;</p>
-06_11__inlines__raw_html__013:
- canonical: |
- <p>foo <!-- this is a
- comment - with hyphen --></p>
- static: |-
- <p data-sourcepos="1:1-2:25" dir="auto">foo </p>
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-06_11__inlines__raw_html__014:
- canonical: |
- <p>foo &lt;!-- not a comment -- two hyphens --&gt;</p>
- static: |-
- <p data-sourcepos="1:1-1:41" dir="auto">foo &lt;!-- not a comment -- two hyphens --&gt;</p>
- wysiwyg: |-
- <p>foo &lt;!-- not a comment -- two hyphens --&gt;</p>
-06_11__inlines__raw_html__015:
- canonical: |
- <p>foo &lt;!--&gt; foo --&gt;</p>
- <p>foo &lt;!-- foo---&gt;</p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto">foo &lt;!--&gt; foo --&gt;</p>
- <p data-sourcepos="3:1-3:16" dir="auto">foo &lt;!-- foo---&gt;</p>
- wysiwyg: |-
- <p>foo &lt;!--&gt; foo --&gt;</p>
-06_11__inlines__raw_html__016:
- canonical: |
- <p>foo <?php echo $a; ?></p>
- static: |-
- <p data-sourcepos="1:1-1:21" dir="auto">foo <?php echo $a; ?></p>
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-06_11__inlines__raw_html__017:
- canonical: |
- <p>foo <!ELEMENT br EMPTY></p>
- static: |-
- <p data-sourcepos="1:1-1:23" dir="auto">foo &lt;!ELEMENT br EMPTY&gt;</p>
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-06_11__inlines__raw_html__018:
- canonical: |
- <p>foo <![CDATA[>&<]]></p>
- static: |-
- <p data-sourcepos="1:1-1:19" dir="auto">foo &lt;![CDATA[&gt;&amp;&lt;]]&gt;</p>
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-06_11__inlines__raw_html__019:
- canonical: |
- <p>foo <a href="&ouml;"></p>
- static: |-
- <p data-sourcepos="1:1-1:21" dir="auto">foo <a href="%C3%B6" rel="nofollow noreferrer noopener" target="_blank"></a></p>
- wysiwyg: |-
- <p>foo </p>
-06_11__inlines__raw_html__020:
- canonical: |
- <p>foo <a href="\*"></p>
- static: |-
- <p data-sourcepos="1:1-1:17" dir="auto">foo <a href="%5C*" rel="nofollow noreferrer noopener" target="_blank"></a></p>
- wysiwyg: |-
- <p>foo </p>
-06_11__inlines__raw_html__021:
- canonical: |
- <p>&lt;a href=&quot;&quot;&quot;&gt;</p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto">&lt;a href="""&gt;</p>
- wysiwyg: |-
- <p>&lt;a href="""&gt;</p>
-06_12__inlines__disallowed_raw_html_extension__001:
- canonical: |
- <p><strong> &lt;title> &lt;style> <em></p>
- <blockquote>
- &lt;xmp> is disallowed. &lt;XMP> is also disallowed.
- </blockquote>
- static: |-
- <p data-sourcepos="1:1-1:29" dir="auto"><strong> &lt;em&gt;&lt;/p&gt;
- &lt;blockquote&gt;
- &lt;xmp&gt; is disallowed. &lt;XMP&gt; is also disallowed.
- &lt;/blockquote&gt;</strong></p>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "title" not supported by this converter. Please, provide an specification.
-06_13__inlines__hard_line_breaks__001:
- canonical: |
- <p>foo<br />
- baz</p>
- static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">foo<br>
- baz</p>
- wysiwyg: |-
- <p>foo<br>
- baz</p>
-06_13__inlines__hard_line_breaks__002:
- canonical: |
- <p>foo<br />
- baz</p>
- static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">foo<br>
- baz</p>
- wysiwyg: |-
- <p>foo<br>
- baz</p>
-06_13__inlines__hard_line_breaks__003:
- canonical: |
- <p>foo<br />
- baz</p>
- static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">foo<br>
- baz</p>
- wysiwyg: |-
- <p>foo<br>
- baz</p>
-06_13__inlines__hard_line_breaks__004:
- canonical: |
- <p>foo<br />
- bar</p>
- static: |-
- <p data-sourcepos="1:1-2:8" dir="auto">foo<br>
- bar</p>
- wysiwyg: |-
- <p>foo<br>
- bar</p>
-06_13__inlines__hard_line_breaks__005:
- canonical: |
- <p>foo<br />
- bar</p>
- static: |-
- <p data-sourcepos="1:1-2:8" dir="auto">foo<br>
- bar</p>
- wysiwyg: |-
- <p>foo<br>
- bar</p>
-06_13__inlines__hard_line_breaks__006:
- canonical: |
- <p><em>foo<br />
- bar</em></p>
- static: |-
- <p data-sourcepos="1:1-2:4" dir="auto"><em>foo<br>
- bar</em></p>
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_13__inlines__hard_line_breaks__007:
- canonical: |
- <p><em>foo<br />
- bar</em></p>
- static: |-
- <p data-sourcepos="1:1-2:4" dir="auto"><em>foo<br>
- bar</em></p>
- wysiwyg: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_13__inlines__hard_line_breaks__008:
- canonical: |
- <p><code>code span</code></p>
- static: |-
- <p data-sourcepos="1:1-2:5" dir="auto"><code>code span</code></p>
- wysiwyg: |-
- <p><code>code span</code></p>
-06_13__inlines__hard_line_breaks__009:
- canonical: |
- <p><code>code\ span</code></p>
- static: |-
- <p data-sourcepos="1:1-2:5" dir="auto"><code>code\ span</code></p>
- wysiwyg: |-
- <p><code>code\ span</code></p>
-06_13__inlines__hard_line_breaks__010:
- canonical: "<p><a href=\"foo \nbar\"></p>\n"
- static: |-
- <p data-sourcepos="1:1-2:5" dir="auto"><a href="foo%20%20%0Abar" rel="nofollow noreferrer noopener" target="_blank"></a></p>
- wysiwyg: |-
- <p></p>
-06_13__inlines__hard_line_breaks__011:
- canonical: |
- <p><a href="foo\
- bar"></p>
- static: |-
- <p data-sourcepos="1:1-2:5" dir="auto"><a href="foo%5C%0Abar" rel="nofollow noreferrer noopener" target="_blank"></a></p>
- wysiwyg: |-
- <p></p>
-06_13__inlines__hard_line_breaks__012:
- canonical: |
- <p>foo\</p>
- static: |-
- <p data-sourcepos="1:1-1:4" dir="auto">foo\</p>
- wysiwyg: |-
- <p>foo\</p>
-06_13__inlines__hard_line_breaks__013:
- canonical: |
- <p>foo</p>
- static: |-
- <p data-sourcepos="1:1-1:5" dir="auto">foo</p>
- wysiwyg: |-
- <p>foo</p>
-06_13__inlines__hard_line_breaks__014:
- canonical: |
- <h3>foo\</h3>
- static: |-
- <h3 data-sourcepos="1:1-1:8" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo\</h3>
- wysiwyg: |-
- <h3>foo\</h3>
-06_13__inlines__hard_line_breaks__015:
- canonical: |
- <h3>foo</h3>
- static: |-
- <h3 data-sourcepos="1:1-1:7" dir="auto">
- <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h3>
- wysiwyg: |-
- <h3>foo</h3>
-06_14__inlines__soft_line_breaks__001:
- canonical: |
- <p>foo
- baz</p>
- static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">foo
- baz</p>
- wysiwyg: |-
- <p>foo
- baz</p>
-06_14__inlines__soft_line_breaks__002:
- canonical: |
- <p>foo
- baz</p>
- static: |-
- <p data-sourcepos="1:1-2:4" dir="auto">foo
- baz</p>
- wysiwyg: |-
- <p>foo
- baz</p>
-06_15__inlines__textual_content__001:
- canonical: |
- <p>hello $.;'there</p>
- static: |-
- <p data-sourcepos="1:1-1:15" dir="auto">hello $.;'there</p>
- wysiwyg: |-
- <p>hello $.;'there</p>
-06_15__inlines__textual_content__002:
- canonical: |
- <p>Foo χρῆν</p>
- static: |-
- <p data-sourcepos="1:1-1:13" dir="auto">Foo χρῆν</p>
- wysiwyg: |-
- <p>Foo χρῆν</p>
-06_15__inlines__textual_content__003:
- canonical: |
- <p>Multiple spaces</p>
- static: |-
- <p data-sourcepos="1:1-1:19" dir="auto">Multiple spaces</p>
- wysiwyg: |-
- <p>Multiple spaces</p>
-07_01__gitlab_specific_markdown__footnotes__001:
- canonical: ""
- static: |-
- <p data-sourcepos="1:1-1:27" dir="auto">footnote reference tag <sup class="footnote-ref"><a href="#fn-1-2118" id="fnref-1-2118" data-footnote-ref>1</a></sup></p>
- <section data-footnotes class="footnotes">
- <ol>
- <li id="fn-1-2118">
- <p data-sourcepos="3:7-3:19">footnote text <a href="#fnref-1-2118" data-footnote-backref aria-label="Back to content" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
- </li>
- </ol>
- </section>
- wysiwyg: |-
- Error - check implementation:
- Hast node of type "sup" not supported by this converter. Please, provide an specification.
diff --git a/spec/fixtures/glfm/example_snapshots/markdown.yml b/spec/fixtures/glfm/example_snapshots/markdown.yml
deleted file mode 100644
index d1fd16b10ce..00000000000
--- a/spec/fixtures/glfm/example_snapshots/markdown.yml
+++ /dev/null
@@ -1,2201 +0,0 @@
----
-02_01__preliminaries__tabs__001: "\tfoo\tbaz\t\tbim\n"
-02_01__preliminaries__tabs__002: " \tfoo\tbaz\t\tbim\n"
-02_01__preliminaries__tabs__003: " a\ta\n ὐ\ta\n"
-02_01__preliminaries__tabs__004: " - foo\n\n\tbar\n"
-02_01__preliminaries__tabs__005: "- foo\n\n\t\tbar\n"
-02_01__preliminaries__tabs__006: ">\t\tfoo\n"
-02_01__preliminaries__tabs__007: "-\t\tfoo\n"
-02_01__preliminaries__tabs__008: " foo\n\tbar\n"
-02_01__preliminaries__tabs__009: " - foo\n - bar\n\t - baz\n"
-02_01__preliminaries__tabs__010: "#\tFoo\n"
-02_01__preliminaries__tabs__011: "*\t*\t*\t\n"
-03_01__blocks_and_inlines__precedence__001: |
- - `one
- - two`
-04_01__leaf_blocks__thematic_breaks__001: |
- ***
- ---
- ___
-04_01__leaf_blocks__thematic_breaks__002: |
- +++
-04_01__leaf_blocks__thematic_breaks__003: |
- ===
-04_01__leaf_blocks__thematic_breaks__004: |
- --
- **
- __
-04_01__leaf_blocks__thematic_breaks__005: |2
- ***
- ***
- ***
-04_01__leaf_blocks__thematic_breaks__006: |2
- ***
-04_01__leaf_blocks__thematic_breaks__007: |
- Foo
- ***
-04_01__leaf_blocks__thematic_breaks__008: |
- _____________________________________
-04_01__leaf_blocks__thematic_breaks__009: |2
- - - -
-04_01__leaf_blocks__thematic_breaks__010: |2
- ** * ** * ** * **
-04_01__leaf_blocks__thematic_breaks__011: |
- - - - -
-04_01__leaf_blocks__thematic_breaks__012: "- - - - \n"
-04_01__leaf_blocks__thematic_breaks__013: |
- _ _ _ _ a
-
- a------
-
- ---a---
-04_01__leaf_blocks__thematic_breaks__014: |2
- *-*
-04_01__leaf_blocks__thematic_breaks__015: |
- - foo
- ***
- - bar
-04_01__leaf_blocks__thematic_breaks__016: |
- Foo
- ***
- bar
-04_01__leaf_blocks__thematic_breaks__017: |
- Foo
- ---
- bar
-04_01__leaf_blocks__thematic_breaks__018: |
- * Foo
- * * *
- * Bar
-04_01__leaf_blocks__thematic_breaks__019: |
- - Foo
- - * * *
-04_02__leaf_blocks__atx_headings__001: |
- # foo
- ## foo
- ### foo
- #### foo
- ##### foo
- ###### foo
-04_02__leaf_blocks__atx_headings__002: |
- ####### foo
-04_02__leaf_blocks__atx_headings__003: |
- #5 bolt
-
- #hashtag
-04_02__leaf_blocks__atx_headings__004: |
- \## foo
-04_02__leaf_blocks__atx_headings__005: |
- # foo *bar* \*baz\*
-04_02__leaf_blocks__atx_headings__006: "# foo \n"
-04_02__leaf_blocks__atx_headings__007: |2
- ### foo
- ## foo
- # foo
-04_02__leaf_blocks__atx_headings__008: |2
- # foo
-04_02__leaf_blocks__atx_headings__009: |
- foo
- # bar
-04_02__leaf_blocks__atx_headings__010: |
- ## foo ##
- ### bar ###
-04_02__leaf_blocks__atx_headings__011: |
- # foo ##################################
- ##### foo ##
-04_02__leaf_blocks__atx_headings__012: "### foo ### \n"
-04_02__leaf_blocks__atx_headings__013: |
- ### foo ### b
-04_02__leaf_blocks__atx_headings__014: |
- # foo#
-04_02__leaf_blocks__atx_headings__015: |
- ### foo \###
- ## foo #\##
- # foo \#
-04_02__leaf_blocks__atx_headings__016: |
- ****
- ## foo
- ****
-04_02__leaf_blocks__atx_headings__017: |
- Foo bar
- # baz
- Bar foo
-04_02__leaf_blocks__atx_headings__018: "## \n#\n### ###\n"
-04_03__leaf_blocks__setext_headings__001: |
- Foo *bar*
- =========
-
- Foo *bar*
- ---------
-04_03__leaf_blocks__setext_headings__002: |
- Foo *bar
- baz*
- ====
-04_03__leaf_blocks__setext_headings__003: " Foo *bar\nbaz*\t\n====\n"
-04_03__leaf_blocks__setext_headings__004: |
- Foo
- -------------------------
-
- Foo
- =
-04_03__leaf_blocks__setext_headings__005: |2
- Foo
- ---
-
- Foo
- -----
-
- Foo
- ===
-04_03__leaf_blocks__setext_headings__006: |2
- Foo
- ---
-
- Foo
- ---
-04_03__leaf_blocks__setext_headings__007: "Foo\n ---- \n"
-04_03__leaf_blocks__setext_headings__008: |
- Foo
- ---
-04_03__leaf_blocks__setext_headings__009: |
- Foo
- = =
-
- Foo
- --- -
-04_03__leaf_blocks__setext_headings__010: "Foo \n-----\n"
-04_03__leaf_blocks__setext_headings__011: |
- Foo\
- ----
-04_03__leaf_blocks__setext_headings__012: |
- `Foo
- ----
- `
-
- <a title="a lot
- ---
- of dashes"/>
-04_03__leaf_blocks__setext_headings__013: |
- > Foo
- ---
-04_03__leaf_blocks__setext_headings__014: |
- > foo
- bar
- ===
-04_03__leaf_blocks__setext_headings__015: |
- - Foo
- ---
-04_03__leaf_blocks__setext_headings__016: |
- Foo
- Bar
- ---
-04_03__leaf_blocks__setext_headings__017: |
- ---
- Foo
- ---
- Bar
- ---
- Baz
-04_03__leaf_blocks__setext_headings__018: |2
-
- ====
-04_03__leaf_blocks__setext_headings__019: |
- ---
- ---
-04_03__leaf_blocks__setext_headings__020: |
- - foo
- -----
-04_03__leaf_blocks__setext_headings__021: |2
- foo
- ---
-04_03__leaf_blocks__setext_headings__022: |
- > foo
- -----
-04_03__leaf_blocks__setext_headings__023: |
- \> foo
- ------
-04_03__leaf_blocks__setext_headings__024: |
- Foo
-
- bar
- ---
- baz
-04_03__leaf_blocks__setext_headings__025: |
- Foo
- bar
-
- ---
-
- baz
-04_03__leaf_blocks__setext_headings__026: |
- Foo
- bar
- * * *
- baz
-04_03__leaf_blocks__setext_headings__027: |
- Foo
- bar
- \---
- baz
-04_04__leaf_blocks__indented_code_blocks__001: |2
- a simple
- indented code block
-04_04__leaf_blocks__indented_code_blocks__002: |2
- - foo
-
- bar
-04_04__leaf_blocks__indented_code_blocks__003: |
- 1. foo
-
- - bar
-04_04__leaf_blocks__indented_code_blocks__004: |2
- <a/>
- *hi*
-
- - one
-04_04__leaf_blocks__indented_code_blocks__005: " chunk1\n\n chunk2\n \n \n
- \n chunk3\n"
-04_04__leaf_blocks__indented_code_blocks__006: " chunk1\n \n chunk2\n"
-04_04__leaf_blocks__indented_code_blocks__007: |+
- Foo
- bar
-
-04_04__leaf_blocks__indented_code_blocks__008: |2
- foo
- bar
-04_04__leaf_blocks__indented_code_blocks__009: |
- # Heading
- foo
- Heading
- ------
- foo
- ----
-04_04__leaf_blocks__indented_code_blocks__010: |2
- foo
- bar
-04_04__leaf_blocks__indented_code_blocks__011: "\n \n foo\n \n\n"
-04_04__leaf_blocks__indented_code_blocks__012: " foo \n"
-04_05__leaf_blocks__fenced_code_blocks__001: |
- ```
- <
- >
- ```
-04_05__leaf_blocks__fenced_code_blocks__002: |
- ~~~
- <
- >
- ~~~
-04_05__leaf_blocks__fenced_code_blocks__003: |
- ``
- foo
- ``
-04_05__leaf_blocks__fenced_code_blocks__004: |
- ```
- aaa
- ~~~
- ```
-04_05__leaf_blocks__fenced_code_blocks__005: |
- ~~~
- aaa
- ```
- ~~~
-04_05__leaf_blocks__fenced_code_blocks__006: |
- ````
- aaa
- ```
- ``````
-04_05__leaf_blocks__fenced_code_blocks__007: |
- ~~~~
- aaa
- ~~~
- ~~~~
-04_05__leaf_blocks__fenced_code_blocks__008: |
- ```
-04_05__leaf_blocks__fenced_code_blocks__009: |
- `````
-
- ```
- aaa
-04_05__leaf_blocks__fenced_code_blocks__010: |
- > ```
- > aaa
-
- bbb
-04_05__leaf_blocks__fenced_code_blocks__011: "```\n\n \n```\n"
-04_05__leaf_blocks__fenced_code_blocks__012: |
- ```
- ```
-04_05__leaf_blocks__fenced_code_blocks__013: |2
- ```
- aaa
- aaa
- ```
-04_05__leaf_blocks__fenced_code_blocks__014: |2
- ```
- aaa
- aaa
- aaa
- ```
-04_05__leaf_blocks__fenced_code_blocks__015: |2
- ```
- aaa
- aaa
- aaa
- ```
-04_05__leaf_blocks__fenced_code_blocks__016: |2
- ```
- aaa
- ```
-04_05__leaf_blocks__fenced_code_blocks__017: |
- ```
- aaa
- ```
-04_05__leaf_blocks__fenced_code_blocks__018: |2
- ```
- aaa
- ```
-04_05__leaf_blocks__fenced_code_blocks__019: |
- ```
- aaa
- ```
-04_05__leaf_blocks__fenced_code_blocks__020: |
- ``` ```
- aaa
-04_05__leaf_blocks__fenced_code_blocks__021: |
- ~~~~~~
- aaa
- ~~~ ~~
-04_05__leaf_blocks__fenced_code_blocks__022: |
- foo
- ```
- bar
- ```
- baz
-04_05__leaf_blocks__fenced_code_blocks__023: |
- foo
- ---
- ~~~
- bar
- ~~~
- # baz
-04_05__leaf_blocks__fenced_code_blocks__024: |
- ```ruby
- def foo(x)
- return 3
- end
- ```
-04_05__leaf_blocks__fenced_code_blocks__025: |
- ~~~~ ruby startline=3 $%@#$
- def foo(x)
- return 3
- end
- ~~~~~~~
-04_05__leaf_blocks__fenced_code_blocks__026: |
- ````;
- ````
-04_05__leaf_blocks__fenced_code_blocks__027: |
- ``` aa ```
- foo
-04_05__leaf_blocks__fenced_code_blocks__028: |
- ~~~ aa ``` ~~~
- foo
- ~~~
-04_05__leaf_blocks__fenced_code_blocks__029: |
- ```
- ``` aaa
- ```
-04_06__leaf_blocks__html_blocks__001: |
- <table><tr><td>
- <pre>
- **Hello**,
-
- _world_.
- </pre>
- </td></tr></table>
-04_06__leaf_blocks__html_blocks__002: |
- <table>
- <tr>
- <td>
- hi
- </td>
- </tr>
- </table>
-
- okay.
-04_06__leaf_blocks__html_blocks__003: |2
- <div>
- *hello*
- <foo><a>
-04_06__leaf_blocks__html_blocks__004: |
- </div>
- *foo*
-04_06__leaf_blocks__html_blocks__005: |
- <DIV CLASS="foo">
-
- *Markdown*
-
- </DIV>
-04_06__leaf_blocks__html_blocks__006: |
- <div id="foo"
- class="bar">
- </div>
-04_06__leaf_blocks__html_blocks__007: |
- <div id="foo" class="bar
- baz">
- </div>
-04_06__leaf_blocks__html_blocks__008: |
- <div>
- *foo*
-
- *bar*
-04_06__leaf_blocks__html_blocks__009: |
- <div id="foo"
- *hi*
-04_06__leaf_blocks__html_blocks__010: |
- <div class
- foo
-04_06__leaf_blocks__html_blocks__011: |
- <div *???-&&&-<---
- *foo*
-04_06__leaf_blocks__html_blocks__012: |
- <div><a href="bar">*foo*</a></div>
-04_06__leaf_blocks__html_blocks__013: |
- <table><tr><td>
- foo
- </td></tr></table>
-04_06__leaf_blocks__html_blocks__014: |
- <div></div>
- ``` c
- int x = 33;
- ```
-04_06__leaf_blocks__html_blocks__015: |
- <a href="foo">
- *bar*
- </a>
-04_06__leaf_blocks__html_blocks__016: |
- <Warning>
- *bar*
- </Warning>
-04_06__leaf_blocks__html_blocks__017: |
- <i class="foo">
- *bar*
- </i>
-04_06__leaf_blocks__html_blocks__018: |
- </ins>
- *bar*
-04_06__leaf_blocks__html_blocks__019: |
- <del>
- *foo*
- </del>
-04_06__leaf_blocks__html_blocks__020: |
- <del>
-
- *foo*
-
- </del>
-04_06__leaf_blocks__html_blocks__021: |
- <del>*foo*</del>
-04_06__leaf_blocks__html_blocks__022: |
- <pre language="haskell"><code>
- import Text.HTML.TagSoup
-
- main :: IO ()
- main = print $ parseTags tags
- </code></pre>
- okay
-04_06__leaf_blocks__html_blocks__023: |
- <script type="text/javascript">
- // JavaScript example
-
- document.getElementById("demo").innerHTML = "Hello JavaScript!";
- </script>
- okay
-04_06__leaf_blocks__html_blocks__024: |
- <style
- type="text/css">
- h1 {color:red;}
-
- p {color:blue;}
- </style>
- okay
-04_06__leaf_blocks__html_blocks__025: |
- <style
- type="text/css">
-
- foo
-04_06__leaf_blocks__html_blocks__026: |
- > <div>
- > foo
-
- bar
-04_06__leaf_blocks__html_blocks__027: |
- - <div>
- - foo
-04_06__leaf_blocks__html_blocks__028: |
- <style>p{color:red;}</style>
- *foo*
-04_06__leaf_blocks__html_blocks__029: |
- <!-- foo -->*bar*
- *baz*
-04_06__leaf_blocks__html_blocks__030: |
- <script>
- foo
- </script>1. *bar*
-04_06__leaf_blocks__html_blocks__031: |
- <!-- Foo
-
- bar
- baz -->
- okay
-04_06__leaf_blocks__html_blocks__032: |
- <?php
-
- echo '>';
-
- ?>
- okay
-04_06__leaf_blocks__html_blocks__033: |
- <!DOCTYPE html>
-04_06__leaf_blocks__html_blocks__034: |
- <![CDATA[
- function matchwo(a,b)
- {
- if (a < b && a < 0) then {
- return 1;
-
- } else {
-
- return 0;
- }
- }
- ]]>
- okay
-04_06__leaf_blocks__html_blocks__035: |2
- <!-- foo -->
-
- <!-- foo -->
-04_06__leaf_blocks__html_blocks__036: |2
- <div>
-
- <div>
-04_06__leaf_blocks__html_blocks__037: |
- Foo
- <div>
- bar
- </div>
-04_06__leaf_blocks__html_blocks__038: |
- <div>
- bar
- </div>
- *foo*
-04_06__leaf_blocks__html_blocks__039: |
- Foo
- <a href="bar">
- baz
-04_06__leaf_blocks__html_blocks__040: |
- <div>
-
- *Emphasized* text.
-
- </div>
-04_06__leaf_blocks__html_blocks__041: |
- <div>
- *Emphasized* text.
- </div>
-04_06__leaf_blocks__html_blocks__042: |
- <table>
-
- <tr>
-
- <td>
- Hi
- </td>
-
- </tr>
-
- </table>
-04_06__leaf_blocks__html_blocks__043: |
- <table>
-
- <tr>
-
- <td>
- Hi
- </td>
-
- </tr>
-
- </table>
-04_07__leaf_blocks__link_reference_definitions__001: |
- [foo]: /url "title"
-
- [foo]
-04_07__leaf_blocks__link_reference_definitions__002: " [foo]: \n /url \n 'the
- title' \n\n[foo]\n"
-04_07__leaf_blocks__link_reference_definitions__003: |
- [Foo*bar\]]:my_(url) 'title (with parens)'
-
- [Foo*bar\]]
-04_07__leaf_blocks__link_reference_definitions__004: |
- [Foo bar]:
- <my url>
- 'title'
-
- [Foo bar]
-04_07__leaf_blocks__link_reference_definitions__005: |
- [foo]: /url '
- title
- line1
- line2
- '
-
- [foo]
-04_07__leaf_blocks__link_reference_definitions__006: |
- [foo]: /url 'title
-
- with blank line'
-
- [foo]
-04_07__leaf_blocks__link_reference_definitions__007: |
- [foo]:
- /url
-
- [foo]
-04_07__leaf_blocks__link_reference_definitions__008: |
- [foo]:
-
- [foo]
-04_07__leaf_blocks__link_reference_definitions__009: |
- [foo]: <>
-
- [foo]
-04_07__leaf_blocks__link_reference_definitions__010: |
- [foo]: <bar>(baz)
-
- [foo]
-04_07__leaf_blocks__link_reference_definitions__011: |
- [foo]: /url\bar\*baz "foo\"bar\baz"
-
- [foo]
-04_07__leaf_blocks__link_reference_definitions__012: |
- [foo]
-
- [foo]: url
-04_07__leaf_blocks__link_reference_definitions__013: |
- [foo]
-
- [foo]: first
- [foo]: second
-04_07__leaf_blocks__link_reference_definitions__014: |
- [FOO]: /url
-
- [Foo]
-04_07__leaf_blocks__link_reference_definitions__015: |
- [ΑΓΩ]: /φου
-
- [αγω]
-04_07__leaf_blocks__link_reference_definitions__016: |
- [foo]: /url
-04_07__leaf_blocks__link_reference_definitions__017: |
- [
- foo
- ]: /url
- bar
-04_07__leaf_blocks__link_reference_definitions__018: |
- [foo]: /url "title" ok
-04_07__leaf_blocks__link_reference_definitions__019: |
- [foo]: /url
- "title" ok
-04_07__leaf_blocks__link_reference_definitions__020: |2
- [foo]: /url "title"
-
- [foo]
-04_07__leaf_blocks__link_reference_definitions__021: |
- ```
- [foo]: /url
- ```
-
- [foo]
-04_07__leaf_blocks__link_reference_definitions__022: |
- Foo
- [bar]: /baz
-
- [bar]
-04_07__leaf_blocks__link_reference_definitions__023: |
- # [Foo]
- [foo]: /url
- > bar
-04_07__leaf_blocks__link_reference_definitions__024: |
- [foo]: /url
- bar
- ===
- [foo]
-04_07__leaf_blocks__link_reference_definitions__025: |
- [foo]: /url
- ===
- [foo]
-04_07__leaf_blocks__link_reference_definitions__026: |
- [foo]: /foo-url "foo"
- [bar]: /bar-url
- "bar"
- [baz]: /baz-url
-
- [foo],
- [bar],
- [baz]
-04_07__leaf_blocks__link_reference_definitions__027: |
- [foo]
-
- > [foo]: /url
-04_07__leaf_blocks__link_reference_definitions__028: |
- [foo]: /url
-04_08__leaf_blocks__paragraphs__001: |
- aaa
-
- bbb
-04_08__leaf_blocks__paragraphs__002: |
- aaa
- bbb
-
- ccc
- ddd
-04_08__leaf_blocks__paragraphs__003: |
- aaa
-
-
- bbb
-04_08__leaf_blocks__paragraphs__004: |2
- aaa
- bbb
-04_08__leaf_blocks__paragraphs__005: |
- aaa
- bbb
- ccc
-04_08__leaf_blocks__paragraphs__006: |2
- aaa
- bbb
-04_08__leaf_blocks__paragraphs__007: |2
- aaa
- bbb
-04_08__leaf_blocks__paragraphs__008: "aaa \nbbb \n"
-04_09__leaf_blocks__blank_lines__001: " \n\naaa\n \n\n# aaa\n\n \n"
-04_10__leaf_blocks__tables_extension__001: |
- | foo | bar |
- | --- | --- |
- | baz | bim |
-04_10__leaf_blocks__tables_extension__002: |
- | abc | defghi |
- :-: | -----------:
- bar | baz
-04_10__leaf_blocks__tables_extension__003: |
- | f\|oo |
- | ------ |
- | b `\|` az |
- | b **\|** im |
-04_10__leaf_blocks__tables_extension__004: |
- | abc | def |
- | --- | --- |
- | bar | baz |
- > bar
-04_10__leaf_blocks__tables_extension__005: |
- | abc | def |
- | --- | --- |
- | bar | baz |
- bar
-
- bar
-04_10__leaf_blocks__tables_extension__006: |
- | abc | def |
- | --- |
- | bar |
-04_10__leaf_blocks__tables_extension__007: |
- | abc | def |
- | --- | --- |
- | bar |
- | bar | baz | boo |
-04_10__leaf_blocks__tables_extension__008: |
- | abc | def |
- | --- | --- |
-05_01__container_blocks__block_quotes__001: |
- > # Foo
- > bar
- > baz
-05_01__container_blocks__block_quotes__002: |
- ># Foo
- >bar
- > baz
-05_01__container_blocks__block_quotes__003: |2
- > # Foo
- > bar
- > baz
-05_01__container_blocks__block_quotes__004: |2
- > # Foo
- > bar
- > baz
-05_01__container_blocks__block_quotes__005: |
- > # Foo
- > bar
- baz
-05_01__container_blocks__block_quotes__006: |
- > bar
- baz
- > foo
-05_01__container_blocks__block_quotes__007: |
- > foo
- ---
-05_01__container_blocks__block_quotes__008: |
- > - foo
- - bar
-05_01__container_blocks__block_quotes__009: |
- > foo
- bar
-05_01__container_blocks__block_quotes__010: |
- > ```
- foo
- ```
-05_01__container_blocks__block_quotes__011: |
- > foo
- - bar
-05_01__container_blocks__block_quotes__012: |
- >
-05_01__container_blocks__block_quotes__013: ">\n> \n> \n"
-05_01__container_blocks__block_quotes__014: ">\n> foo\n> \n"
-05_01__container_blocks__block_quotes__015: |
- > foo
-
- > bar
-05_01__container_blocks__block_quotes__016: |
- > foo
- > bar
-05_01__container_blocks__block_quotes__017: |
- > foo
- >
- > bar
-05_01__container_blocks__block_quotes__018: |
- foo
- > bar
-05_01__container_blocks__block_quotes__019: |
- > aaa
- ***
- > bbb
-05_01__container_blocks__block_quotes__020: |
- > bar
- baz
-05_01__container_blocks__block_quotes__021: |
- > bar
-
- baz
-05_01__container_blocks__block_quotes__022: |
- > bar
- >
- baz
-05_01__container_blocks__block_quotes__023: |
- > > > foo
- bar
-05_01__container_blocks__block_quotes__024: |
- >>> foo
- > bar
- >>baz
-05_01__container_blocks__block_quotes__025: |
- > code
-
- > not code
-05_02__container_blocks__list_items__001: |
- A paragraph
- with two lines.
-
- indented code
-
- > A block quote.
-05_02__container_blocks__list_items__002: |
- 1. A paragraph
- with two lines.
-
- indented code
-
- > A block quote.
-05_02__container_blocks__list_items__003: |
- - one
-
- two
-05_02__container_blocks__list_items__004: |
- - one
-
- two
-05_02__container_blocks__list_items__005: |2
- - one
-
- two
-05_02__container_blocks__list_items__006: |2
- - one
-
- two
-05_02__container_blocks__list_items__007: |2
- > > 1. one
- >>
- >> two
-05_02__container_blocks__list_items__008: |
- >>- one
- >>
- > > two
-05_02__container_blocks__list_items__009: |
- -one
-
- 2.two
-05_02__container_blocks__list_items__010: |
- - foo
-
-
- bar
-05_02__container_blocks__list_items__011: |
- 1. foo
-
- ```
- bar
- ```
-
- baz
-
- > bam
-05_02__container_blocks__list_items__012: |
- - Foo
-
- bar
-
-
- baz
-05_02__container_blocks__list_items__013: |
- 123456789. ok
-05_02__container_blocks__list_items__014: |
- 1234567890. not ok
-05_02__container_blocks__list_items__015: |
- 0. ok
-05_02__container_blocks__list_items__016: |
- 003. ok
-05_02__container_blocks__list_items__017: |
- -1. not ok
-05_02__container_blocks__list_items__018: |
- - foo
-
- bar
-05_02__container_blocks__list_items__019: |2
- 10. foo
-
- bar
-05_02__container_blocks__list_items__020: |2
- indented code
-
- paragraph
-
- more code
-05_02__container_blocks__list_items__021: |
- 1. indented code
-
- paragraph
-
- more code
-05_02__container_blocks__list_items__022: |
- 1. indented code
-
- paragraph
-
- more code
-05_02__container_blocks__list_items__023: |2
- foo
-
- bar
-05_02__container_blocks__list_items__024: |
- - foo
-
- bar
-05_02__container_blocks__list_items__025: |
- - foo
-
- bar
-05_02__container_blocks__list_items__026: |
- -
- foo
- -
- ```
- bar
- ```
- -
- baz
-05_02__container_blocks__list_items__027: "- \n foo\n"
-05_02__container_blocks__list_items__028: |
- -
-
- foo
-05_02__container_blocks__list_items__029: |
- - foo
- -
- - bar
-05_02__container_blocks__list_items__030: "- foo\n- \n- bar\n"
-05_02__container_blocks__list_items__031: |
- 1. foo
- 2.
- 3. bar
-05_02__container_blocks__list_items__032: |
- *
-05_02__container_blocks__list_items__033: |
- foo
- *
-
- foo
- 1.
-05_02__container_blocks__list_items__034: |2
- 1. A paragraph
- with two lines.
-
- indented code
-
- > A block quote.
-05_02__container_blocks__list_items__035: |2
- 1. A paragraph
- with two lines.
-
- indented code
-
- > A block quote.
-05_02__container_blocks__list_items__036: |2
- 1. A paragraph
- with two lines.
-
- indented code
-
- > A block quote.
-05_02__container_blocks__list_items__037: |2
- 1. A paragraph
- with two lines.
-
- indented code
-
- > A block quote.
-05_02__container_blocks__list_items__038: |2
- 1. A paragraph
- with two lines.
-
- indented code
-
- > A block quote.
-05_02__container_blocks__list_items__039: |2
- 1. A paragraph
- with two lines.
-05_02__container_blocks__list_items__040: |
- > 1. > Blockquote
- continued here.
-05_02__container_blocks__list_items__041: |
- > 1. > Blockquote
- > continued here.
-05_02__container_blocks__list_items__042: |
- - foo
- - bar
- - baz
- - boo
-05_02__container_blocks__list_items__043: |
- - foo
- - bar
- - baz
- - boo
-05_02__container_blocks__list_items__044: |
- 10) foo
- - bar
-05_02__container_blocks__list_items__045: |
- 10) foo
- - bar
-05_02__container_blocks__list_items__046: |
- - - foo
-05_02__container_blocks__list_items__047: |
- 1. - 2. foo
-05_02__container_blocks__list_items__048: |
- - # Foo
- - Bar
- ---
- baz
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__049: |
- - [ ] foo
- - [x] bar
- - [x] foo
- - [ ] bar
- - [x] baz
- - [ ] bim
- - foo
- - bar
- + baz
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__050: |
- 1. foo
- 2. bar
- 3) baz
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__051: |
- Foo
- - bar
- - baz
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__052: |
- The number of windows in my house is
- 14. The number of doors is 6.
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__053: |
- The number of windows in my house is
- 1. The number of doors is 6.
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__054: |
- - foo
-
- - bar
-
-
- - baz
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__055: |
- - foo
- - bar
- - baz
-
-
- bim
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__056: |
- - foo
- - bar
-
- <!-- -->
-
- - baz
- - bim
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__057: |
- - foo
-
- notcode
-
- - foo
-
- <!-- -->
-
- code
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__058: |
- - a
- - b
- - c
- - d
- - e
- - f
- - g
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__059: |
- 1. a
-
- 2. b
-
- 3. c
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__060: |
- - a
- - b
- - c
- - d
- - e
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__061: |
- 1. a
-
- 2. b
-
- 3. c
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__062: |
- - a
- - b
-
- - c
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__063: |
- * a
- *
-
- * c
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__064: |
- - a
- - b
-
- c
- - d
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__065: |
- - a
- - b
-
- [ref]: /url
- - d
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__066: |
- - a
- - ```
- b
-
-
- ```
- - c
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__067: |
- - a
- - b
-
- c
- - d
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__068: |
- * a
- > b
- >
- * c
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__069: |
- - a
- > b
- ```
- c
- ```
- - d
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__070: |
- - a
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__071: |
- - a
- - b
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__072: |
- 1. ```
- foo
- ```
-
- bar
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__073: |
- * foo
- * bar
-
- baz
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__074: |
- - a
- - b
- - c
-
- - d
- - e
- - f
-06_01__inlines__001: |
- `hi`lo`
-06_02__inlines__backslash_escapes__001: |
- \!\"\#\$\%\&\'\(\)\*\+\,\-\.\/\:\;\<\=\>\?\@\[\\\]\^\_\`\{\|\}\~
-06_02__inlines__backslash_escapes__002: "\\\t\\A\\a\\ \\3\\φ\\«\n"
-06_02__inlines__backslash_escapes__003: |
- \*not emphasized*
- \<br/> not a tag
- \[not a link](/foo)
- \`not code`
- 1\. not a list
- \* not a list
- \# not a heading
- \[foo]: /url "not a reference"
- \&ouml; not a character entity
-06_02__inlines__backslash_escapes__004: |
- \\*emphasis*
-06_02__inlines__backslash_escapes__005: |
- foo\
- bar
-06_02__inlines__backslash_escapes__006: |
- `` \[\` ``
-06_02__inlines__backslash_escapes__007: |2
- \[\]
-06_02__inlines__backslash_escapes__008: |
- ~~~
- \[\]
- ~~~
-06_02__inlines__backslash_escapes__009: |
- <http://example.com?find=\*>
-06_02__inlines__backslash_escapes__010: |
- <a href="/bar\/)">
-06_02__inlines__backslash_escapes__011: |
- [foo](/bar\* "ti\*tle")
-06_02__inlines__backslash_escapes__012: |
- [foo]
-
- [foo]: /bar\* "ti\*tle"
-06_02__inlines__backslash_escapes__013: |
- ``` foo\+bar
- foo
- ```
-06_03__inlines__entity_and_numeric_character_references__001: |
- &nbsp; &amp; &copy; &AElig; &Dcaron;
- &frac34; &HilbertSpace; &DifferentialD;
- &ClockwiseContourIntegral; &ngE;
-06_03__inlines__entity_and_numeric_character_references__002: |
- &#35; &#1234; &#992; &#0;
-06_03__inlines__entity_and_numeric_character_references__003: |
- &#X22; &#XD06; &#xcab;
-06_03__inlines__entity_and_numeric_character_references__004: |
- &nbsp &x; &#; &#x;
- &#987654321;
- &#abcdef0;
- &ThisIsNotDefined; &hi?;
-06_03__inlines__entity_and_numeric_character_references__005: |
- &copy
-06_03__inlines__entity_and_numeric_character_references__006: |
- &MadeUpEntity;
-06_03__inlines__entity_and_numeric_character_references__007: |
- <a href="&ouml;&ouml;.html">
-06_03__inlines__entity_and_numeric_character_references__008: |
- [foo](/f&ouml;&ouml; "f&ouml;&ouml;")
-06_03__inlines__entity_and_numeric_character_references__009: |
- [foo]
-
- [foo]: /f&ouml;&ouml; "f&ouml;&ouml;"
-06_03__inlines__entity_and_numeric_character_references__010: |
- ``` f&ouml;&ouml;
- foo
- ```
-06_03__inlines__entity_and_numeric_character_references__011: |
- `f&ouml;&ouml;`
-06_03__inlines__entity_and_numeric_character_references__012: |2
- f&ouml;f&ouml;
-06_03__inlines__entity_and_numeric_character_references__013: |
- &#42;foo&#42;
- *foo*
-06_03__inlines__entity_and_numeric_character_references__014: |
- &#42; foo
-
- * foo
-06_03__inlines__entity_and_numeric_character_references__015: |
- foo&#10;&#10;bar
-06_03__inlines__entity_and_numeric_character_references__016: |
- &#9;foo
-06_03__inlines__entity_and_numeric_character_references__017: |
- [a](url &quot;tit&quot;)
-06_04__inlines__code_spans__001: |
- `foo`
-06_04__inlines__code_spans__002: |
- `` foo ` bar ``
-06_04__inlines__code_spans__003: |
- ` `` `
-06_04__inlines__code_spans__004: |
- ` `` `
-06_04__inlines__code_spans__005: |
- ` a`
-06_04__inlines__code_spans__006: |
- ` b `
-06_04__inlines__code_spans__007: |
- ` `
- ` `
-06_04__inlines__code_spans__008: "``\nfoo\nbar \nbaz\n``\n"
-06_04__inlines__code_spans__009: "``\nfoo \n``\n"
-06_04__inlines__code_spans__010: "`foo bar \nbaz`\n"
-06_04__inlines__code_spans__011: |
- `foo\`bar`
-06_04__inlines__code_spans__012: |
- ``foo`bar``
-06_04__inlines__code_spans__013: |
- ` foo `` bar `
-06_04__inlines__code_spans__014: |
- *foo`*`
-06_04__inlines__code_spans__015: |
- [not a `link](/foo`)
-06_04__inlines__code_spans__016: |
- `<a href="`">`
-06_04__inlines__code_spans__017: |
- <a href="`">`
-06_04__inlines__code_spans__018: |
- `<http://foo.bar.`baz>`
-06_04__inlines__code_spans__019: |
- <http://foo.bar.`baz>`
-06_04__inlines__code_spans__020: |
- ```foo``
-06_04__inlines__code_spans__021: |
- `foo
-06_04__inlines__code_spans__022: |
- `foo``bar``
-06_05__inlines__emphasis_and_strong_emphasis__001: |
- *foo bar*
-06_05__inlines__emphasis_and_strong_emphasis__002: |
- a * foo bar*
-06_05__inlines__emphasis_and_strong_emphasis__003: |
- a*"foo"*
-06_05__inlines__emphasis_and_strong_emphasis__004: |
- * a *
-06_05__inlines__emphasis_and_strong_emphasis__005: |
- foo*bar*
-06_05__inlines__emphasis_and_strong_emphasis__006: |
- 5*6*78
-06_05__inlines__emphasis_and_strong_emphasis__007: |
- _foo bar_
-06_05__inlines__emphasis_and_strong_emphasis__008: |
- _ foo bar_
-06_05__inlines__emphasis_and_strong_emphasis__009: |
- a_"foo"_
-06_05__inlines__emphasis_and_strong_emphasis__010: |
- foo_bar_
-06_05__inlines__emphasis_and_strong_emphasis__011: |
- 5_6_78
-06_05__inlines__emphasis_and_strong_emphasis__012: |
- пристаням_стремятся_
-06_05__inlines__emphasis_and_strong_emphasis__013: |
- aa_"bb"_cc
-06_05__inlines__emphasis_and_strong_emphasis__014: |
- foo-_(bar)_
-06_05__inlines__emphasis_and_strong_emphasis__015: |
- _foo*
-06_05__inlines__emphasis_and_strong_emphasis__016: |
- *foo bar *
-06_05__inlines__emphasis_and_strong_emphasis__017: |
- *foo bar
- *
-06_05__inlines__emphasis_and_strong_emphasis__018: |
- *(*foo)
-06_05__inlines__emphasis_and_strong_emphasis__019: |
- *(*foo*)*
-06_05__inlines__emphasis_and_strong_emphasis__020: |
- *foo*bar
-06_05__inlines__emphasis_and_strong_emphasis__021: |
- _foo bar _
-06_05__inlines__emphasis_and_strong_emphasis__022: |
- _(_foo)
-06_05__inlines__emphasis_and_strong_emphasis__023: |
- _(_foo_)_
-06_05__inlines__emphasis_and_strong_emphasis__024: |
- _foo_bar
-06_05__inlines__emphasis_and_strong_emphasis__025: |
- _пристаням_стремятся
-06_05__inlines__emphasis_and_strong_emphasis__026: |
- _foo_bar_baz_
-06_05__inlines__emphasis_and_strong_emphasis__027: |
- _(bar)_.
-06_05__inlines__emphasis_and_strong_emphasis__028: |
- **foo bar**
-06_05__inlines__emphasis_and_strong_emphasis__029: |
- ** foo bar**
-06_05__inlines__emphasis_and_strong_emphasis__030: |
- a**"foo"**
-06_05__inlines__emphasis_and_strong_emphasis__031: |
- foo**bar**
-06_05__inlines__emphasis_and_strong_emphasis__032: |
- __foo bar__
-06_05__inlines__emphasis_and_strong_emphasis__033: |
- __ foo bar__
-06_05__inlines__emphasis_and_strong_emphasis__034: |
- __
- foo bar__
-06_05__inlines__emphasis_and_strong_emphasis__035: |
- a__"foo"__
-06_05__inlines__emphasis_and_strong_emphasis__036: |
- foo__bar__
-06_05__inlines__emphasis_and_strong_emphasis__037: |
- 5__6__78
-06_05__inlines__emphasis_and_strong_emphasis__038: |
- пристаням__стремятся__
-06_05__inlines__emphasis_and_strong_emphasis__039: |
- __foo, __bar__, baz__
-06_05__inlines__emphasis_and_strong_emphasis__040: |
- foo-__(bar)__
-06_05__inlines__emphasis_and_strong_emphasis__041: |
- **foo bar **
-06_05__inlines__emphasis_and_strong_emphasis__042: |
- **(**foo)
-06_05__inlines__emphasis_and_strong_emphasis__043: |
- *(**foo**)*
-06_05__inlines__emphasis_and_strong_emphasis__044: |
- **Gomphocarpus (*Gomphocarpus physocarpus*, syn.
- *Asclepias physocarpa*)**
-06_05__inlines__emphasis_and_strong_emphasis__045: |
- **foo "*bar*" foo**
-06_05__inlines__emphasis_and_strong_emphasis__046: |
- **foo**bar
-06_05__inlines__emphasis_and_strong_emphasis__047: |
- __foo bar __
-06_05__inlines__emphasis_and_strong_emphasis__048: |
- __(__foo)
-06_05__inlines__emphasis_and_strong_emphasis__049: |
- _(__foo__)_
-06_05__inlines__emphasis_and_strong_emphasis__050: |
- __foo__bar
-06_05__inlines__emphasis_and_strong_emphasis__051: |
- __пристаням__стремятся
-06_05__inlines__emphasis_and_strong_emphasis__052: |
- __foo__bar__baz__
-06_05__inlines__emphasis_and_strong_emphasis__053: |
- __(bar)__.
-06_05__inlines__emphasis_and_strong_emphasis__054: |
- *foo [bar](/url)*
-06_05__inlines__emphasis_and_strong_emphasis__055: |
- *foo
- bar*
-06_05__inlines__emphasis_and_strong_emphasis__056: |
- _foo __bar__ baz_
-06_05__inlines__emphasis_and_strong_emphasis__057: |
- _foo _bar_ baz_
-06_05__inlines__emphasis_and_strong_emphasis__058: |
- __foo_ bar_
-06_05__inlines__emphasis_and_strong_emphasis__059: |
- *foo *bar**
-06_05__inlines__emphasis_and_strong_emphasis__060: |
- *foo **bar** baz*
-06_05__inlines__emphasis_and_strong_emphasis__061: |
- *foo**bar**baz*
-06_05__inlines__emphasis_and_strong_emphasis__062: |
- *foo**bar*
-06_05__inlines__emphasis_and_strong_emphasis__063: |
- ***foo** bar*
-06_05__inlines__emphasis_and_strong_emphasis__064: |
- *foo **bar***
-06_05__inlines__emphasis_and_strong_emphasis__065: |
- *foo**bar***
-06_05__inlines__emphasis_and_strong_emphasis__066: |
- foo***bar***baz
-06_05__inlines__emphasis_and_strong_emphasis__067: |
- foo******bar*********baz
-06_05__inlines__emphasis_and_strong_emphasis__068: |
- *foo **bar *baz* bim** bop*
-06_05__inlines__emphasis_and_strong_emphasis__069: |
- *foo [*bar*](/url)*
-06_05__inlines__emphasis_and_strong_emphasis__070: |
- ** is not an empty emphasis
-06_05__inlines__emphasis_and_strong_emphasis__071: |
- **** is not an empty strong emphasis
-06_05__inlines__emphasis_and_strong_emphasis__072: |
- **foo [bar](/url)**
-06_05__inlines__emphasis_and_strong_emphasis__073: |
- **foo
- bar**
-06_05__inlines__emphasis_and_strong_emphasis__074: |
- __foo _bar_ baz__
-06_05__inlines__emphasis_and_strong_emphasis__075: |
- __foo __bar__ baz__
-06_05__inlines__emphasis_and_strong_emphasis__076: |
- ____foo__ bar__
-06_05__inlines__emphasis_and_strong_emphasis__077: |
- **foo **bar****
-06_05__inlines__emphasis_and_strong_emphasis__078: |
- **foo *bar* baz**
-06_05__inlines__emphasis_and_strong_emphasis__079: |
- **foo*bar*baz**
-06_05__inlines__emphasis_and_strong_emphasis__080: |
- ***foo* bar**
-06_05__inlines__emphasis_and_strong_emphasis__081: |
- **foo *bar***
-06_05__inlines__emphasis_and_strong_emphasis__082: |
- **foo *bar **baz**
- bim* bop**
-06_05__inlines__emphasis_and_strong_emphasis__083: |
- **foo [*bar*](/url)**
-06_05__inlines__emphasis_and_strong_emphasis__084: |
- __ is not an empty emphasis
-06_05__inlines__emphasis_and_strong_emphasis__085: |
- ____ is not an empty strong emphasis
-06_05__inlines__emphasis_and_strong_emphasis__086: |
- foo ***
-06_05__inlines__emphasis_and_strong_emphasis__087: |
- foo *\**
-06_05__inlines__emphasis_and_strong_emphasis__088: |
- foo *_*
-06_05__inlines__emphasis_and_strong_emphasis__089: |
- foo *****
-06_05__inlines__emphasis_and_strong_emphasis__090: |
- foo **\***
-06_05__inlines__emphasis_and_strong_emphasis__091: |
- foo **_**
-06_05__inlines__emphasis_and_strong_emphasis__092: |
- **foo*
-06_05__inlines__emphasis_and_strong_emphasis__093: |
- *foo**
-06_05__inlines__emphasis_and_strong_emphasis__094: |
- ***foo**
-06_05__inlines__emphasis_and_strong_emphasis__095: |
- ****foo*
-06_05__inlines__emphasis_and_strong_emphasis__096: |
- **foo***
-06_05__inlines__emphasis_and_strong_emphasis__097: |
- *foo****
-06_05__inlines__emphasis_and_strong_emphasis__098: |
- foo ___
-06_05__inlines__emphasis_and_strong_emphasis__099: |
- foo _\__
-06_05__inlines__emphasis_and_strong_emphasis__100: |
- foo _*_
-06_05__inlines__emphasis_and_strong_emphasis__101: |
- foo _____
-06_05__inlines__emphasis_and_strong_emphasis__102: |
- foo __\___
-06_05__inlines__emphasis_and_strong_emphasis__103: |
- foo __*__
-06_05__inlines__emphasis_and_strong_emphasis__104: |
- __foo_
-06_05__inlines__emphasis_and_strong_emphasis__105: |
- _foo__
-06_05__inlines__emphasis_and_strong_emphasis__106: |
- ___foo__
-06_05__inlines__emphasis_and_strong_emphasis__107: |
- ____foo_
-06_05__inlines__emphasis_and_strong_emphasis__108: |
- __foo___
-06_05__inlines__emphasis_and_strong_emphasis__109: |
- _foo____
-06_05__inlines__emphasis_and_strong_emphasis__110: |
- **foo**
-06_05__inlines__emphasis_and_strong_emphasis__111: |
- *_foo_*
-06_05__inlines__emphasis_and_strong_emphasis__112: |
- __foo__
-06_05__inlines__emphasis_and_strong_emphasis__113: |
- _*foo*_
-06_05__inlines__emphasis_and_strong_emphasis__114: |
- ****foo****
-06_05__inlines__emphasis_and_strong_emphasis__115: |
- ____foo____
-06_05__inlines__emphasis_and_strong_emphasis__116: |
- ******foo******
-06_05__inlines__emphasis_and_strong_emphasis__117: |
- ***foo***
-06_05__inlines__emphasis_and_strong_emphasis__118: |
- _____foo_____
-06_05__inlines__emphasis_and_strong_emphasis__119: |
- *foo _bar* baz_
-06_05__inlines__emphasis_and_strong_emphasis__120: |
- *foo __bar *baz bim__ bam*
-06_05__inlines__emphasis_and_strong_emphasis__121: |
- **foo **bar baz**
-06_05__inlines__emphasis_and_strong_emphasis__122: |
- *foo *bar baz*
-06_05__inlines__emphasis_and_strong_emphasis__123: |
- *[bar*](/url)
-06_05__inlines__emphasis_and_strong_emphasis__124: |
- _foo [bar_](/url)
-06_05__inlines__emphasis_and_strong_emphasis__125: |
- *<img src="foo" title="*"/>
-06_05__inlines__emphasis_and_strong_emphasis__126: |
- **<a href="**">
-06_05__inlines__emphasis_and_strong_emphasis__127: |
- __<a href="__">
-06_05__inlines__emphasis_and_strong_emphasis__128: |
- *a `*`*
-06_05__inlines__emphasis_and_strong_emphasis__129: |
- _a `_`_
-06_05__inlines__emphasis_and_strong_emphasis__130: |
- **a<http://foo.bar/?q=**>
-06_05__inlines__emphasis_and_strong_emphasis__131: |
- __a<http://foo.bar/?q=__>
-06_06__inlines__strikethrough_extension__001: |
- ~~Hi~~ Hello, world!
-06_06__inlines__strikethrough_extension__002: |
- This ~~has a
-
- new paragraph~~.
-06_07__inlines__links__001: |
- [link](/uri "title")
-06_07__inlines__links__002: |
- [link](/uri)
-06_07__inlines__links__003: |
- [link]()
-06_07__inlines__links__004: |
- [link](<>)
-06_07__inlines__links__005: |
- [link](/my uri)
-06_07__inlines__links__006: |
- [link](</my uri>)
-06_07__inlines__links__007: |
- [link](foo
- bar)
-06_07__inlines__links__008: |
- [link](<foo
- bar>)
-06_07__inlines__links__009: |
- [a](<b)c>)
-06_07__inlines__links__010: |
- [link](<foo\>)
-06_07__inlines__links__011: |
- [a](<b)c
- [a](<b)c>
- [a](<b>c)
-06_07__inlines__links__012: |
- [link](\(foo\))
-06_07__inlines__links__013: |
- [link](foo(and(bar)))
-06_07__inlines__links__014: |
- [link](foo\(and\(bar\))
-06_07__inlines__links__015: |
- [link](<foo(and(bar)>)
-06_07__inlines__links__016: |
- [link](foo\)\:)
-06_07__inlines__links__017: |
- [link](#fragment)
-
- [link](http://example.com#fragment)
-
- [link](http://example.com?foo=3#frag)
-06_07__inlines__links__018: |
- [link](foo\bar)
-06_07__inlines__links__019: |
- [link](foo%20b&auml;)
-06_07__inlines__links__020: |
- [link]("title")
-06_07__inlines__links__021: |
- [link](/url "title")
- [link](/url 'title')
- [link](/url (title))
-06_07__inlines__links__022: |
- [link](/url "title \"&quot;")
-06_07__inlines__links__023: |
- [link](/url "title")
-06_07__inlines__links__024: |
- [link](/url "title "and" title")
-06_07__inlines__links__025: |
- [link](/url 'title "and" title')
-06_07__inlines__links__026: |
- [link]( /uri
- "title" )
-06_07__inlines__links__027: |
- [link] (/uri)
-06_07__inlines__links__028: |
- [link [foo [bar]]](/uri)
-06_07__inlines__links__029: |
- [link] bar](/uri)
-06_07__inlines__links__030: |
- [link [bar](/uri)
-06_07__inlines__links__031: |
- [link \[bar](/uri)
-06_07__inlines__links__032: |
- [link *foo **bar** `#`*](/uri)
-06_07__inlines__links__033: |
- [![moon](moon.jpg)](/uri)
-06_07__inlines__links__034: |
- [foo [bar](/uri)](/uri)
-06_07__inlines__links__035: |
- [foo *[bar [baz](/uri)](/uri)*](/uri)
-06_07__inlines__links__036: |
- ![[[foo](uri1)](uri2)](uri3)
-06_07__inlines__links__037: |
- *[foo*](/uri)
-06_07__inlines__links__038: |
- [foo *bar](baz*)
-06_07__inlines__links__039: |
- *foo [bar* baz]
-06_07__inlines__links__040: |
- [foo <bar attr="](baz)">
-06_07__inlines__links__041: |
- [foo`](/uri)`
-06_07__inlines__links__042: |
- [foo<http://example.com/?search=](uri)>
-06_07__inlines__links__043: |
- [foo][bar]
-
- [bar]: /url "title"
-06_07__inlines__links__044: |
- [link [foo [bar]]][ref]
-
- [ref]: /uri
-06_07__inlines__links__045: |
- [link \[bar][ref]
-
- [ref]: /uri
-06_07__inlines__links__046: |
- [link *foo **bar** `#`*][ref]
-
- [ref]: /uri
-06_07__inlines__links__047: |
- [![moon](moon.jpg)][ref]
-
- [ref]: /uri
-06_07__inlines__links__048: |
- [foo [bar](/uri)][ref]
-
- [ref]: /uri
-06_07__inlines__links__049: |
- [foo *bar [baz][ref]*][ref]
-
- [ref]: /uri
-06_07__inlines__links__050: |
- *[foo*][ref]
-
- [ref]: /uri
-06_07__inlines__links__051: |
- [foo *bar][ref]
-
- [ref]: /uri
-06_07__inlines__links__052: |
- [foo <bar attr="][ref]">
-
- [ref]: /uri
-06_07__inlines__links__053: |
- [foo`][ref]`
-
- [ref]: /uri
-06_07__inlines__links__054: |
- [foo<http://example.com/?search=][ref]>
-
- [ref]: /uri
-06_07__inlines__links__055: |
- [foo][BaR]
-
- [bar]: /url "title"
-06_07__inlines__links__056: |
- [Толпой][Толпой] is a Russian word.
-
- [ТОЛПОЙ]: /url
-06_07__inlines__links__057: |
- [Foo
- bar]: /url
-
- [Baz][Foo bar]
-06_07__inlines__links__058: |
- [foo] [bar]
-
- [bar]: /url "title"
-06_07__inlines__links__059: |
- [foo]
- [bar]
-
- [bar]: /url "title"
-06_07__inlines__links__060: |
- [foo]: /url1
-
- [foo]: /url2
-
- [bar][foo]
-06_07__inlines__links__061: |
- [bar][foo\!]
-
- [foo!]: /url
-06_07__inlines__links__062: |
- [foo][ref[]
-
- [ref[]: /uri
-06_07__inlines__links__063: |
- [foo][ref[bar]]
-
- [ref[bar]]: /uri
-06_07__inlines__links__064: |
- [[[foo]]]
-
- [[[foo]]]: /url
-06_07__inlines__links__065: |
- [foo][ref\[]
-
- [ref\[]: /uri
-06_07__inlines__links__066: |
- [bar\\]: /uri
-
- [bar\\]
-06_07__inlines__links__067: |
- []
-
- []: /uri
-06_07__inlines__links__068: |
- [
- ]
-
- [
- ]: /uri
-06_07__inlines__links__069: |
- [foo][]
-
- [foo]: /url "title"
-06_07__inlines__links__070: |
- [*foo* bar][]
-
- [*foo* bar]: /url "title"
-06_07__inlines__links__071: |
- [Foo][]
-
- [foo]: /url "title"
-06_07__inlines__links__072: "[foo] \n[]\n\n[foo]: /url \"title\"\n"
-06_07__inlines__links__073: |
- [foo]
-
- [foo]: /url "title"
-06_07__inlines__links__074: |
- [*foo* bar]
-
- [*foo* bar]: /url "title"
-06_07__inlines__links__075: |
- [[*foo* bar]]
-
- [*foo* bar]: /url "title"
-06_07__inlines__links__076: |
- [[bar [foo]
-
- [foo]: /url
-06_07__inlines__links__077: |
- [Foo]
-
- [foo]: /url "title"
-06_07__inlines__links__078: |
- [foo] bar
-
- [foo]: /url
-06_07__inlines__links__079: |
- \[foo]
-
- [foo]: /url "title"
-06_07__inlines__links__080: |
- [foo*]: /url
-
- *[foo*]
-06_07__inlines__links__081: |
- [foo][bar]
-
- [foo]: /url1
- [bar]: /url2
-06_07__inlines__links__082: |
- [foo][]
-
- [foo]: /url1
-06_07__inlines__links__083: |
- [foo]()
-
- [foo]: /url1
-06_07__inlines__links__084: |
- [foo](not a link)
-
- [foo]: /url1
-06_07__inlines__links__085: |
- [foo][bar][baz]
-
- [baz]: /url
-06_07__inlines__links__086: |
- [foo][bar][baz]
-
- [baz]: /url1
- [bar]: /url2
-06_07__inlines__links__087: |
- [foo][bar][baz]
-
- [baz]: /url1
- [foo]: /url2
-06_08__inlines__images__001: |
- ![foo](/url "title")
-06_08__inlines__images__002: |
- ![foo *bar*]
-
- [foo *bar*]: train.jpg "train & tracks"
-06_08__inlines__images__003: |
- ![foo ![bar](/url)](/url2)
-06_08__inlines__images__004: |
- ![foo [bar](/url)](/url2)
-06_08__inlines__images__005: |
- ![foo *bar*][]
-
- [foo *bar*]: train.jpg "train & tracks"
-06_08__inlines__images__006: |
- ![foo *bar*][foobar]
-
- [FOOBAR]: train.jpg "train & tracks"
-06_08__inlines__images__007: |
- ![foo](train.jpg)
-06_08__inlines__images__008: |
- My ![foo bar](/path/to/train.jpg "title" )
-06_08__inlines__images__009: |
- ![foo](<url>)
-06_08__inlines__images__010: |
- ![](/url)
-06_08__inlines__images__011: |
- ![foo][bar]
-
- [bar]: /url
-06_08__inlines__images__012: |
- ![foo][bar]
-
- [BAR]: /url
-06_08__inlines__images__013: |
- ![foo][]
-
- [foo]: /url "title"
-06_08__inlines__images__014: |
- ![*foo* bar][]
-
- [*foo* bar]: /url "title"
-06_08__inlines__images__015: |
- ![Foo][]
-
- [foo]: /url "title"
-06_08__inlines__images__016: "![foo] \n[]\n\n[foo]: /url \"title\"\n"
-06_08__inlines__images__017: |
- ![foo]
-
- [foo]: /url "title"
-06_08__inlines__images__018: |
- ![*foo* bar]
-
- [*foo* bar]: /url "title"
-06_08__inlines__images__019: |
- ![[foo]]
-
- [[foo]]: /url "title"
-06_08__inlines__images__020: |
- ![Foo]
-
- [foo]: /url "title"
-06_08__inlines__images__021: |
- !\[foo]
-
- [foo]: /url "title"
-06_08__inlines__images__022: |
- \![foo]
-
- [foo]: /url "title"
-06_09__inlines__autolinks__001: |
- <http://foo.bar.baz>
-06_09__inlines__autolinks__002: |
- <http://foo.bar.baz/test?q=hello&id=22&boolean>
-06_09__inlines__autolinks__003: |
- <irc://foo.bar:2233/baz>
-06_09__inlines__autolinks__004: |
- <MAILTO:FOO@BAR.BAZ>
-06_09__inlines__autolinks__005: |
- <a+b+c:d>
-06_09__inlines__autolinks__006: |
- <made-up-scheme://foo,bar>
-06_09__inlines__autolinks__007: |
- <http://../>
-06_09__inlines__autolinks__008: |
- <localhost:5001/foo>
-06_09__inlines__autolinks__009: |
- <http://foo.bar/baz bim>
-06_09__inlines__autolinks__010: |
- <http://example.com/\[\>
-06_09__inlines__autolinks__011: |
- <foo@bar.example.com>
-06_09__inlines__autolinks__012: |
- <foo+special@Bar.baz-bar0.com>
-06_09__inlines__autolinks__013: |
- <foo\+@bar.example.com>
-06_09__inlines__autolinks__014: |
- <>
-06_09__inlines__autolinks__015: |
- < http://foo.bar >
-06_09__inlines__autolinks__016: |
- <m:abc>
-06_09__inlines__autolinks__017: |
- <foo.bar.baz>
-06_09__inlines__autolinks__018: |
- http://example.com
-06_09__inlines__autolinks__019: |
- foo@bar.example.com
-06_10__inlines__autolinks_extension__001: |
- www.commonmark.org
-06_10__inlines__autolinks_extension__002: |
- Visit www.commonmark.org/help for more information.
-06_10__inlines__autolinks_extension__003: |
- Visit www.commonmark.org.
-
- Visit www.commonmark.org/a.b.
-06_10__inlines__autolinks_extension__004: |
- www.google.com/search?q=Markup+(business)
-
- www.google.com/search?q=Markup+(business)))
-
- (www.google.com/search?q=Markup+(business))
-
- (www.google.com/search?q=Markup+(business)
-06_10__inlines__autolinks_extension__005: |
- www.google.com/search?q=(business))+ok
-06_10__inlines__autolinks_extension__006: |
- www.google.com/search?q=commonmark&hl=en
-
- www.google.com/search?q=commonmark&hl;
-06_10__inlines__autolinks_extension__007: |
- www.commonmark.org/he<lp
-06_10__inlines__autolinks_extension__008: |
- http://commonmark.org
-
- (Visit https://encrypted.google.com/search?q=Markup+(business))
-
- Anonymous FTP is available at ftp://foo.bar.baz.
-06_10__inlines__autolinks_extension__009: |
- foo@bar.baz
-06_10__inlines__autolinks_extension__010: |
- hello@mail+xyz.example isn't valid, but hello+xyz@mail.example is.
-06_10__inlines__autolinks_extension__011: |
- a.b-c_d@a.b
-
- a.b-c_d@a.b.
-
- a.b-c_d@a.b-
-
- a.b-c_d@a.b_
-06_11__inlines__raw_html__001: |
- <a><bab><c2c>
-06_11__inlines__raw_html__002: |
- <a/><b2/>
-06_11__inlines__raw_html__003: |
- <a /><b2
- data="foo" >
-06_11__inlines__raw_html__004: |
- <a foo="bar" bam = 'baz <em>"</em>'
- _boolean zoop:33=zoop:33 />
-06_11__inlines__raw_html__005: |
- Foo <responsive-image src="foo.jpg" />
-06_11__inlines__raw_html__006: |
- <33> <__>
-06_11__inlines__raw_html__007: |
- <a h*#ref="hi">
-06_11__inlines__raw_html__008: |
- <a href="hi'> <a href=hi'>
-06_11__inlines__raw_html__009: |
- < a><
- foo><bar/ >
- <foo bar=baz
- bim!bop />
-06_11__inlines__raw_html__010: |
- <a href='bar'title=title>
-06_11__inlines__raw_html__011: |
- </a></foo >
-06_11__inlines__raw_html__012: |
- </a href="foo">
-06_11__inlines__raw_html__013: |
- foo <!-- this is a
- comment - with hyphen -->
-06_11__inlines__raw_html__014: |
- foo <!-- not a comment -- two hyphens -->
-06_11__inlines__raw_html__015: |
- foo <!--> foo -->
-
- foo <!-- foo--->
-06_11__inlines__raw_html__016: |
- foo <?php echo $a; ?>
-06_11__inlines__raw_html__017: |
- foo <!ELEMENT br EMPTY>
-06_11__inlines__raw_html__018: |
- foo <![CDATA[>&<]]>
-06_11__inlines__raw_html__019: |
- foo <a href="&ouml;">
-06_11__inlines__raw_html__020: |
- foo <a href="\*">
-06_11__inlines__raw_html__021: |
- <a href="\"">
-06_12__inlines__disallowed_raw_html_extension__001: |
- <strong> <title> <style> <em>
-
- <blockquote>
- <xmp> is disallowed. <XMP> is also disallowed.
- </blockquote>
-06_13__inlines__hard_line_breaks__001: "foo \nbaz\n"
-06_13__inlines__hard_line_breaks__002: |
- foo\
- baz
-06_13__inlines__hard_line_breaks__003: "foo \nbaz\n"
-06_13__inlines__hard_line_breaks__004: "foo \n bar\n"
-06_13__inlines__hard_line_breaks__005: |
- foo\
- bar
-06_13__inlines__hard_line_breaks__006: "*foo \nbar*\n"
-06_13__inlines__hard_line_breaks__007: |
- *foo\
- bar*
-06_13__inlines__hard_line_breaks__008: "`code \nspan`\n"
-06_13__inlines__hard_line_breaks__009: |
- `code\
- span`
-06_13__inlines__hard_line_breaks__010: "<a href=\"foo \nbar\">\n"
-06_13__inlines__hard_line_breaks__011: |
- <a href="foo\
- bar">
-06_13__inlines__hard_line_breaks__012: |
- foo\
-06_13__inlines__hard_line_breaks__013: "foo \n"
-06_13__inlines__hard_line_breaks__014: |
- ### foo\
-06_13__inlines__hard_line_breaks__015: "### foo \n"
-06_14__inlines__soft_line_breaks__001: |
- foo
- baz
-06_14__inlines__soft_line_breaks__002: "foo \n baz\n"
-06_15__inlines__textual_content__001: |
- hello $.;'there
-06_15__inlines__textual_content__002: |
- Foo χρῆν
-06_15__inlines__textual_content__003: |
- Multiple spaces
-07_01__gitlab_specific_markdown__footnotes__001: |
- footnote reference tag [^1]
-
- [^1]: footnote text
diff --git a/spec/fixtures/glfm/example_snapshots/prosemirror_json.yml b/spec/fixtures/glfm/example_snapshots/prosemirror_json.yml
deleted file mode 100644
index 04196c26af0..00000000000
--- a/spec/fixtures/glfm/example_snapshots/prosemirror_json.yml
+++ /dev/null
@@ -1,17018 +0,0 @@
----
-02_01__preliminaries__tabs__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "foo\tbaz\t\tbim"
- }
- ]
- }
- ]
- }
-02_01__preliminaries__tabs__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "foo\tbaz\t\tbim"
- }
- ]
- }
- ]
- }
-02_01__preliminaries__tabs__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "a\ta\nὐ\ta"
- }
- ]
- }
- ]
- }
-02_01__preliminaries__tabs__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-02_01__preliminaries__tabs__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": " bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-02_01__preliminaries__tabs__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": " foo"
- }
- ]
- }
- ]
- }
- ]
- }
-02_01__preliminaries__tabs__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": " foo"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-02_01__preliminaries__tabs__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "foo\nbar"
- }
- ]
- }
- ]
- }
-02_01__preliminaries__tabs__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-02_01__preliminaries__tabs__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- }
- ]
- }
-02_01__preliminaries__tabs__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "horizontalRule"
- }
- ]
- }
-03_01__blocks_and_inlines__precedence__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "`one"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "two`"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "horizontalRule"
- },
- {
- "type": "horizontalRule"
- },
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "+++"
- }
- ]
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "==="
- }
- ]
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "--\n**\n__"
- }
- ]
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "horizontalRule"
- },
- {
- "type": "horizontalRule"
- },
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "***"
- }
- ]
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo\n***"
- }
- ]
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__013: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "_ _ _ _ a"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a------"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "---a---"
- }
- ]
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__014: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "-"
- }
- ]
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "horizontalRule"
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__016: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- },
- {
- "type": "horizontalRule"
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__017: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__018: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "horizontalRule"
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-04_01__leaf_blocks__thematic_breaks__019: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- },
- {
- "type": "horizontalRule"
- }
- ]
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 3
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 4
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 5
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 6
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "####### foo"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "#5 bolt"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "#hashtag"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "## foo"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "bar"
- },
- {
- "type": "text",
- "text": " *baz*"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 3
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "# foo"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo\n# bar"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 3
- },
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 5
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 3
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__013: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 3
- },
- "content": [
- {
- "type": "text",
- "text": "foo ### b"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__014: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "foo#"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 3
- },
- "content": [
- {
- "type": "text",
- "text": "foo ###"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "foo ###"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "foo #"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__016: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "horizontalRule"
- },
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__017: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo bar"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Bar foo"
- }
- ]
- }
- ]
- }
-04_02__leaf_blocks__atx_headings__018: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 2
- }
- },
- {
- "type": "heading",
- "attrs": {
- "level": 1
- }
- },
- {
- "type": "heading",
- "attrs": {
- "level": 3
- }
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "Foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "bar"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "Foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "Foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "bar\nbaz"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "Foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "bar\nbaz"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "Foo\n---\n\nFoo"
- }
- ]
- },
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo\n---"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo\n= ="
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- },
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "Foo\\"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "`Foo"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "`"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "<a title=\"a lot"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "of dashes\"/>"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__013: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- }
- ]
- },
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__014: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo\nbar\n==="
- }
- ]
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__016: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "Foo\nBar"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__017: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "horizontalRule"
- },
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "Bar"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Baz"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__018: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "===="
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__019: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "horizontalRule"
- },
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__020: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__021: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__022: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- },
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__023: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "> foo"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__024: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__025: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo\nbar"
- }
- ]
- },
- {
- "type": "horizontalRule"
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__026: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo\nbar"
- }
- ]
- },
- {
- "type": "horizontalRule"
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
-04_03__leaf_blocks__setext_headings__027: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo\nbar\n---\nbaz"
- }
- ]
- }
- ]
- }
-04_04__leaf_blocks__indented_code_blocks__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "a simple\n indented code block"
- }
- ]
- }
- ]
- }
-04_04__leaf_blocks__indented_code_blocks__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-04_04__leaf_blocks__indented_code_blocks__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-04_04__leaf_blocks__indented_code_blocks__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "<a/>\n*hi*\n\n- one"
- }
- ]
- }
- ]
- }
-04_04__leaf_blocks__indented_code_blocks__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "chunk1\n\nchunk2\n\n\n\nchunk3"
- }
- ]
- }
- ]
- }
-04_04__leaf_blocks__indented_code_blocks__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "chunk1\n \n chunk2"
- }
- ]
- }
- ]
- }
-04_04__leaf_blocks__indented_code_blocks__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo\nbar"
- }
- ]
- }
- ]
- }
-04_04__leaf_blocks__indented_code_blocks__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
-04_04__leaf_blocks__indented_code_blocks__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "Heading"
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "Heading"
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "horizontalRule"
- }
- ]
- }
-04_04__leaf_blocks__indented_code_blocks__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": " foo\nbar"
- }
- ]
- }
- ]
- }
-04_04__leaf_blocks__indented_code_blocks__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_04__leaf_blocks__indented_code_blocks__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "foo "
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "<\n >"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "<\n >"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "aaa\n~~~"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "aaa\n```"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "aaa\n```"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "aaa\n~~~"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- }
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "\n```\naaa"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "aaa"
- }
- ]
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bbb"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "\n "
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- }
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__013: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "aaa\naaa"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__014: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "aaa\naaa\naaa"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "aaa\n aaa\naaa"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__016: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "```\naaa\n```"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__017: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "aaa"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__018: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "aaa"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__019: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "aaa\n ```"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__020: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "\naaa"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__021: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "aaa\n~~~ ~~"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__022: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__023: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__024: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": "ruby",
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "def foo(x)\n return 3\nend"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__025: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": "ruby",
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "def foo(x)\n return 3\nend"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__026: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": ";",
- "class": "code highlight"
- }
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__027: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "aa"
- },
- {
- "type": "text",
- "text": "\nfoo"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__028: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": "aa",
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_05__leaf_blocks__fenced_code_blocks__029: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "``` aaa"
- }
- ]
- }
- ]
- }
-04_06__leaf_blocks__html_blocks__001: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__002: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__003: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "\n*foo*"
- }
- ]
- }
- ]
- }
-04_06__leaf_blocks__html_blocks__005: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__006: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__007: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__008: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
-04_06__leaf_blocks__html_blocks__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
-04_06__leaf_blocks__html_blocks__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
-04_06__leaf_blocks__html_blocks__012: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__013: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__014: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "foo",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "\n*bar*\n"
- }
- ]
- }
- ]
- }
-04_06__leaf_blocks__html_blocks__016: |-
- Error - check implementation:
- Hast node of type "warning" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__017: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "\n*bar*\n"
- }
- ]
- }
- ]
- }
-04_06__leaf_blocks__html_blocks__018: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "\n*bar*"
- }
- ]
- }
- ]
- }
-04_06__leaf_blocks__html_blocks__019: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "strike"
- }
- ],
- "text": "\n*foo*\n"
- }
- ]
- }
- ]
- }
-04_06__leaf_blocks__html_blocks__020: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-04_06__leaf_blocks__html_blocks__021: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- },
- {
- "type": "strike"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_06__leaf_blocks__html_blocks__022: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "\nimport Text.HTML.TagSoup\n\nmain :: IO ()\nmain = print $ parseTags tags"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "okay"
- }
- ]
- }
- ]
- }
-04_06__leaf_blocks__html_blocks__023: |-
- Error - check implementation:
- Hast node of type "script" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__024: |-
- Error - check implementation:
- Hast node of type "style" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__025: |-
- Error - check implementation:
- Hast node of type "style" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__026: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__027: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__028: |-
- Error - check implementation:
- Hast node of type "style" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__029: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-04_06__leaf_blocks__html_blocks__030: |-
- Error - check implementation:
- Hast node of type "script" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__031: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-04_06__leaf_blocks__html_blocks__032: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-04_06__leaf_blocks__html_blocks__033: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
-04_06__leaf_blocks__html_blocks__034: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-04_06__leaf_blocks__html_blocks__035: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-04_06__leaf_blocks__html_blocks__036: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__037: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__038: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__039: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo\n"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "bar",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "\nbaz"
- }
- ]
- }
- ]
- }
-04_06__leaf_blocks__html_blocks__040: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__041: |-
- Error - check implementation:
- Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__042: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__043: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_07__leaf_blocks__link_reference_definitions__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "the title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "my_(url)",
- "target": "_blank",
- "class": null,
- "title": "title (with parens)",
- "canonicalSrc": null
- }
- }
- ],
- "text": "Foo*bar]"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "my%20url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "Foo bar"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "\ntitle\nline1\nline2\n",
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo]: /url 'title"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "with blank line'"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo]"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo]:"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo]"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__010: |-
- Error - check implementation:
- Hast node of type "bar" not supported by this converter. Please, provide an specification.
-04_07__leaf_blocks__link_reference_definitions__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url%5Cbar*baz",
- "target": "_blank",
- "class": null,
- "title": "foo\"bar\\baz",
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__013: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "first",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__014: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "Foo"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/%CF%86%CE%BF%CF%85",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "αγω"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__016: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__017: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__018: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo]: /url \"title\" ok"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__019: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "\"title\" ok"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__020: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "[foo]: /url \"title\""
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo]"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__021: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "[foo]: /url"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo]"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__022: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo\n[bar]: /baz"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[bar]"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__023: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "Foo"
- }
- ]
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__024: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__025: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "===\n"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__026: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/foo-url",
- "target": "_blank",
- "class": null,
- "title": "foo",
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": ",\n"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/bar-url",
- "target": "_blank",
- "class": null,
- "title": "bar",
- "canonicalSrc": null
- }
- }
- ],
- "text": "bar"
- },
- {
- "type": "text",
- "text": ",\n"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/baz-url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "baz"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__027: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
- ]
- }
-04_07__leaf_blocks__link_reference_definitions__028: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
-04_08__leaf_blocks__paragraphs__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "aaa"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bbb"
- }
- ]
- }
- ]
- }
-04_08__leaf_blocks__paragraphs__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "aaa\nbbb"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "ccc\nddd"
- }
- ]
- }
- ]
- }
-04_08__leaf_blocks__paragraphs__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "aaa"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bbb"
- }
- ]
- }
- ]
- }
-04_08__leaf_blocks__paragraphs__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "aaa\nbbb"
- }
- ]
- }
- ]
- }
-04_08__leaf_blocks__paragraphs__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "aaa\nbbb\nccc"
- }
- ]
- }
- ]
- }
-04_08__leaf_blocks__paragraphs__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "aaa\nbbb"
- }
- ]
- }
- ]
- }
-04_08__leaf_blocks__paragraphs__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "aaa"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bbb"
- }
- ]
- }
- ]
- }
-04_08__leaf_blocks__paragraphs__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "aaa"
- },
- {
- "type": "hardBreak"
- },
- {
- "type": "text",
- "text": "\nbbb"
- }
- ]
- }
- ]
- }
-04_09__leaf_blocks__blank_lines__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "aaa"
- }
- ]
- },
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "aaa"
- }
- ]
- }
- ]
- }
-04_10__leaf_blocks__tables_extension__001: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_10__leaf_blocks__tables_extension__002: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_10__leaf_blocks__tables_extension__003: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_10__leaf_blocks__tables_extension__004: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_10__leaf_blocks__tables_extension__005: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_10__leaf_blocks__tables_extension__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "| abc | def |\n| --- |\n| bar |"
- }
- ]
- }
- ]
- }
-04_10__leaf_blocks__tables_extension__007: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_10__leaf_blocks__tables_extension__008: |-
- Error - check implementation:
- Hast node of type "table" not supported by this converter. Please, provide an specification.
-05_01__container_blocks__block_quotes__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar\nbaz"
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar\nbaz"
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar\nbaz"
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "> # Foo\n> bar\n> baz"
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar\nbaz"
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar\nbaz\nfoo"
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- },
- {
- "type": "horizontalRule"
- }
- ]
- }
-05_01__container_blocks__block_quotes__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- }
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- }
- }
- ]
- }
-05_01__container_blocks__block_quotes__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo\n- bar"
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__013: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__014: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__016: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo\nbar"
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__017: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__018: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__019: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "aaa"
- }
- ]
- }
- ]
- },
- {
- "type": "horizontalRule"
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bbb"
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__020: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar\nbaz"
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__021: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__022: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__023: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo\nbar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__024: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo\nbar\nbaz"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__025: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "code"
- }
- ]
- }
- ]
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "not code"
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "A paragraph\nwith two lines."
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "indented code"
- }
- ]
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "A block quote."
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "A paragraph\nwith two lines."
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "indented code"
- }
- ]
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "A block quote."
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "one"
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "two"
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "one"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "two"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "one"
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": " two"
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "one"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "two"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "one"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "two"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "one"
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "two"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "-one"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "2.two"
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bam"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "bar\n\n\nbaz"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__013: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "ok"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__014: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "1234567890. not ok"
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "ok"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__016: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "ok"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__017: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "-1. not ok"
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__018: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__019: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__020: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "indented code"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "paragraph"
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "more code"
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__021: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "indented code"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "paragraph"
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "more code"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__022: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": " indented code"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "paragraph"
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "more code"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__023: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__024: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__025: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__026: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__027: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__028: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__029: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__030: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__031: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__032: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__033: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo\n*"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo\n1."
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__034: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "A paragraph\nwith two lines."
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "indented code"
- }
- ]
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "A block quote."
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__035: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "A paragraph\nwith two lines."
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "indented code"
- }
- ]
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "A block quote."
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__036: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "A paragraph\nwith two lines."
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "indented code"
- }
- ]
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "A block quote."
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__037: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "1. A paragraph\n with two lines.\n\n indented code\n\n > A block quote."
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__038: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "A paragraph\nwith two lines."
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "indented code"
- }
- ]
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "A block quote."
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__039: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "A paragraph\nwith two lines."
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__040: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Blockquote\ncontinued here."
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__041: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Blockquote\ncontinued here."
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__042: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "boo"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__043: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "boo"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__044: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__045: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__046: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__047: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- },
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__048: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- },
- {
- "type": "heading",
- "attrs": {
- "level": 1
- },
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- },
- {
- "type": "heading",
- "attrs": {
- "level": 2
- },
- "content": [
- {
- "type": "text",
- "text": "Bar\nbaz"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__049: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__050: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__051: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__052: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "The number of windows in my house is\n14. The number of doors is 6."
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__053: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "The number of windows in my house is"
- }
- ]
- },
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "The number of doors is 6."
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__054: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__055: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bim"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__056: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__057: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__058: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "b"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "c"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "d"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "e"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "f"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "g"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__059: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "b"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "c"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__060: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "b"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "c"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "d\n- e"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__061: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "b"
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "3. c"
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__062: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "b"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "c"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__063: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "c"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__064: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "b"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "c"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "d"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__065: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "b"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "d"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__066: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "b\n\n"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "c"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__067: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "b"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "c"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "d"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__068: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a"
- }
- ]
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "b"
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "c"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__069: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a"
- }
- ]
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "b"
- }
- ]
- }
- ]
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "c"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "d"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__070: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__071: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "b"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__072: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "orderedList",
- "attrs": {
- "start": 1,
- "parens": false
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph"
- },
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__073: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__074: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "b"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "c"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "d"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "e"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "f"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-06_01__inlines__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "hi"
- },
- {
- "type": "text",
- "text": "lo`"
- }
- ]
- }
- ]
- }
-06_02__inlines__backslash_escapes__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~"
- }
- ]
- }
- ]
- }
-06_02__inlines__backslash_escapes__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "\\\t\\A\\a\\ \\3\\φ\\«"
- }
- ]
- }
- ]
- }
-06_02__inlines__backslash_escapes__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "*not emphasized*\n<br/> not a tag\n[not a link](/foo)\n`not code`\n1. not a list\n* not a list\n# not a heading\n[foo]: /url \"not a reference\"\n&ouml; not a character entity"
- }
- ]
- }
- ]
- }
-06_02__inlines__backslash_escapes__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "\\"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "emphasis"
- }
- ]
- }
- ]
- }
-06_02__inlines__backslash_escapes__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- },
- {
- "type": "hardBreak"
- },
- {
- "type": "text",
- "text": "\nbar"
- }
- ]
- }
- ]
- }
-06_02__inlines__backslash_escapes__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "\\[\\`"
- }
- ]
- }
- ]
- }
-06_02__inlines__backslash_escapes__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "\\[\\]"
- }
- ]
- }
- ]
- }
-06_02__inlines__backslash_escapes__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "\\[\\]"
- }
- ]
- }
- ]
- }
-06_02__inlines__backslash_escapes__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://example.com?find=%5C*",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "http://example.com?find=\\*"
- }
- ]
- }
- ]
- }
-06_02__inlines__backslash_escapes__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
-06_02__inlines__backslash_escapes__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/bar*",
- "target": "_blank",
- "class": null,
- "title": "ti*tle",
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_02__inlines__backslash_escapes__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/bar*",
- "target": "_blank",
- "class": null,
- "title": "ti*tle",
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_02__inlines__backslash_escapes__013: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": "foo+bar",
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "  & © Æ Ď\n¾ ℋ ⅆ\n∲ ≧̸"
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "# Ӓ Ϡ �"
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "\" ആ ಫ"
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "&nbsp &x; &#; &#x;\n&#987654321;\n&#abcdef0;\n&ThisIsNotDefined; &hi?;"
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "&copy"
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "&MadeUpEntity;"
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/f%C3%B6%C3%B6",
- "target": "_blank",
- "class": null,
- "title": "föö",
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/f%C3%B6%C3%B6",
- "target": "_blank",
- "class": null,
- "title": "föö",
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": "föö",
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "f&ouml;&ouml;"
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "codeBlock",
- "attrs": {
- "language": null,
- "class": "code highlight"
- },
- "content": [
- {
- "type": "text",
- "text": "f&ouml;f&ouml;"
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__013: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "*foo*\n"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__014: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "* foo"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo\n\nbar"
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__016: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "\tfoo"
- }
- ]
- }
- ]
- }
-06_03__inlines__entity_and_numeric_character_references__017: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[a](url \"tit\")"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "foo ` bar"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "``"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": " `` "
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": " a"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": " b "
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
-06_04__inlines__code_spans__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "foo bar baz"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "foo "
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "foo bar baz"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "foo\\"
- },
- {
- "type": "text",
- "text": "bar`"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "foo`bar"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__013: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "foo `` bar"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__014: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "*foo"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "*"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[not a "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "link](/foo"
- },
- {
- "type": "text",
- "text": ")"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__016: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "<a href=\""
- },
- {
- "type": "text",
- "text": "\">`"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__017: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "`",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "`"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__018: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "<http://foo.bar."
- },
- {
- "type": "text",
- "text": "baz>`"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__019: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://foo.bar.%60baz",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "http://foo.bar.`baz"
- },
- {
- "type": "text",
- "text": "`"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__020: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "```foo``"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__021: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "`foo"
- }
- ]
- }
- ]
- }
-06_04__inlines__code_spans__022: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "`foo"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a * foo bar*"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a*\"foo\"*"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "* a *"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "5"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "6"
- },
- {
- "type": "text",
- "text": "78"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "_ foo bar_"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a_\"foo\"_"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo_bar_"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "5_6_78"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "пристаням_стремятся_"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__013: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "aa_\"bb\"_cc"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__014: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo-"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "(bar)"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "_foo*"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__016: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "*foo bar *"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__017: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "*foo bar\n*"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__018: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "*(*foo)"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__019: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "(foo"
- },
- {
- "type": "text",
- "text": ")"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__020: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__021: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "_foo bar _"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__022: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "_(_foo)"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__023: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "(foo"
- },
- {
- "type": "text",
- "text": ")"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__024: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "_foo_bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__025: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "_пристаням_стремятся"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__026: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo_bar_baz"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__027: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "(bar)"
- },
- {
- "type": "text",
- "text": "."
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__028: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__029: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "** foo bar**"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__030: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a**\"foo\"**"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__031: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__032: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__033: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "__ foo bar__"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__034: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "__\nfoo bar__"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__035: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a__\"foo\"__"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__036: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo__bar__"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__037: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "5__6__78"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__038: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "пристаням__стремятся__"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__039: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo, bar"
- },
- {
- "type": "text",
- "text": ", baz"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__040: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo-"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "(bar)"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__041: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "**foo bar **"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__042: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "**(**foo)"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__043: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "("
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": ")"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__044: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "Gomphocarpus ("
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "Gomphocarpus physocarpus"
- },
- {
- "type": "text",
- "text": ", syn.\n"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "Asclepias physocarpa"
- },
- {
- "type": "text",
- "text": ")"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__045: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo \""
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "bar"
- },
- {
- "type": "text",
- "text": "\" foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__046: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__047: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "__foo bar __"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__048: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "__(__foo)"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__049: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "("
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": ")"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__050: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "__foo__bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__051: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "__пристаням__стремятся"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__052: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo__bar__baz"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__053: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "(bar)"
- },
- {
- "type": "text",
- "text": "."
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__054: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__055: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo\nbar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__056: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bar"
- },
- {
- "type": "text",
- "text": " baz"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__057: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo bar"
- },
- {
- "type": "text",
- "text": " baz"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__058: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": " bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__059: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__060: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bar"
- },
- {
- "type": "text",
- "text": " baz"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__061: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bar"
- },
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__062: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo**bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__063: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- },
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": " bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__064: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__065: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__066: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- },
- {
- "type": "italic"
- }
- ],
- "text": "bar"
- },
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__067: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bar"
- },
- {
- "type": "text",
- "text": "***baz"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__068: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bar "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "baz"
- },
- {
- "type": "text",
- "text": " bim bop"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__069: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- },
- {
- "type": "italic"
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__070: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "** is not an empty emphasis"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__071: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "**** is not an empty strong emphasis"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__072: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__073: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo\nbar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__074: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "bar"
- },
- {
- "type": "text",
- "text": " baz"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__075: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo bar"
- },
- {
- "type": "text",
- "text": " baz"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__076: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": " bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__077: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__078: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "bar"
- },
- {
- "type": "text",
- "text": " baz"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__079: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "bar"
- },
- {
- "type": "text",
- "text": "baz"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__080: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- },
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": " bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__081: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__082: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "bar "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "baz"
- },
- {
- "type": "text",
- "text": "\nbim bop"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__083: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- },
- {
- "type": "italic"
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__084: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "__ is not an empty emphasis"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__085: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "____ is not an empty strong emphasis"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__086: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo ***"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__087: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "*"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__088: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "_"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__089: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo *****"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__090: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "*"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__091: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "_"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__092: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "*"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__093: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": "*"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__094: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "*"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__095: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "***"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__096: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": "*"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__097: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": "***"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__098: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo ___"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__099: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "_"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__100: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "*"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__101: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo _____"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__102: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "_"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__103: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "*"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__104: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "_"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__105: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": "_"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__106: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "_"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__107: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "___"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__108: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": "_"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__109: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": "___"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__110: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__111: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__112: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__113: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__114: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__115: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__116: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__117: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- },
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__118: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- },
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__119: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo _bar"
- },
- {
- "type": "text",
- "text": " baz_"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__120: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bar *baz bim"
- },
- {
- "type": "text",
- "text": " bam"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__121: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "**foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bar baz"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__122: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "*foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "bar baz"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__123: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "*"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "bar*"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__124: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "_foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "bar_"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__125: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "*"
- },
- {
- "type": "image",
- "attrs": {
- "src": "foo",
- "alt": null,
- "title": "*",
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__126: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "**"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__127: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "__"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__128: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "a "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "*"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__129: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "a "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "_"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__130: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "**a"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://foo.bar/?q=**",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "http://foo.bar/?q=**"
- }
- ]
- }
- ]
- }
-06_05__inlines__emphasis_and_strong_emphasis__131: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "__a"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://foo.bar/?q=__",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "http://foo.bar/?q=__"
- }
- ]
- }
- ]
- }
-06_06__inlines__strikethrough_extension__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "strike"
- }
- ],
- "text": "Hi"
- },
- {
- "type": "text",
- "text": " Hello, world!"
- }
- ]
- }
- ]
- }
-06_06__inlines__strikethrough_extension__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "This ~~has a"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "new paragraph~~."
- }
- ]
- }
- ]
- }
-06_07__inlines__links__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[link](/my uri)"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/my%20uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[link](foo\nbar)"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__008: |-
- Error - check implementation:
- Hast node of type "foo" not supported by this converter. Please, provide an specification.
-06_07__inlines__links__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "b)c",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "a"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[link](<foo>)"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[a](<b)c\n[a](<b)c>\n[a]("
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "c)"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "(foo)",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__013: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "foo(and(bar))",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__014: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "foo(and(bar)",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "foo(and(bar)",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__016: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "foo):",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__017: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "#fragment",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://example.com#fragment",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://example.com?foo=3#frag",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__018: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "foo%5Cbar",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__019: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "foo%20b%C3%A4",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__020: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "%22title%22",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__021: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "linklinklink"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__022: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title \"\"",
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__023: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url%C2%A0%22title%22",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__024: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[link](/url \"title \"and\" title\")"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__025: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title \"and\" title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__026: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "link"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__027: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[link] (/uri)"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__028: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link [foo [bar]]"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__029: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[link] bar](/uri)"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__030: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[link "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__031: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link [bar"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__032: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bar"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "#"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__033: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_07__inlines__links__034: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "bar"
- },
- {
- "type": "text",
- "text": "](/uri)"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__035: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "[bar "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "baz"
- },
- {
- "type": "text",
- "text": "](/uri)](/uri)"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__036: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "uri3",
- "alt": "[foo](uri2)",
- "title": null,
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_07__inlines__links__037: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "*"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo*"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__038: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "baz*",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo *bar"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__039: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo [bar"
- },
- {
- "type": "text",
- "text": " baz]"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__040: |-
- Error - check implementation:
- Hast node of type "bar" not supported by this converter. Please, provide an specification.
-06_07__inlines__links__041: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "](/uri)"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__042: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://example.com/?search=%5D(uri)",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "http://example.com/?search=](uri)"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__043: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__044: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link [foo [bar]]"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__045: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link [bar"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__046: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "link "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bar"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "#"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__047: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_07__inlines__links__048: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "bar"
- },
- {
- "type": "text",
- "text": "]"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "ref"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__049: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "italic"
- }
- ],
- "text": "bar "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "baz"
- },
- {
- "type": "text",
- "text": "]"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "ref"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__050: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "*"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo*"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__051: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo *bar"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__052: |-
- Error - check implementation:
- Hast node of type "bar" not supported by this converter. Please, provide an specification.
-06_07__inlines__links__053: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "][ref]"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__054: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://example.com/?search=%5D%5Bref%5D",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "http://example.com/?search=][ref]"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__055: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__056: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "Толпой"
- },
- {
- "type": "text",
- "text": " is a Russian word."
- }
- ]
- }
- ]
- }
-06_07__inlines__links__057: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "Baz"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__058: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo] "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__059: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo]\n"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__060: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url1",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__061: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[bar][foo!]"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__062: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo][ref[]"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[ref[]: /uri"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__063: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo][ref[bar]]"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[ref[bar]]: /uri"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__064: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[[[foo]]]"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[[[foo]]]: /url"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__065: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__066: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uri",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "bar\\"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__067: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[]"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[]: /uri"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__068: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[\n]"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[\n]: /uri"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__069: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__070: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- },
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": " bar"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__071: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "Foo"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__072: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": "\n[]"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__073: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__074: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- },
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": " bar"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__075: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "["
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- },
- {
- "type": "italic"
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": " bar]"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__076: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[[bar "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__077: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "Foo"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__078: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": " bar"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__079: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo]"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__080: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "*"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo*"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__081: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url2",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__082: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url1",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__083: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__084: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url1",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "text": "(not a link)"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__085: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo]"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__086: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url2",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url1",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "baz"
- }
- ]
- }
- ]
- }
-06_07__inlines__links__087: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[foo]"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url1",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "bar"
- }
- ]
- }
- ]
- }
-06_08__inlines__images__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "/url",
- "alt": "foo",
- "title": "title",
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "train.jpg",
- "alt": "foo bar",
- "title": "train & tracks",
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "/url2",
- "alt": "foo bar",
- "title": null,
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "/url2",
- "alt": "foo bar",
- "title": null,
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "train.jpg",
- "alt": "foo bar",
- "title": "train & tracks",
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "train.jpg",
- "alt": "foo bar",
- "title": "train & tracks",
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "train.jpg",
- "alt": "foo",
- "title": null,
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "My "
- },
- {
- "type": "image",
- "attrs": {
- "src": "/path/to/train.jpg",
- "alt": "foo bar",
- "title": "title",
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "url",
- "alt": "foo",
- "title": null,
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "/url",
- "alt": "",
- "title": null,
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "/url",
- "alt": "foo",
- "title": null,
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "/url",
- "alt": "foo",
- "title": null,
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__013: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "/url",
- "alt": "foo",
- "title": "title",
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__014: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "/url",
- "alt": "foo bar",
- "title": "title",
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "/url",
- "alt": "Foo",
- "title": "title",
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__016: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "/url",
- "alt": "foo",
- "title": "title",
- "uploading": false,
- "canonicalSrc": null
- }
- },
- {
- "type": "text",
- "text": "\n[]"
- }
- ]
- }
- ]
- }
-06_08__inlines__images__017: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "/url",
- "alt": "foo",
- "title": "title",
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__018: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "/url",
- "alt": "foo bar",
- "title": "title",
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__019: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "![[foo]]"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[[foo]]: /url \"title\""
- }
- ]
- }
- ]
- }
-06_08__inlines__images__020: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "image",
- "attrs": {
- "src": "/url",
- "alt": "Foo",
- "title": "title",
- "uploading": false,
- "canonicalSrc": null
- }
- }
- ]
- }
- ]
- }
-06_08__inlines__images__021: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "![foo]"
- }
- ]
- }
- ]
- }
-06_08__inlines__images__022: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "!"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/url",
- "target": "_blank",
- "class": null,
- "title": "title",
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://foo.bar.baz",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "http://foo.bar.baz"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://foo.bar.baz/test?q=hello&id=22&boolean",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "http://foo.bar.baz/test?q=hello&id=22&boolean"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "irc://foo.bar:2233/baz",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "irc://foo.bar:2233/baz"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "MAILTO:FOO@BAR.BAZ",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "MAILTO:FOO@BAR.BAZ"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "a+b+c:d",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "a+b+c:d"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "made-up-scheme://foo,bar",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "made-up-scheme://foo,bar"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://../",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "http://../"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "localhost:5001/foo",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "localhost:5001/foo"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "<"
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://foo.bar/baz",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "http://foo.bar/baz"
- },
- {
- "type": "text",
- "text": " bim>"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://example.com/%5C%5B%5C",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "http://example.com/\\[\\"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "mailto:foo@bar.example.com",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo@bar.example.com"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "mailto:foo+special@Bar.baz-bar0.com",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo+special@Bar.baz-bar0.com"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__013: |-
- Error - check implementation:
- Cannot read properties of undefined (reading 'end')
-06_09__inlines__autolinks__014: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "<>"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "< "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://foo.bar",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "http://foo.bar"
- },
- {
- "type": "text",
- "text": " >"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__016: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "<m:abc>"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__017: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "<foo.bar.baz>"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__018: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://example.com",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "http://example.com"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__019: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "mailto:foo@bar.example.com",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo@bar.example.com"
- }
- ]
- }
- ]
- }
-06_10__inlines__autolinks_extension__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://www.commonmark.org",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "www.commonmark.org"
- }
- ]
- }
- ]
- }
-06_10__inlines__autolinks_extension__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Visit "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://www.commonmark.org/help",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "www.commonmark.org/help"
- },
- {
- "type": "text",
- "text": " for more information."
- }
- ]
- }
- ]
- }
-06_10__inlines__autolinks_extension__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Visit "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://www.commonmark.org",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "www.commonmark.org"
- },
- {
- "type": "text",
- "text": "."
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Visit "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://www.commonmark.org/a.b",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "www.commonmark.org/a.b"
- },
- {
- "type": "text",
- "text": "."
- }
- ]
- }
- ]
- }
-06_10__inlines__autolinks_extension__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://www.google.com/search?q=Markup+(business)",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "www.google.com/search?q=Markup+(business)"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://www.google.com/search?q=Markup+(business)",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "www.google.com/search?q=Markup+(business)"
- },
- {
- "type": "text",
- "text": "))"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "("
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://www.google.com/search?q=Markup+(business)",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "www.google.com/search?q=Markup+(business)"
- },
- {
- "type": "text",
- "text": ")"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "("
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://www.google.com/search?q=Markup+(business)",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "www.google.com/search?q=Markup+(business)"
- }
- ]
- }
- ]
- }
-06_10__inlines__autolinks_extension__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://www.google.com/search?q=(business))+ok",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "www.google.com/search?q=(business))+ok"
- }
- ]
- }
- ]
- }
-06_10__inlines__autolinks_extension__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://www.google.com/search?q=commonmark&hl=en",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "www.google.com/search?q=commonmark&hl=en"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://www.google.com/search?q=commonmark",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "www.google.com/search?q=commonmark"
- },
- {
- "type": "text",
- "text": "&hl;"
- }
- ]
- }
- ]
- }
-06_10__inlines__autolinks_extension__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://www.commonmark.org/he",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "www.commonmark.org/he"
- },
- {
- "type": "text",
- "text": "<lp"
- }
- ]
- }
- ]
- }
-06_10__inlines__autolinks_extension__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "http://commonmark.org",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "http://commonmark.org"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "(Visit "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "https://encrypted.google.com/search?q=Markup+(business)",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "https://encrypted.google.com/search?q=Markup+(business)"
- },
- {
- "type": "text",
- "text": ")"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Anonymous FTP is available at ftp://foo.bar.baz."
- }
- ]
- }
- ]
- }
-06_10__inlines__autolinks_extension__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "mailto:foo@bar.baz",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "foo@bar.baz"
- }
- ]
- }
- ]
- }
-06_10__inlines__autolinks_extension__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "hello@mail+xyz.example isn't valid, but "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "mailto:hello+xyz@mail.example",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "hello+xyz@mail.example"
- },
- {
- "type": "text",
- "text": " is."
- }
- ]
- }
- ]
- }
-06_10__inlines__autolinks_extension__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "mailto:a.b-c_d@a.b",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "a.b-c_d@a.b"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "mailto:a.b-c_d@a.b",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": null
- }
- }
- ],
- "text": "a.b-c_d@a.b"
- },
- {
- "type": "text",
- "text": "."
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a.b-c_d@a.b-"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "a.b-c_d@a.b_"
- }
- ]
- }
- ]
- }
-06_11__inlines__raw_html__001: |-
- Error - check implementation:
- Hast node of type "bab" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__002: |-
- Error - check implementation:
- Hast node of type "b2" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__003: |-
- Error - check implementation:
- Hast node of type "b2" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
-06_11__inlines__raw_html__005: |-
- Error - check implementation:
- Hast node of type "responsive-image" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__006: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "<33> <__>"
- }
- ]
- }
- ]
- }
-06_11__inlines__raw_html__007: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "<a h*#ref=\"hi\">"
- }
- ]
- }
- ]
- }
-06_11__inlines__raw_html__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "<a href=\"hi'> <a href=hi'>"
- }
- ]
- }
- ]
- }
-06_11__inlines__raw_html__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "< a><\nfoo><bar/ >\n<foo bar=baz\nbim!bop />"
- }
- ]
- }
- ]
- }
-06_11__inlines__raw_html__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "<a href='bar'title=title>"
- }
- ]
- }
- ]
- }
-06_11__inlines__raw_html__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
-06_11__inlines__raw_html__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "</a href=\"foo\">"
- }
- ]
- }
- ]
- }
-06_11__inlines__raw_html__013: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-06_11__inlines__raw_html__014: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo <!-- not a comment -- two hyphens -->"
- }
- ]
- }
- ]
- }
-06_11__inlines__raw_html__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo <!--> foo -->"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo <!-- foo--->"
- }
- ]
- }
- ]
- }
-06_11__inlines__raw_html__016: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-06_11__inlines__raw_html__017: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-06_11__inlines__raw_html__018: |-
- Error - check implementation:
- Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
-06_11__inlines__raw_html__019: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo "
- }
- ]
- }
- ]
- }
-06_11__inlines__raw_html__020: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo "
- }
- ]
- }
- ]
- }
-06_11__inlines__raw_html__021: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "<a href=\"\"\">"
- }
- ]
- }
- ]
- }
-06_12__inlines__disallowed_raw_html_extension__001: |-
- Error - check implementation:
- Hast node of type "title" not supported by this converter. Please, provide an specification.
-06_13__inlines__hard_line_breaks__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- },
- {
- "type": "hardBreak"
- },
- {
- "type": "text",
- "text": "\nbaz"
- }
- ]
- }
- ]
- }
-06_13__inlines__hard_line_breaks__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- },
- {
- "type": "hardBreak"
- },
- {
- "type": "text",
- "text": "\nbaz"
- }
- ]
- }
- ]
- }
-06_13__inlines__hard_line_breaks__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- },
- {
- "type": "hardBreak"
- },
- {
- "type": "text",
- "text": "\nbaz"
- }
- ]
- }
- ]
- }
-06_13__inlines__hard_line_breaks__004: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- },
- {
- "type": "hardBreak"
- },
- {
- "type": "text",
- "text": "\nbar"
- }
- ]
- }
- ]
- }
-06_13__inlines__hard_line_breaks__005: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- },
- {
- "type": "hardBreak"
- },
- {
- "type": "text",
- "text": "\nbar"
- }
- ]
- }
- ]
- }
-06_13__inlines__hard_line_breaks__006: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_13__inlines__hard_line_breaks__007: |-
- Error - check implementation:
- Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_13__inlines__hard_line_breaks__008: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "code span"
- }
- ]
- }
- ]
- }
-06_13__inlines__hard_line_breaks__009: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "code\\ span"
- }
- ]
- }
- ]
- }
-06_13__inlines__hard_line_breaks__010: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
-06_13__inlines__hard_line_breaks__011: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph"
- }
- ]
- }
-06_13__inlines__hard_line_breaks__012: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo\\"
- }
- ]
- }
- ]
- }
-06_13__inlines__hard_line_breaks__013: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_13__inlines__hard_line_breaks__014: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 3
- },
- "content": [
- {
- "type": "text",
- "text": "foo\\"
- }
- ]
- }
- ]
- }
-06_13__inlines__hard_line_breaks__015: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "heading",
- "attrs": {
- "level": 3
- },
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- }
-06_14__inlines__soft_line_breaks__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo\nbaz"
- }
- ]
- }
- ]
- }
-06_14__inlines__soft_line_breaks__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo\nbaz"
- }
- ]
- }
- ]
- }
-06_15__inlines__textual_content__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "hello $.;'there"
- }
- ]
- }
- ]
- }
-06_15__inlines__textual_content__002: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Foo χρῆν"
- }
- ]
- }
- ]
- }
-06_15__inlines__textual_content__003: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Multiple spaces"
- }
- ]
- }
- ]
- }
-07_01__gitlab_specific_markdown__footnotes__001: |-
- Error - check implementation:
- Hast node of type "sup" not supported by this converter. Please, provide an specification.
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/project.json b/spec/fixtures/lib/gitlab/import_export/complex/project.json
index e721525f00c..12dbabf833b 100644
--- a/spec/fixtures/lib/gitlab/import_export/complex/project.json
+++ b/spec/fixtures/lib/gitlab/import_export/complex/project.json
@@ -2361,14 +2361,53 @@
"releases": [
{
"id": 1,
+ "tag": "release-1.0",
+ "description": "Some release notes",
+ "project_id": 5,
+ "created_at": "2019-12-25T10:17:14.621Z",
+ "updated_at": "2019-12-25T10:17:14.621Z",
+ "author_id": null,
+ "name": "release-1.0",
+ "sha": "902de3a8bd5573f4a049b1457d28bc1592baaa2e",
+ "released_at": "2019-12-25T10:17:14.615Z",
+ "links": [
+ {
+ "id": 1,
+ "release_id": 1,
+ "url": "http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download",
+ "name": "release-1.0.dmg",
+ "created_at": "2019-12-25T10:17:14.621Z",
+ "updated_at": "2019-12-25T10:17:14.621Z"
+ }
+ ],
+ "milestone_releases": [
+ {
+ "milestone_id": 1349,
+ "release_id": 9172,
+ "milestone": {
+ "id": 1,
+ "title": "test milestone",
+ "project_id": 8,
+ "description": "test milestone",
+ "due_date": null,
+ "created_at": "2016-06-14T15:02:04.415Z",
+ "updated_at": "2016-06-14T15:02:04.415Z",
+ "state": "active",
+ "iid": 1
+ }
+ }
+ ]
+ },
+ {
+ "id": 2,
"tag": "release-1.1",
"description": "Some release notes",
"project_id": 5,
"created_at": "2019-12-26T10:17:14.621Z",
"updated_at": "2019-12-26T10:17:14.621Z",
- "author_id": 1,
+ "author_id": 16,
"name": "release-1.1",
- "sha": "901de3a8bd5573f4a049b1457d28bc1592ba6bf9",
+ "sha": "902de3a8bd5573f4a049b1457d28bc1592ba6bg9",
"released_at": "2019-12-26T10:17:14.615Z",
"links": [
{
@@ -2397,6 +2436,45 @@
}
}
]
+ },
+ {
+ "id": 3,
+ "tag": "release-1.2",
+ "description": "Some release notes",
+ "project_id": 5,
+ "created_at": "2019-12-27T10:17:14.621Z",
+ "updated_at": "2019-12-27T10:17:14.621Z",
+ "author_id": 1,
+ "name": "release-1.2",
+ "sha": "903de3a8bd5573f4a049b1457d28bc1592ba6bf9",
+ "released_at": "2019-12-27T10:17:14.615Z",
+ "links": [
+ {
+ "id": 1,
+ "release_id": 1,
+ "url": "http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download",
+ "name": "release-1.2.dmg",
+ "created_at": "2019-12-27T10:17:14.621Z",
+ "updated_at": "2019-12-27T10:17:14.621Z"
+ }
+ ],
+ "milestone_releases": [
+ {
+ "milestone_id": 1349,
+ "release_id": 9172,
+ "milestone": {
+ "id": 1,
+ "title": "test milestone",
+ "project_id": 8,
+ "description": "test milestone",
+ "due_date": null,
+ "created_at": "2016-06-14T15:02:04.415Z",
+ "updated_at": "2016-06-14T15:02:04.415Z",
+ "state": "active",
+ "iid": 1
+ }
+ }
+ ]
}
],
"project_members": [
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/tree/project/releases.ndjson b/spec/fixtures/lib/gitlab/import_export/complex/tree/project/releases.ndjson
index a194898cb5a..dfbde1f2598 100644
--- a/spec/fixtures/lib/gitlab/import_export/complex/tree/project/releases.ndjson
+++ b/spec/fixtures/lib/gitlab/import_export/complex/tree/project/releases.ndjson
@@ -1 +1,3 @@
-{"id":1,"tag":"release-1.1","description":"Some release notes","project_id":5,"created_at":"2019-12-26T10:17:14.621Z","updated_at":"2019-12-26T10:17:14.621Z","author_id":1,"name":"release-1.1","sha":"901de3a8bd5573f4a049b1457d28bc1592ba6bf9","released_at":"2019-12-26T10:17:14.615Z","links":[{"id":1,"release_id":1,"url":"http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download","name":"release-1.1.dmg","created_at":"2019-12-26T10:17:14.621Z","updated_at":"2019-12-26T10:17:14.621Z"}],"milestone_releases":[{"milestone_id":1349,"release_id":9172,"milestone":{"id":1,"title":"test milestone","project_id":8,"description":"test milestone","due_date":null,"created_at":"2016-06-14T15:02:04.415Z","updated_at":"2016-06-14T15:02:04.415Z","state":"active","iid":1}}]}
+{"id":1,"tag":"release-1.0","description":"Some release notes","project_id":5,"created_at":"2019-12-25T10:17:14.621Z","updated_at":"2019-12-25T10:17:14.621Z","author_id":null,"name":"release-1.0","sha":"901de3a8bd5573f4a049b1457d28bc1592baaa2e","released_at":"2019-12-25T10:17:14.615Z","links":[{"id":1,"release_id":1,"url":"http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download","name":"release-1.0.dmg","created_at":"2019-12-25T10:17:14.621Z","updated_at":"2019-12-25T10:17:14.621Z"}],"milestone_releases":[{"milestone_id":1349,"release_id":9172,"milestone":{"id":1,"title":"test milestone","project_id":8,"description":"test milestone","due_date":null,"created_at":"2016-06-14T15:02:04.415Z","updated_at":"2016-06-14T15:02:04.415Z","state":"active","iid":1}}]}
+{"id":2,"tag":"release-1.1","description":"Some release notes","project_id":5,"created_at":"2019-12-26T10:17:14.621Z","updated_at":"2019-12-26T10:17:14.621Z","author_id":16,"name":"release-1.1","sha":"902de3a8bd5573f4a049b1457d28bc1592ba6bg9","released_at":"2019-12-26T10:17:14.615Z","links":[{"id":1,"release_id":1,"url":"http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download","name":"release-1.1.dmg","created_at":"2019-12-26T10:17:14.621Z","updated_at":"2019-12-26T10:17:14.621Z"}],"milestone_releases":[{"milestone_id":1349,"release_id":9172,"milestone":{"id":1,"title":"test milestone","project_id":8,"description":"test milestone","due_date":null,"created_at":"2016-06-14T15:02:04.415Z","updated_at":"2016-06-14T15:02:04.415Z","state":"active","iid":1}}]}
+{"id":3,"tag":"release-1.2","description":"Some release notes","project_id":5,"created_at":"2019-12-27T10:17:14.621Z","updated_at":"2019-12-27T10:17:14.621Z","author_id":1,"name":"release-1.2","sha":"903de3a8bd5573f4a049b1457d28bc1592ba6bf9","released_at":"2019-12-27T10:17:14.615Z","links":[{"id":1,"release_id":1,"url":"http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download","name":"release-1.2.dmg","created_at":"2019-12-27T10:17:14.621Z","updated_at":"2019-12-27T10:17:14.621Z"}],"milestone_releases":[{"milestone_id":1349,"release_id":9172,"milestone":{"id":1,"title":"test milestone","project_id":8,"description":"test milestone","due_date":null,"created_at":"2016-06-14T15:02:04.415Z","updated_at":"2016-06-14T15:02:04.415Z","state":"active","iid":1}}]}
diff --git a/spec/fixtures/product_intelligence/survey_response_schema.json b/spec/fixtures/product_intelligence/survey_response_schema.json
deleted file mode 100644
index 03d2d170137..00000000000
--- a/spec/fixtures/product_intelligence/survey_response_schema.json
+++ /dev/null
@@ -1,58 +0,0 @@
-{
- "description": "Schema for a Gitlab survey_response event",
- "self": {
- "vendor": "com.gitlab",
- "name": "survey_response",
- "version": "1-0-1",
- "format": "jsonschema"
- },
- "type": "object",
- "additionalProperties": false,
- "required": ["survey_id", "response"],
- "properties": {
- "survey_id": {
- "description": "Survey ID",
- "type": "integer",
- "minimum": 0,
- "maximum": 2147483647
- },
- "response": {
- "description": "Response",
- "type": "string",
- "maxLength": 10000
- },
- "instance_id": {
- "description": "Instance ID",
- "type": ["integer", "null"],
- "minimum": 0,
- "maximum": 2147483647
- },
- "user_id": {
- "description": "User ID",
- "type": ["integer", "null"],
- "minimum": 0,
- "maximum": 2147483647
- },
- "email": {
- "description": "Email",
- "type": ["string", "null"],
- "maxLength": 255
- },
- "name": {
- "description": "Name",
- "type": ["string", "null"],
- "maxLength": 255
- },
- "username": {
- "description": "Username",
- "type": ["string", "null"],
- "maxLength": 255
- },
- "onboarding_progress": {
- "description": "Onboarding progress",
- "type": ["integer", "null"],
- "minimum": 0,
- "maximum": 2147483647
- }
- }
-}
diff --git a/spec/frontend/__helpers__/init_vue_mr_page_helper.js b/spec/frontend/__helpers__/init_vue_mr_page_helper.js
index 6b719a32480..83ed0a869dc 100644
--- a/spec/frontend/__helpers__/init_vue_mr_page_helper.js
+++ b/spec/frontend/__helpers__/init_vue_mr_page_helper.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import initMRPage from '~/mr_notes';
-import diffFileMockData from '../diffs/mock_data/diff_file';
+import { getDiffFileMock } from '../diffs/mock_data/diff_file';
import { userDataMock, notesDataMock, noteableDataMock } from '../notes/mock_data';
export default function initVueMRPage() {
@@ -39,7 +39,7 @@ export default function initVueMRPage() {
const mock = new MockAdapter(axios);
mock.onGet(diffsAppEndpoint).reply(200, {
branch_name: 'foo',
- diff_files: [diffFileMockData],
+ diff_files: [getDiffFileMock()],
});
initMRPage();
diff --git a/spec/frontend/__helpers__/matchers/index.js b/spec/frontend/__helpers__/matchers/index.js
index 9b83ced10e1..5da6676cdc1 100644
--- a/spec/frontend/__helpers__/matchers/index.js
+++ b/spec/frontend/__helpers__/matchers/index.js
@@ -2,3 +2,4 @@ export * from './to_have_sprite_icon';
export * from './to_have_tracking_attributes';
export * from './to_match_interpolated_text';
export * from './to_validate_json_schema';
+export * from './to_match_expected_for_markdown';
diff --git a/spec/frontend/__helpers__/matchers/to_match_expected_for_markdown.js b/spec/frontend/__helpers__/matchers/to_match_expected_for_markdown.js
new file mode 100644
index 00000000000..829f6ba9770
--- /dev/null
+++ b/spec/frontend/__helpers__/matchers/to_match_expected_for_markdown.js
@@ -0,0 +1,60 @@
+export function toMatchExpectedForMarkdown(
+ received,
+ deserializationTarget,
+ name,
+ markdown,
+ errMsg,
+ expected,
+) {
+ const options = {
+ comment: `Markdown deserialization to ${deserializationTarget}`,
+ isNot: this.isNot,
+ promise: this.promise,
+ };
+
+ const EXPECTED_LABEL = 'Expected';
+ const RECEIVED_LABEL = 'Received';
+ const isExpand = (expand) => expand !== false;
+ const forMarkdownName = `for Markdown example '${name}':\n${markdown}`;
+ const matcherName = `toMatchExpected${
+ deserializationTarget === 'HTML' ? 'Html' : 'Json'
+ }ForMarkdown`;
+
+ let pass;
+
+ // If both expected and received are deserialization errors, force pass = true,
+ // because the actual error messages can vary across environments and cause
+ // false failures (e.g. due to jest '--coverage' being passed in CI).
+ const errMsgRegExp = new RegExp(errMsg);
+ const errMsgRegExp2 = new RegExp(errMsg);
+
+ if (errMsgRegExp.test(expected) && errMsgRegExp2.test(received)) {
+ pass = true;
+ } else {
+ pass = received === expected;
+ }
+
+ const message = pass
+ ? () =>
+ // eslint-disable-next-line prefer-template
+ this.utils.matcherHint(matcherName, undefined, undefined, options) +
+ '\n\n' +
+ `Expected HTML to NOT match:\n${expected}\n\n${forMarkdownName}`
+ : () => {
+ return (
+ // eslint-disable-next-line prefer-template
+ this.utils.matcherHint(matcherName, undefined, undefined, options) +
+ '\n\n' +
+ this.utils.printDiffOrStringify(
+ expected,
+ received,
+ EXPECTED_LABEL,
+ RECEIVED_LABEL,
+ isExpand(this.expand),
+ ) +
+ `\n\n${forMarkdownName}`
+ );
+ };
+
+ return { actual: received, expected, message, name: matcherName, pass };
+}
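A minimal usage sketch for the matcher added above, assuming it is registered with Jest's `expect.extend` through the matchers index; the import path, example name, and fixture values below are illustrative assumptions, not taken from the diff.

import * as customMatchers from 'helpers/matchers'; // path assumed from the 'helpers/' Jest alias used in these specs

expect.extend(customMatchers);

it('deserializes the Markdown example to the expected JSON', () => {
  const expectedJson = '{"type":"doc","content":[]}'; // placeholder expected fixture
  const receivedJson = expectedJson;                  // placeholder serializer output
  expect(receivedJson).toMatchExpectedForMarkdown(
    'JSON',                          // deserializationTarget
    'hard_line_breaks_example',      // name, used only in the failure message
    'foo  \nbar\n',                  // the Markdown source being checked
    'Error - check implementation',  // errMsg, compared as a regular expression on both sides
    expectedJson,
  );
});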
diff --git a/spec/frontend/__helpers__/mock_user_callout_dismisser.js b/spec/frontend/__helpers__/mock_user_callout_dismisser.js
index 652f36028dc..f115e2289af 100644
--- a/spec/frontend/__helpers__/mock_user_callout_dismisser.js
+++ b/spec/frontend/__helpers__/mock_user_callout_dismisser.js
@@ -1,3 +1,5 @@
+import UserCalloutDismisser from '~/vue_shared/components/user_callout_dismisser.vue';
+
/**
* Mock factory for the UserCalloutDismisser component.
* @param {slotProps} The slot props to pass to the default slot content.
@@ -6,11 +8,24 @@
export const makeMockUserCalloutDismisser = ({
dismiss = () => {},
shouldShowCallout = true,
+ isLoadingQuery = false,
} = {}) => ({
+ props: UserCalloutDismisser.props,
+ data() {
+ return {
+ isLoadingQuery,
+ shouldShowCallout,
+ dismiss,
+ };
+ },
+ mounted() {
+ this.$emit('queryResult', { shouldShowCallout });
+ },
render() {
return this.$scopedSlots.default({
dismiss,
shouldShowCallout,
+ isLoadingQuery,
});
},
});
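A possible way to consume this factory in a component spec, sketched under the assumption that the component under test wraps its content in UserCalloutDismisser; the component name and file path are hypothetical.

import { shallowMount } from '@vue/test-utils';
import { makeMockUserCalloutDismisser } from 'helpers/mock_user_callout_dismisser';
import FeatureBanner from '~/some_feature/components/feature_banner.vue'; // hypothetical component

const dismiss = jest.fn();

const wrapper = shallowMount(FeatureBanner, {
  stubs: {
    // Replaces the real dismisser so the default slot renders with controlled slot props.
    UserCalloutDismisser: makeMockUserCalloutDismisser({
      dismiss,
      shouldShowCallout: true,
      isLoadingQuery: false,
    }),
  },
});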
diff --git a/spec/frontend/__helpers__/performance.js b/spec/frontend/__helpers__/performance.js
new file mode 100644
index 00000000000..3bdf163c22b
--- /dev/null
+++ b/spec/frontend/__helpers__/performance.js
@@ -0,0 +1,8 @@
+// FIXME(vslobodin): Remove this stub once we have migrated to Jest 28.
+// NOTE: Do not try to optimize these stubs as Jest 27 overwrites
+// the "global.performance" object in every suite where fake timers are enabled.
+export const stubPerformanceWebAPI = () => {
+ global.performance.getEntriesByName = () => [];
+ global.performance.mark = () => {};
+ global.performance.measure = () => {};
+};
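A short sketch of how the stub above might be applied in a suite that enables fake timers; the import path assumes the 'helpers/' Jest alias and the test body is a placeholder.

import { stubPerformanceWebAPI } from 'helpers/performance'; // path assumed from the 'helpers/' Jest alias

describe('a suite that uses fake timers and the Performance API', () => {
  beforeEach(() => {
    // Re-stub in every test, since Jest 27 fake timers replace global.performance per suite.
    stubPerformanceWebAPI();
  });

  it('does not throw when performance marks are recorded', () => {
    expect(() => performance.mark('start')).not.toThrow();
  });
});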
diff --git a/spec/frontend/__helpers__/set_window_location_helper.js b/spec/frontend/__helpers__/set_window_location_helper.js
index 573a089f111..d81c502b337 100644
--- a/spec/frontend/__helpers__/set_window_location_helper.js
+++ b/spec/frontend/__helpers__/set_window_location_helper.js
@@ -30,7 +30,7 @@
* // window.location.href is now 'http://test.host/a/b/foo.html?bar=1#qux
*
* Both approaches also automatically update the rest of the properties on
- * `window.locaton`. For instance:
+ * `window.location`. For instance:
*
* setWindowLocation('http://test.host/a/b/foo.html?bar=1#qux');
* // window.location.origin is now 'http://test.host'
diff --git a/spec/frontend/__helpers__/shared_test_setup.js b/spec/frontend/__helpers__/shared_test_setup.js
index 011e1142c76..4d6486544ca 100644
--- a/spec/frontend/__helpers__/shared_test_setup.js
+++ b/spec/frontend/__helpers__/shared_test_setup.js
@@ -15,6 +15,7 @@ import '~/commons/bootstrap';
// This module has some fairly decent visual test coverage in its own repository.
jest.mock('@gitlab/favicon-overlay');
+jest.mock('~/lib/utils/axios_utils', () => jest.requireActual('helpers/mocks/axios_utils'));
process.on('unhandledRejection', global.promiseRejectionHandler);
diff --git a/spec/frontend/__helpers__/web_worker_fake.js b/spec/frontend/__helpers__/web_worker_fake.js
index 041a9bd8540..fb37e41a853 100644
--- a/spec/frontend/__helpers__/web_worker_fake.js
+++ b/spec/frontend/__helpers__/web_worker_fake.js
@@ -14,8 +14,7 @@ const createRelativeRequire = (filename) => {
const rel = path.relative(__dirname, path.dirname(filename));
const base = path.resolve(__dirname, rel);
- // reason: Dynamic require should be fine here since the code is dynamically evaluated anyways.
- // eslint-disable-next-line import/no-dynamic-require, global-require
+ // eslint-disable-next-line global-require
return (pathArg) => require(transformRequirePath(base, pathArg));
};
diff --git a/spec/frontend/access_tokens/components/access_token_table_app_spec.js b/spec/frontend/access_tokens/components/access_token_table_app_spec.js
index b45abe418e4..6013fa3ec39 100644
--- a/spec/frontend/access_tokens/components/access_token_table_app_spec.js
+++ b/spec/frontend/access_tokens/components/access_token_table_app_spec.js
@@ -1,4 +1,4 @@
-import { GlPagination, GlTable } from '@gitlab/ui';
+import { GlButton, GlPagination, GlTable } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import AccessTokenTableApp from '~/access_tokens/components/access_token_table_app.vue';
@@ -164,8 +164,8 @@ describe('~/access_tokens/components/access_token_table_app', () => {
expect(cells.at(3).text()).toBe(__('Never'));
expect(cells.at(4).text()).toBe(__('Never'));
expect(cells.at(5).text()).toBe('Maintainer');
- let anchor = cells.at(6).find('a');
- expect(anchor.attributes()).toMatchObject({
+ let button = cells.at(6).findComponent(GlButton);
+ expect(button.attributes()).toMatchObject({
'aria-label': __('Revoke'),
'data-qa-selector': __('revoke_button'),
href: '/-/profile/personal_access_tokens/1/revoke',
@@ -176,8 +176,7 @@ describe('~/access_tokens/components/access_token_table_app', () => {
{ accessTokenType },
),
});
-
- expect(anchor.classes()).toContain('btn-danger-secondary');
+ expect(button.props('category')).toBe('tertiary');
// Second row
expect(cells.at(7).text()).toBe('b');
@@ -186,9 +185,9 @@ describe('~/access_tokens/components/access_token_table_app', () => {
expect(cells.at(10).text()).not.toBe(__('Never'));
expect(cells.at(11).text()).toBe(__('Expired'));
expect(cells.at(12).text()).toBe('Maintainer');
- anchor = cells.at(13).find('a');
- expect(anchor.attributes('href')).toBe('/-/profile/personal_access_tokens/2/revoke');
- expect(anchor.classes()).toEqual(['btn', 'btn-danger', 'btn-md', 'gl-button', 'btn-icon']);
+ button = cells.at(13).findComponent(GlButton);
+ expect(button.attributes('href')).toBe('/-/profile/personal_access_tokens/2/revoke');
+ expect(button.props('category')).toBe('tertiary');
});
it('sorts rows alphabetically', async () => {
diff --git a/spec/frontend/access_tokens/components/projects_field_spec.js b/spec/frontend/access_tokens/components/projects_field_spec.js
index a9e0799d114..1c4fe7bb168 100644
--- a/spec/frontend/access_tokens/components/projects_field_spec.js
+++ b/spec/frontend/access_tokens/components/projects_field_spec.js
@@ -1,3 +1,4 @@
+import { nextTick } from 'vue';
import { within, fireEvent } from '@testing-library/dom';
import { mount } from '@vue/test-utils';
import ProjectsField from '~/access_tokens/components/projects_field.vue';
@@ -118,11 +119,10 @@ describe('ProjectsField', () => {
});
describe('when radio is changed back to "All projects"', () => {
- beforeEach(() => {
- fireEvent.click(findAllProjectsRadio());
- });
+ it('removes the hidden input value', async () => {
+ fireEvent.change(findAllProjectsRadio());
+ await nextTick();
- it('removes the hidden input value', () => {
expect(findHiddenInput().attributes('value')).toBe('');
});
});
diff --git a/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap b/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap
index 3fdbacb6efa..2c2151bfb41 100644
--- a/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap
+++ b/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap
@@ -25,7 +25,7 @@ exports[`AddContextCommitsModal renders modal with 2 tabs 1`] = `
>
<div
- class="mt-2"
+ class="gl-mt-3"
>
<gl-search-box-by-type-stub
clearbuttontitle="Clear"
diff --git a/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js b/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js
index 14f94e671a4..d6c5c5f963a 100644
--- a/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js
+++ b/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js
@@ -52,7 +52,7 @@ describe('DevopsScore', () => {
it('contains a link to the feature documentation', () => {
expect(findDocsLink().exists()).toBe(true);
expect(findDocsLink().attributes('href')).toBe(
- '/help/user/admin_area/analytics/dev_ops_report',
+ '/help/user/admin_area/analytics/dev_ops_reports',
);
});
});
diff --git a/spec/frontend/admin/deploy_keys/components/table_spec.js b/spec/frontend/admin/deploy_keys/components/table_spec.js
index 49bda7100fb..a18506c0916 100644
--- a/spec/frontend/admin/deploy_keys/components/table_spec.js
+++ b/spec/frontend/admin/deploy_keys/components/table_spec.js
@@ -27,6 +27,7 @@ describe('DeployKeysTable', () => {
const deployKey = responseBody[0];
const deployKey2 = responseBody[1];
+ const deployKeyWithoutMd5Fingerprint = responseBody[2];
const createComponent = (provide = {}) => {
wrapper = mountExtended(DeployKeysTable, {
@@ -57,9 +58,10 @@ describe('DeployKeysTable', () => {
const timeAgoTooltip = findTimeAgoTooltip(expectedRowIndex);
expect(wrapper.findByText(expectedDeployKey.title).exists()).toBe(true);
- expect(wrapper.findByText(expectedDeployKey.fingerprint, { selector: 'code' }).exists()).toBe(
- true,
- );
+
+ expect(
+ wrapper.findByText(expectedDeployKey.fingerprint_sha256, { selector: 'span' }).exists(),
+ ).toBe(true);
expect(timeAgoTooltip.exists()).toBe(true);
expect(timeAgoTooltip.props('time')).toBe(expectedDeployKey.created_at);
expect(editButton.exists()).toBe(true);
@@ -67,6 +69,13 @@ describe('DeployKeysTable', () => {
expect(findRemoveButton(expectedRowIndex).exists()).toBe(true);
};
+ const expectDeployKeyWithFingerprintIsRendered = (expectedDeployKey, expectedRowIndex) => {
+ expect(wrapper.findByText(expectedDeployKey.fingerprint, { selector: 'span' }).exists()).toBe(
+ true,
+ );
+ expectDeployKeyIsRendered(expectedDeployKey, expectedRowIndex);
+ };
+
const itRendersTheEmptyState = () => {
it('renders empty state', () => {
const emptyState = wrapper.findComponent(GlEmptyState);
@@ -127,8 +136,12 @@ describe('DeployKeysTable', () => {
});
it('renders deploy keys in table', () => {
- expectDeployKeyIsRendered(deployKey, 0);
- expectDeployKeyIsRendered(deployKey2, 1);
+ expectDeployKeyWithFingerprintIsRendered(deployKey, 0);
+ expectDeployKeyWithFingerprintIsRendered(deployKey2, 1);
+ });
+
+ it('renders deploy keys that do not have an MD5 fingerprint', () => {
+ expectDeployKeyIsRendered(deployKeyWithoutMd5Fingerprint, 2);
});
describe('when delete button is clicked', () => {
@@ -157,7 +170,7 @@ describe('DeployKeysTable', () => {
beforeEach(() => {
Api.deployKeys.mockResolvedValueOnce({
data: [deployKey],
- headers: { 'x-total': '2' },
+ headers: { 'x-total': '3' },
});
createComponent();
@@ -179,7 +192,7 @@ describe('DeployKeysTable', () => {
describe('when pagination is changed', () => {
it('calls API with `page` parameter', async () => {
const pagination = findPagination();
- expectDeployKeyIsRendered(deployKey, 0);
+ expectDeployKeyWithFingerprintIsRendered(deployKey, 0);
Api.deployKeys.mockResolvedValue({
data: [deployKey2],
@@ -199,7 +212,7 @@ describe('DeployKeysTable', () => {
page: 2,
public: true,
});
- expectDeployKeyIsRendered(deployKey2, 0);
+ expectDeployKeyWithFingerprintIsRendered(deployKey2, 0);
});
});
});
diff --git a/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js b/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js
index 5b4f954b672..31a0c2b07e4 100644
--- a/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js
+++ b/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js
@@ -1,6 +1,6 @@
import { GlButton, GlModal } from '@gitlab/ui';
-import { within, fireEvent } from '@testing-library/dom';
-import { shallowMount, mount } from '@vue/test-utils';
+import { within } from '@testing-library/dom';
+import { shallowMount, mount, createWrapper } from '@vue/test-utils';
import { stubComponent } from 'helpers/stub_component';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import SignupForm from '~/pages/admin/application_settings/general/components/signup_form.vue';
@@ -121,7 +121,7 @@ describe('Signup Form', () => {
describe('when user clicks on file radio', () => {
beforeEach(() => {
- fireEvent.click(findDenyListFileRadio());
+ createWrapper(findDenyListFileRadio()).setChecked(true);
});
it('has raw list not selected', () => {
@@ -165,7 +165,7 @@ describe('Signup Form', () => {
describe('when user clicks on raw list radio', () => {
beforeEach(() => {
- fireEvent.click(findDenyListRawRadio());
+ createWrapper(findDenyListRawRadio()).setChecked(true);
});
it('has raw list selected', () => {
diff --git a/spec/frontend/admin/signup_restrictions/mock_data.js b/spec/frontend/admin/signup_restrictions/mock_data.js
index 135fc8caae0..9e001e122a4 100644
--- a/spec/frontend/admin/signup_restrictions/mock_data.js
+++ b/spec/frontend/admin/signup_restrictions/mock_data.js
@@ -18,6 +18,10 @@ export const rawMockData = {
emailRestrictions: 'user1@domain.com, user2@domain.com',
afterSignUpText: 'Congratulations on your successful sign-up!',
pendingUserCount: '0',
+ passwordNumberRequired: 'true',
+ passwordLowercaseRequired: 'true',
+ passwordUppercaseRequired: 'true',
+ passwordSymbolRequired: 'true',
};
export const mockData = {
@@ -40,4 +44,8 @@ export const mockData = {
emailRestrictions: 'user1@domain.com, user2@domain.com',
afterSignUpText: 'Congratulations on your successful sign-up!',
pendingUserCount: '0',
+ passwordNumberRequired: true,
+ passwordLowercaseRequired: true,
+ passwordUppercaseRequired: true,
+ passwordSymbolRequired: true,
};
diff --git a/spec/frontend/admin/signup_restrictions/utils_spec.js b/spec/frontend/admin/signup_restrictions/utils_spec.js
index fd5c4c3317b..f07e14430f9 100644
--- a/spec/frontend/admin/signup_restrictions/utils_spec.js
+++ b/spec/frontend/admin/signup_restrictions/utils_spec.js
@@ -14,6 +14,10 @@ describe('utils', () => {
'domainDenylistEnabled',
'denylistTypeRawSelected',
'emailRestrictionsEnabled',
+ 'passwordNumberRequired',
+ 'passwordLowercaseRequired',
+ 'passwordUppercaseRequired',
+ 'passwordSymbolRequired',
],
}),
).toEqual(mockData);
diff --git a/spec/frontend/admin/statistics_panel/components/app_spec.js b/spec/frontend/admin/statistics_panel/components/app_spec.js
index 3cfb6feeb86..bac542e72fb 100644
--- a/spec/frontend/admin/statistics_panel/components/app_spec.js
+++ b/spec/frontend/admin/statistics_panel/components/app_spec.js
@@ -64,7 +64,7 @@ describe('Admin statistics app', () => {
createComponent();
expect(findStats(index).text()).toContain(label);
- expect(findStats(index).text()).toContain(count);
+ expect(findStats(index).text()).toContain(count.toString());
});
});
});
diff --git a/spec/frontend/admin/users/components/user_actions_spec.js b/spec/frontend/admin/users/components/user_actions_spec.js
index b90a30b5b89..e04c43ae3f2 100644
--- a/spec/frontend/admin/users/components/user_actions_spec.js
+++ b/spec/frontend/admin/users/components/user_actions_spec.js
@@ -77,12 +77,6 @@ describe('AdminUserActions component', () => {
expect(findActionsDropdown().exists()).toBe(true);
});
- it('renders the tooltip', () => {
- const tooltip = getBinding(findActionsDropdown().element, 'gl-tooltip');
-
- expect(tooltip.value).toBe(I18N_USER_ACTIONS.userAdministration);
- });
-
describe('when there are actions that require confirmation', () => {
beforeEach(() => {
initComponent({ actions: CONFIRMATION_ACTIONS });
diff --git a/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap b/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap
index ec5b6a5597b..4693d5a47e4 100644
--- a/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap
+++ b/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap
@@ -17,6 +17,7 @@ exports[`Alert integration settings form default state should match the default
<gl-form-checkbox-stub
checked="true"
data-qa-selector="create_issue_checkbox"
+ id="2"
>
<span>
Create an incident. Incidents are created for each alert triggered.
@@ -87,7 +88,9 @@ exports[`Alert integration settings form default state should match the default
labeldescription=""
optionaltext="(optional)"
>
- <gl-form-checkbox-stub>
+ <gl-form-checkbox-stub
+ id="3"
+ >
<span>
Send a single email notification to Owners and Maintainers for new alerts.
</span>
@@ -101,6 +104,7 @@ exports[`Alert integration settings form default state should match the default
>
<gl-form-checkbox-stub
checked="true"
+ id="4"
>
<span>
Automatically close associated incident when a recovery alert notification resolves an alert
diff --git a/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
index 018303fcae7..7d9d2875cf8 100644
--- a/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
@@ -265,7 +265,6 @@ describe('AlertsSettingsForm', () => {
});
it('should not allow a user to test invalid JSON', async () => {
- jest.useFakeTimers();
await findJsonTextArea().setValue('Invalid JSON');
jest.runAllTimers();
@@ -278,7 +277,6 @@ describe('AlertsSettingsForm', () => {
});
it('should allow for the form to be automatically saved if the test payload is successfully submitted', async () => {
- jest.useFakeTimers();
await findJsonTextArea().setValue('{ "value": "value" }');
jest.runAllTimers();
diff --git a/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap b/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap
index 2691e11e616..ba8215f4e00 100644
--- a/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap
+++ b/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap
@@ -7,6 +7,7 @@ exports[`Keep latest artifact checkbox when application keep latest artifact set
<b-form-checkbox-stub
checked="true"
class="gl-form-checkbox"
+ id="4"
value="true"
>
<strong
diff --git a/spec/frontend/batch_comments/components/draft_note_spec.js b/spec/frontend/batch_comments/components/draft_note_spec.js
index 6a997ebaaa8..ccca4a2c3e9 100644
--- a/spec/frontend/batch_comments/components/draft_note_spec.js
+++ b/spec/frontend/batch_comments/components/draft_note_spec.js
@@ -33,13 +33,16 @@ describe('Batch comments draft note component', () => {
const findSubmitReviewButton = () => wrapper.findComponent(PublishButton);
const findAddCommentButton = () => wrapper.findComponent(GlButton);
- const createComponent = (propsData = { draft }) => {
+ const createComponent = (propsData = { draft }, glFeatures = {}) => {
wrapper = shallowMount(DraftNote, {
store,
propsData,
stubs: {
NoteableNote: NoteableNoteStub,
},
+ provide: {
+ glFeatures,
+ },
});
jest.spyOn(wrapper.vm.$store, 'dispatch').mockImplementation();
@@ -96,6 +99,12 @@ describe('Batch comments draft note component', () => {
expect(publishNowButton.props().disabled).toBe(true);
expect(publishNowButton.props().loading).toBe(false);
});
+
+ it('hides button when mr_review_submit_comment is enabled', () => {
+ createComponent({ draft }, { mrReviewSubmitComment: true });
+
+ expect(findAddCommentButton().exists()).toBe(false);
+ });
});
describe('submit review', () => {
diff --git a/spec/frontend/batch_comments/components/preview_dropdown_spec.js b/spec/frontend/batch_comments/components/preview_dropdown_spec.js
index bf3bbf4de26..079b64225e4 100644
--- a/spec/frontend/batch_comments/components/preview_dropdown_spec.js
+++ b/spec/frontend/batch_comments/components/preview_dropdown_spec.js
@@ -1,8 +1,15 @@
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { TEST_HOST } from 'helpers/test_constants';
+import { visitUrl } from '~/lib/utils/url_utility';
import PreviewDropdown from '~/batch_comments/components/preview_dropdown.vue';
+jest.mock('~/lib/utils/url_utility', () => ({
+ visitUrl: jest.fn(),
+ setUrlParams: jest.requireActual('~/lib/utils/url_utility').setUrlParams,
+}));
+
Vue.use(Vuex);
let wrapper;
@@ -27,6 +34,11 @@ function factory({ viewDiffsFileByFile = false, draftsCount = 1, sortedDrafts =
actions: { scrollToDraft },
getters: { draftsCount: () => draftsCount, sortedDrafts: () => sortedDrafts },
},
+ notes: {
+ getters: {
+ getNoteableData: () => ({ diff_head_sha: '123' }),
+ },
+ },
},
});
@@ -67,5 +79,19 @@ describe('Batch comments preview dropdown', () => {
expect(scrollToDraft).toHaveBeenCalledWith(expect.anything(), { id: 1 });
});
+
+ it('changes window location to navigate to commit', async () => {
+ factory({
+ viewDiffsFileByFile: false,
+ sortedDrafts: [{ id: 1, position: { head_sha: '1234' } }],
+ });
+
+ wrapper.findByTestId('preview-item').vm.$emit('click');
+
+ await nextTick();
+
+ expect(scrollToDraft).not.toHaveBeenCalled();
+ expect(visitUrl).toHaveBeenCalledWith(`${TEST_HOST}/?commit_id=1234#note_1`);
+ });
});
});
diff --git a/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js b/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
index 172b510645d..9f50b12bac2 100644
--- a/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
+++ b/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
@@ -298,14 +298,18 @@ describe('Batch comments store actions', () => {
const draft = {
discussion_id: '1',
id: '2',
+ file_path: 'lib/example.js',
};
actions.scrollToDraft({ dispatch, rootGetters }, draft);
- expect(dispatch.mock.calls[0]).toEqual([
- 'expandDiscussion',
- { discussionId: '1' },
- { root: true },
+ expect(dispatch.mock.calls).toEqual([
+ [
+ 'diffs/setFileCollapsedAutomatically',
+ { filePath: draft.file_path, collapsed: false },
+ { root: true },
+ ],
+ ['expandDiscussion', { discussionId: '1' }, { root: true }],
]);
expect(window.mrTabs.tabShown).toHaveBeenCalledWith('diffs');
diff --git a/spec/frontend/behaviors/shortcuts/keybindings_spec.js b/spec/frontend/behaviors/shortcuts/keybindings_spec.js
index 3ad44a16ae1..1f7e1b24e78 100644
--- a/spec/frontend/behaviors/shortcuts/keybindings_spec.js
+++ b/spec/frontend/behaviors/shortcuts/keybindings_spec.js
@@ -11,9 +11,7 @@ import {
} from '~/behaviors/shortcuts/keybindings';
describe('~/behaviors/shortcuts/keybindings', () => {
- beforeAll(() => {
- useLocalStorageSpy();
- });
+ useLocalStorageSpy();
const setupCustomizations = (customizationsAsString) => {
localStorage.clear();
diff --git a/spec/frontend/blob/viewer/index_spec.js b/spec/frontend/blob/viewer/index_spec.js
deleted file mode 100644
index b2559af182b..00000000000
--- a/spec/frontend/blob/viewer/index_spec.js
+++ /dev/null
@@ -1,189 +0,0 @@
-/* eslint-disable no-new */
-
-import MockAdapter from 'axios-mock-adapter';
-import $ from 'jquery';
-import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import { setTestTimeout } from 'helpers/timeout';
-import { BlobViewer } from '~/blob/viewer/index';
-import axios from '~/lib/utils/axios_utils';
-
-const execImmediately = (callback) => {
- callback();
-};
-
-describe('Blob viewer', () => {
- let blob;
- let mock;
-
- const jQueryMock = {
- tooltip: jest.fn(),
- };
-
- setTestTimeout(2000);
-
- beforeEach(() => {
- window.gon.features = { refactorBlobViewer: false }; // This file is based on the old (non-refactored) blob viewer
- jest.spyOn(window, 'requestIdleCallback').mockImplementation(execImmediately);
- $.fn.extend(jQueryMock);
- mock = new MockAdapter(axios);
-
- loadHTMLFixture('blob/show_readme.html');
- $('#modal-upload-blob').remove();
-
- mock.onGet(/blob\/.+\/README\.md/).reply(200, {
- html: '<div>testing</div>',
- });
-
- blob = new BlobViewer();
- });
-
- afterEach(() => {
- mock.restore();
- window.location.hash = '';
-
- resetHTMLFixture();
- });
-
- it('loads source file after switching views', async () => {
- document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
-
- await axios.waitForAll();
-
- expect(
- document
- .querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]')
- .classList.contains('hidden'),
- ).toBeFalsy();
- });
-
- it('loads source file when line number is in hash', async () => {
- window.location.hash = '#L1';
-
- new BlobViewer();
-
- await axios.waitForAll();
-
- expect(
- document
- .querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]')
- .classList.contains('hidden'),
- ).toBeFalsy();
- });
-
- it('doesnt reload file if already loaded', () => {
- const asyncClick = async () => {
- document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
-
- await axios.waitForAll();
- };
-
- return asyncClick()
- .then(() => asyncClick())
- .then(() => {
- expect(document.querySelector('.blob-viewer[data-type="simple"]').dataset.loaded).toBe(
- 'true',
- );
- });
- });
-
- describe('copy blob button', () => {
- let copyButton;
- let copyButtonTooltip;
-
- beforeEach(() => {
- copyButton = document.querySelector('.js-copy-blob-source-btn');
- copyButtonTooltip = document.querySelector('.js-copy-blob-source-btn-tooltip');
- });
-
- it('disabled on load', () => {
- expect(copyButton.classList.contains('disabled')).toBeTruthy();
- });
-
- it('has tooltip when disabled', () => {
- expect(copyButtonTooltip.getAttribute('title')).toBe(
- 'Switch to the source to copy the file contents',
- );
- });
-
- it('is blurred when clicked and disabled', () => {
- jest.spyOn(copyButton, 'blur').mockImplementation(() => {});
-
- copyButton.click();
-
- expect(copyButton.blur).toHaveBeenCalled();
- });
-
- it('is not blurred when clicked and not disabled', () => {
- jest.spyOn(copyButton, 'blur').mockImplementation(() => {});
-
- copyButton.classList.remove('disabled');
- copyButton.click();
-
- expect(copyButton.blur).not.toHaveBeenCalled();
- });
-
- it('enables after switching to simple view', async () => {
- document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
-
- await axios.waitForAll();
-
- expect(copyButton.classList.contains('disabled')).toBeFalsy();
- });
-
- it('updates tooltip after switching to simple view', async () => {
- document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
-
- await axios.waitForAll();
-
- expect(copyButtonTooltip.getAttribute('title')).toBe('Copy file contents');
- });
- });
-
- describe('switchToViewer', () => {
- it('removes active class from old viewer button', () => {
- blob.switchToViewer('simple');
-
- expect(
- document.querySelector('.js-blob-viewer-switch-btn.active[data-viewer="rich"]'),
- ).toBeNull();
- });
-
- it('adds active class to new viewer button', () => {
- const simpleBtn = document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]');
-
- jest.spyOn(simpleBtn, 'blur').mockImplementation(() => {});
-
- blob.switchToViewer('simple');
-
- expect(simpleBtn.classList.contains('selected')).toBeTruthy();
-
- expect(simpleBtn.blur).toHaveBeenCalled();
- });
-
- it('makes request for initial view', () => {
- expect(mock.history).toMatchObject({
- get: [{ url: expect.stringMatching(/README\.md\?.*viewer=rich/) }],
- });
- });
-
- describe.each`
- views
- ${['simple']}
- ${['simple', 'rich']}
- `('when view switches to $views', ({ views }) => {
- beforeEach(async () => {
- views.forEach((view) => blob.switchToViewer(view));
- await axios.waitForAll();
- });
-
- it('sends 1 AJAX request for new view', async () => {
- expect(mock.history).toMatchObject({
- get: [
- { url: expect.stringMatching(/README\.md\?.*viewer=rich/) },
- { url: expect.stringMatching(/README\.md\?.*viewer=simple/) },
- ],
- });
- });
- });
- });
-});
diff --git a/spec/frontend/boards/components/board_card_spec.js b/spec/frontend/boards/components/board_card_spec.js
index aad89cf8261..17a5383a31e 100644
--- a/spec/frontend/boards/components/board_card_spec.js
+++ b/spec/frontend/boards/components/board_card_spec.js
@@ -6,7 +6,7 @@ import Vuex from 'vuex';
import BoardCard from '~/boards/components/board_card.vue';
import BoardCardInner from '~/boards/components/board_card_inner.vue';
import { inactiveId } from '~/boards/constants';
-import { mockLabelList, mockIssue } from '../mock_data';
+import { mockLabelList, mockIssue, DEFAULT_COLOR } from '../mock_data';
describe('Board card', () => {
let wrapper;
@@ -180,4 +180,40 @@ describe('Board card', () => {
expect(wrapper.classes()).toContain('gl-cursor-grab');
});
});
+
+ describe('when Epic colors are enabled', () => {
+ it('applies the correct color', () => {
+ window.gon.features = { epicColorHighlight: true };
+ createStore();
+ mountComponent({
+ item: {
+ ...mockIssue,
+ color: DEFAULT_COLOR,
+ },
+ });
+
+ expect(wrapper.classes()).toEqual(
+ expect.arrayContaining(['gl-pl-4', 'gl-border-l-solid', 'gl-border-4']),
+ );
+ expect(wrapper.attributes('style')).toContain(`border-color: ${DEFAULT_COLOR}`);
+ });
+ });
+
+ describe('when Epic colors are not enabled', () => {
+ it('applies the correct color', () => {
+ window.gon.features = { epicColorHighlight: false };
+ createStore();
+ mountComponent({
+ item: {
+ ...mockIssue,
+ color: DEFAULT_COLOR,
+ },
+ });
+
+ expect(wrapper.classes()).not.toEqual(
+ expect.arrayContaining(['gl-pl-4', 'gl-border-l-solid', 'gl-border-4']),
+ );
+ expect(wrapper.attributes('style')).toBeUndefined();
+ });
+ });
});
diff --git a/spec/frontend/boards/components/config_toggle_spec.js b/spec/frontend/boards/components/config_toggle_spec.js
new file mode 100644
index 00000000000..47d4692453d
--- /dev/null
+++ b/spec/frontend/boards/components/config_toggle_spec.js
@@ -0,0 +1,59 @@
+import Vuex from 'vuex';
+import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import ConfigToggle from '~/boards/components/config_toggle.vue';
+import eventHub from '~/boards/eventhub';
+import store from '~/boards/stores';
+import { mockTracking } from 'helpers/tracking_helper';
+
+describe('ConfigToggle', () => {
+ let wrapper;
+
+ Vue.use(Vuex);
+
+ const createComponent = (provide = {}) =>
+ shallowMount(ConfigToggle, {
+ store,
+ provide: {
+ canAdminList: true,
+ ...provide,
+ },
+ });
+
+ const findButton = () => wrapper.findComponent(GlButton);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders a button with label `View scope` when `canAdminList` is `false`', () => {
+ wrapper = createComponent({ canAdminList: false });
+ expect(findButton().text()).toBe('View scope');
+ });
+
+ it('renders a button with label `Edit board` when `canAdminList` is `true`', () => {
+ wrapper = createComponent();
+ expect(findButton().text()).toBe('Edit board');
+ });
+
+ it('emits `showBoardModal` when button is clicked', () => {
+ const eventHubSpy = jest.spyOn(eventHub, '$emit');
+ wrapper = createComponent();
+
+ findButton().vm.$emit('click', { preventDefault: () => {} });
+
+ expect(eventHubSpy).toHaveBeenCalledWith('showBoardModal', 'edit');
+ });
+
+ it('tracks clicking the button', () => {
+ const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ wrapper = createComponent();
+
+ findButton().vm.$emit('click', { preventDefault: () => {} });
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_button', {
+ label: 'edit_board',
+ });
+ });
+});
diff --git a/spec/frontend/boards/components/toggle_focus_spec.js b/spec/frontend/boards/components/toggle_focus_spec.js
new file mode 100644
index 00000000000..3cbaac91f8d
--- /dev/null
+++ b/spec/frontend/boards/components/toggle_focus_spec.js
@@ -0,0 +1,47 @@
+import { GlButton } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import ToggleFocus from '~/boards/components/toggle_focus.vue';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+describe('ToggleFocus', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(ToggleFocus, {
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ attachTo: document.body,
+ });
+ };
+
+ const findButton = () => wrapper.findComponent(GlButton);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders a button with `maximize` icon', () => {
+ createComponent();
+
+ expect(findButton().props('icon')).toBe('maximize');
+ expect(findButton().attributes('aria-label')).toBe(ToggleFocus.i18n.toggleFocusMode);
+ });
+
+ it('contains a tooltip with title', () => {
+ createComponent();
+ const tooltip = getBinding(findButton().element, 'gl-tooltip');
+
+ expect(tooltip).toBeDefined();
+ expect(findButton().attributes('title')).toBe(ToggleFocus.i18n.toggleFocusMode);
+ });
+
+ it('toggles the icon when the button is clicked', async () => {
+ createComponent();
+ findButton().vm.$emit('click');
+ await nextTick();
+
+ expect(findButton().props('icon')).toBe('minimize');
+ });
+});
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index 6ec39be5d29..1ee05d81f37 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -790,3 +790,5 @@ export const epicBoardListQueryResponse = (totalWeight = 5) => ({
},
},
});
+
+export const DEFAULT_COLOR = '#1068bf';
diff --git a/spec/frontend/boards/project_select_spec.js b/spec/frontend/boards/project_select_spec.js
index bd79060c54f..c45cd545155 100644
--- a/spec/frontend/boards/project_select_spec.js
+++ b/spec/frontend/boards/project_select_spec.js
@@ -1,16 +1,16 @@
import {
GlDropdown,
GlDropdownItem,
+ GlFormInput,
GlSearchBoxByType,
GlLoadingIcon,
- GlFormInput,
} from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
-import waitForPromises from 'helpers/wait_for_promises';
import ProjectSelect from '~/boards/components/project_select.vue';
import defaultState from '~/boards/stores/state';
+import waitForPromises from 'helpers/wait_for_promises';
import { mockList, mockActiveGroupProjects } from './mock_data';
@@ -21,7 +21,7 @@ describe('ProjectSelect component', () => {
let store;
const findLabel = () => wrapper.find("[data-testid='header-label']");
- const findGlDropdown = () => wrapper.find(GlDropdown);
+ const findGlDropdown = () => wrapper.findComponent(GlDropdown);
const findGlDropdownLoadingIcon = () =>
findGlDropdown().find('button:first-child').find(GlLoadingIcon);
const findGlSearchBoxByType = () => wrapper.find(GlSearchBoxByType);
@@ -137,7 +137,7 @@ describe('ProjectSelect component', () => {
await nextTick();
const searchInput = findGlDropdown().findComponent(GlFormInput).element;
- expect(document.activeElement).toEqual(searchInput);
+ expect(document.activeElement).toBe(searchInput);
});
});
diff --git a/spec/frontend/ci_lint/mock_data.js b/spec/frontend/ci_lint/mock_data.js
index 28ea0f55bf8..660b2ad6e8b 100644
--- a/spec/frontend/ci_lint/mock_data.js
+++ b/spec/frontend/ci_lint/mock_data.js
@@ -1,5 +1,16 @@
import { mockJobs } from 'jest/pipeline_editor/mock_data';
+export const mockLintDataError = {
+ data: {
+ lintCI: {
+ errors: ['Error message'],
+ warnings: ['Warning message'],
+ valid: false,
+ jobs: mockJobs,
+ },
+ },
+};
+
export const mockLintDataValid = {
data: {
lintCI: {
diff --git a/spec/frontend/ci_secure_files/components/secure_files_list_spec.js b/spec/frontend/ci_secure_files/components/secure_files_list_spec.js
index ad5f8a56ced..04d38a3281a 100644
--- a/spec/frontend/ci_secure_files/components/secure_files_list_spec.js
+++ b/spec/frontend/ci_secure_files/components/secure_files_list_spec.js
@@ -1,10 +1,12 @@
-import { GlLoadingIcon } from '@gitlab/ui';
+import { GlLoadingIcon, GlModal } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { mount } from '@vue/test-utils';
import axios from '~/lib/utils/axios_utils';
import SecureFilesList from '~/ci_secure_files/components/secure_files_list.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import Api from '~/api';
import { secureFiles } from '../mock_data';
@@ -22,15 +24,18 @@ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${dummyProj
describe('SecureFilesList', () => {
let wrapper;
let mock;
+ let trackingSpy;
beforeEach(() => {
originalGon = window.gon;
+ trackingSpy = mockTracking(undefined, undefined, jest.spyOn);
window.gon = { ...dummyGon };
});
afterEach(() => {
wrapper.destroy();
mock.restore();
+ unmockTracking();
window.gon = originalGon;
});
@@ -52,7 +57,9 @@ describe('SecureFilesList', () => {
const findPagination = () => wrapper.findAll('ul.pagination');
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findUploadButton = () => wrapper.findAll('span.gl-button-text');
- const findDeleteButton = () => wrapper.findAll('tbody tr td button.btn-danger');
+ const findDeleteModal = () => wrapper.findComponent(GlModal);
+ const findUploadInput = () => wrapper.findAll('input[type="file"]').at(0);
+ const findDeleteButton = () => wrapper.findAll('[data-testid="delete-button"]');
describe('when secure files exist in a project', () => {
beforeEach(async () => {
@@ -64,7 +71,7 @@ describe('SecureFilesList', () => {
});
it('displays a table with expected headers', () => {
- const headers = ['Filename', 'Uploaded'];
+ const headers = ['File name', 'Uploaded date'];
headers.forEach((header, i) => {
expect(findHeaderAt(i).text()).toBe(header);
});
@@ -78,6 +85,30 @@ describe('SecureFilesList', () => {
expect(findCell(0, 0).text()).toBe(secureFile.name);
expect(findCell(0, 1).find(TimeAgoTooltip).props('time')).toBe(secureFile.created_at);
});
+
+ describe('event tracking', () => {
+ it('sends tracking information on list load', () => {
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'render_secure_files_list', {});
+ });
+
+ it('sends tracking information on file upload', async () => {
+ Api.uploadProjectSecureFile = jest.fn().mockResolvedValue();
+ Object.defineProperty(findUploadInput().element, 'files', { value: [{}] });
+ findUploadInput().trigger('change');
+
+ await waitForPromises();
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'upload_secure_file', {});
+ });
+
+ it('sends tracking information on file deletion', async () => {
+ Api.deleteProjectSecureFile = jest.fn().mockResolvedValue();
+ findDeleteModal().vm.$emit('ok');
+ await waitForPromises();
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'delete_secure_file', {});
+ });
+ });
});
describe('when no secure files exist in a project', () => {
@@ -90,14 +121,14 @@ describe('SecureFilesList', () => {
});
it('displays a table with expected headers', () => {
- const headers = ['Filename', 'Uploaded'];
+ const headers = ['File name', 'Uploaded date'];
headers.forEach((header, i) => {
expect(findHeaderAt(i).text()).toBe(header);
});
});
it('displays a table with a no records message', () => {
- expect(findCell(0, 0).text()).toBe('There are no records to show');
+ expect(findCell(0, 0).text()).toBe('There are no secure files yet.');
});
});
diff --git a/spec/frontend/clusters_list/components/agent_table_spec.js b/spec/frontend/clusters_list/components/agent_table_spec.js
index 2a43b45a2f5..b78f0a3686c 100644
--- a/spec/frontend/clusters_list/components/agent_table_spec.js
+++ b/spec/frontend/clusters_list/components/agent_table_spec.js
@@ -70,10 +70,10 @@ describe('AgentTable', () => {
});
it.each`
- status | iconName | lineNumber
- ${'Never connected'} | ${'status-neutral'} | ${0}
- ${'Connected'} | ${'status-success'} | ${1}
- ${'Not connected'} | ${'severity-critical'} | ${2}
+ status | iconName | lineNumber
+ ${'Never connected'} | ${'status-neutral'} | ${0}
+ ${'Connected'} | ${'status-success'} | ${1}
+ ${'Not connected'} | ${'status-alert'} | ${2}
`(
'displays agent connection status as "$status" at line $lineNumber',
({ status, iconName, lineNumber }) => {
diff --git a/spec/frontend/clusters_list/components/clusters_spec.js b/spec/frontend/clusters_list/components/clusters_spec.js
index c150a7f05d0..5c7635c1617 100644
--- a/spec/frontend/clusters_list/components/clusters_spec.js
+++ b/spec/frontend/clusters_list/components/clusters_spec.js
@@ -103,11 +103,9 @@ describe('Clusters', () => {
});
describe('when is loaded as a child component', () => {
- beforeEach(() => {
+ it("shouldn't render pagination buttons", () => {
createWrapper({ limit: 6 });
- });
- it("shouldn't render pagination buttons", () => {
expect(findPaginatedButtons().exists()).toBe(false);
});
});
diff --git a/spec/frontend/content_editor/components/bubble_menus/code_block_spec.js b/spec/frontend/content_editor/components/bubble_menus/code_block_spec.js
index 646d068e795..154035a46ed 100644
--- a/spec/frontend/content_editor/components/bubble_menus/code_block_spec.js
+++ b/spec/frontend/content_editor/components/bubble_menus/code_block_spec.js
@@ -44,6 +44,12 @@ describe('content_editor/components/bubble_menus/code_block', () => {
});
};
+ const preTag = ({ language, content = 'test' } = {}) => {
+ const languageAttr = language ? ` lang="${language}"` : '';
+
+ return `<pre class="js-syntax-highlight"${languageAttr}>${content}</pre>`;
+ };
+
const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
const findDropdownItemsData = () =>
findDropdownItems().wrappers.map((x) => ({
@@ -62,7 +68,7 @@ describe('content_editor/components/bubble_menus/code_block', () => {
});
it('renders bubble menu component', async () => {
- tiptapEditor.commands.insertContent('<pre>test</pre>');
+ tiptapEditor.commands.insertContent(preTag());
bubbleMenu = wrapper.findComponent(BubbleMenu);
await emitEditorEvent({ event: 'transaction', tiptapEditor });
@@ -72,7 +78,7 @@ describe('content_editor/components/bubble_menus/code_block', () => {
});
it('selects plaintext language by default', async () => {
- tiptapEditor.commands.insertContent('<pre>test</pre>');
+ tiptapEditor.commands.insertContent(preTag());
bubbleMenu = wrapper.findComponent(BubbleMenu);
await emitEditorEvent({ event: 'transaction', tiptapEditor });
@@ -81,7 +87,7 @@ describe('content_editor/components/bubble_menus/code_block', () => {
});
it('selects appropriate language based on the code block', async () => {
- tiptapEditor.commands.insertContent('<pre lang="javascript">var a = 2;</pre>');
+ tiptapEditor.commands.insertContent(preTag({ language: 'javascript' }));
bubbleMenu = wrapper.findComponent(BubbleMenu);
await emitEditorEvent({ event: 'transaction', tiptapEditor });
@@ -90,7 +96,7 @@ describe('content_editor/components/bubble_menus/code_block', () => {
});
it('selects diagram syntax for mermaid', async () => {
- tiptapEditor.commands.insertContent('<pre lang="mermaid">test</pre>');
+ tiptapEditor.commands.insertContent(preTag({ language: 'mermaid' }));
bubbleMenu = wrapper.findComponent(BubbleMenu);
await emitEditorEvent({ event: 'transaction', tiptapEditor });
@@ -99,7 +105,7 @@ describe('content_editor/components/bubble_menus/code_block', () => {
});
it("selects Custom (syntax) if the language doesn't exist in the list", async () => {
- tiptapEditor.commands.insertContent('<pre lang="nomnoml">test</pre>');
+ tiptapEditor.commands.insertContent(preTag({ language: 'nomnoml' }));
bubbleMenu = wrapper.findComponent(BubbleMenu);
await emitEditorEvent({ event: 'transaction', tiptapEditor });
@@ -109,19 +115,20 @@ describe('content_editor/components/bubble_menus/code_block', () => {
describe('copy button', () => {
it('copies the text of the code block', async () => {
+ const content = 'var a = Math.PI / 2;';
jest.spyOn(navigator.clipboard, 'writeText');
- tiptapEditor.commands.insertContent('<pre lang="javascript">var a = Math.PI / 2;</pre>');
+ tiptapEditor.commands.insertContent(preTag({ language: 'javascript', content }));
await wrapper.findByTestId('copy-code-block').vm.$emit('click');
- expect(navigator.clipboard.writeText).toHaveBeenCalledWith('var a = Math.PI / 2;');
+ expect(navigator.clipboard.writeText).toHaveBeenCalledWith(content);
});
});
describe('delete button', () => {
it('deletes the code block', async () => {
- tiptapEditor.commands.insertContent('<pre lang="javascript">var a = 2;</pre>');
+ tiptapEditor.commands.insertContent(preTag({ language: 'javascript' }));
await wrapper.findByTestId('delete-code-block').vm.$emit('click');
@@ -164,7 +171,7 @@ describe('content_editor/components/bubble_menus/code_block', () => {
describe('when opened and search is changed', () => {
beforeEach(async () => {
- tiptapEditor.commands.insertContent('<pre lang="javascript">var a = 2;</pre>');
+ tiptapEditor.commands.insertContent(preTag({ language: 'javascript' }));
wrapper.findComponent(GlSearchBoxByType).vm.$emit('input', 'js');
diff --git a/spec/frontend/content_editor/components/bubble_menus/formatting_spec.js b/spec/frontend/content_editor/components/bubble_menus/formatting_spec.js
index 6479c0ba008..1e2f58d9e40 100644
--- a/spec/frontend/content_editor/components/bubble_menus/formatting_spec.js
+++ b/spec/frontend/content_editor/components/bubble_menus/formatting_spec.js
@@ -46,12 +46,14 @@ describe('content_editor/components/bubble_menus/formatting', () => {
});
describe.each`
- testId | controlProps
- ${'bold'} | ${{ contentType: 'bold', iconName: 'bold', label: 'Bold text', editorCommand: 'toggleBold', size: 'medium', category: 'tertiary' }}
- ${'italic'} | ${{ contentType: 'italic', iconName: 'italic', label: 'Italic text', editorCommand: 'toggleItalic', size: 'medium', category: 'tertiary' }}
- ${'strike'} | ${{ contentType: 'strike', iconName: 'strikethrough', label: 'Strikethrough', editorCommand: 'toggleStrike', size: 'medium', category: 'tertiary' }}
- ${'code'} | ${{ contentType: 'code', iconName: 'code', label: 'Code', editorCommand: 'toggleCode', size: 'medium', category: 'tertiary' }}
- ${'link'} | ${{ contentType: 'link', iconName: 'link', label: 'Insert link', editorCommand: 'toggleLink', editorCommandParams: { href: '' }, size: 'medium', category: 'tertiary' }}
+ testId | controlProps
+ ${'bold'} | ${{ contentType: 'bold', iconName: 'bold', label: 'Bold text', editorCommand: 'toggleBold' }}
+ ${'italic'} | ${{ contentType: 'italic', iconName: 'italic', label: 'Italic text', editorCommand: 'toggleItalic' }}
+ ${'strike'} | ${{ contentType: 'strike', iconName: 'strikethrough', label: 'Strikethrough', editorCommand: 'toggleStrike' }}
+ ${'code'} | ${{ contentType: 'code', iconName: 'code', label: 'Code', editorCommand: 'toggleCode' }}
+ ${'superscript'} | ${{ contentType: 'superscript', iconName: 'superscript', label: 'Superscript', editorCommand: 'toggleSuperscript' }}
+ ${'subscript'} | ${{ contentType: 'subscript', iconName: 'subscript', label: 'Subscript', editorCommand: 'toggleSubscript' }}
+ ${'link'} | ${{ contentType: 'link', iconName: 'link', label: 'Insert link', editorCommand: 'toggleLink', editorCommandParams: { href: '' } }}
`('given a $testId toolbar control', ({ testId, controlProps }) => {
beforeEach(() => {
buildWrapper();
@@ -60,9 +62,13 @@ describe('content_editor/components/bubble_menus/formatting', () => {
it('renders the toolbar control with the provided properties', () => {
expect(wrapper.findByTestId(testId).exists()).toBe(true);
- Object.keys(controlProps).forEach((propName) => {
- expect(wrapper.findByTestId(testId).props(propName)).toEqual(controlProps[propName]);
- });
+ expect(wrapper.findByTestId(testId).props()).toEqual(
+ expect.objectContaining({
+ ...controlProps,
+ size: 'medium',
+ category: 'tertiary',
+ }),
+ );
});
it('tracks the execution of toolbar controls', () => {
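Note: the refactor above folds the per-prop assertions into a single `expect.objectContaining` check. A standalone illustration of the matcher's semantics (the object literal is a made-up example, not a real component's props):

// objectContaining passes when the received object includes at least the listed
// entries; any extra props the component renders with are ignored.
expect({ contentType: 'bold', size: 'medium', category: 'tertiary', tracking: true }).toEqual(
  expect.objectContaining({ contentType: 'bold', size: 'medium', category: 'tertiary' }),
);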
diff --git a/spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js b/spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js
index 0334a18c9a1..351fd967719 100644
--- a/spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js
@@ -2,22 +2,26 @@ import { mountExtended } from 'helpers/vue_test_utils_helper';
import ToolbarMoreDropdown from '~/content_editor/components/toolbar_more_dropdown.vue';
import Diagram from '~/content_editor/extensions/diagram';
import HorizontalRule from '~/content_editor/extensions/horizontal_rule';
-import { createTestEditor, mockChainedCommands } from '../test_utils';
+import eventHubFactory from '~/helpers/event_hub_factory';
+import { createTestEditor, mockChainedCommands, emitEditorEvent } from '../test_utils';
describe('content_editor/components/toolbar_more_dropdown', () => {
let wrapper;
let tiptapEditor;
+ let eventHub;
const buildEditor = () => {
tiptapEditor = createTestEditor({
extensions: [Diagram, HorizontalRule],
});
+ eventHub = eventHubFactory();
};
const buildWrapper = (propsData = {}) => {
wrapper = mountExtended(ToolbarMoreDropdown, {
provide: {
tiptapEditor,
+ eventHub,
},
propsData,
});
@@ -33,19 +37,30 @@ describe('content_editor/components/toolbar_more_dropdown', () => {
});
describe.each`
- label | contentType | data
- ${'Mermaid diagram'} | ${'diagram'} | ${{ language: 'mermaid' }}
- ${'PlantUML diagram'} | ${'diagram'} | ${{ language: 'plantuml' }}
- ${'Horizontal rule'} | ${'horizontalRule'} | ${undefined}
- `('when option $label is clicked', ({ label, contentType, data }) => {
- it(`inserts a ${contentType}`, async () => {
- const commands = mockChainedCommands(tiptapEditor, ['setNode', 'focus', 'run']);
+ name | contentType | command | params
+ ${'Code block'} | ${'codeBlock'} | ${'setNode'} | ${['codeBlock']}
+ ${'Details block'} | ${'details'} | ${'toggleList'} | ${['details', 'detailsContent']}
+ ${'Bullet list'} | ${'bulletList'} | ${'toggleList'} | ${['bulletList', 'listItem']}
+ ${'Ordered list'} | ${'orderedList'} | ${'toggleList'} | ${['orderedList', 'listItem']}
+ ${'Task list'} | ${'taskList'} | ${'toggleList'} | ${['taskList', 'taskItem']}
+ ${'Mermaid diagram'} | ${'diagram'} | ${'setNode'} | ${['diagram', { language: 'mermaid' }]}
+ ${'PlantUML diagram'} | ${'diagram'} | ${'setNode'} | ${['diagram', { language: 'plantuml' }]}
+ ${'Horizontal rule'} | ${'horizontalRule'} | ${'setHorizontalRule'} | ${[]}
+ `('when option $name is clicked', ({ name, command, contentType, params }) => {
+ let commands;
+ let btn;
+
+ beforeEach(async () => {
+ commands = mockChainedCommands(tiptapEditor, [command, 'focus', 'run']);
+ btn = wrapper.findByRole('menuitem', { name });
+ });
- const btn = wrapper.findByRole('menuitem', { name: label });
+ it(`inserts a ${contentType}`, async () => {
await btn.trigger('click');
+ await emitEditorEvent({ event: 'transaction', tiptapEditor });
expect(commands.focus).toHaveBeenCalled();
- expect(commands.setNode).toHaveBeenCalledWith(contentType, data);
+ expect(commands[command]).toHaveBeenCalledWith(...params);
expect(commands.run).toHaveBeenCalled();
expect(wrapper.emitted('execute')).toEqual([[{ contentType }]]);
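Note: the rewritten dropdown spec leans on `mockChainedCommands` from ../test_utils, which is not shown in this diff. A hedged sketch of the chainable-mock idea it relies on (the real helper may differ):

// Sketch only: every mocked command returns the shared mock object so calls can be
// chained, e.g. tiptapEditor.chain().toggleList(...).focus().run(), and each step
// can still be asserted on individually via commands[command].
const mockChainedCommandsSketch = (editor, commandNames) => {
  const commands = {};
  commandNames.forEach((name) => {
    commands[name] = jest.fn(() => commands);
  });
  jest.spyOn(editor, 'chain').mockImplementation(() => commands);
  return commands;
};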
diff --git a/spec/frontend/content_editor/components/top_toolbar_spec.js b/spec/frontend/content_editor/components/top_toolbar_spec.js
index d98a9a52aff..2acb6e14ce0 100644
--- a/spec/frontend/content_editor/components/top_toolbar_spec.js
+++ b/spec/frontend/content_editor/components/top_toolbar_spec.js
@@ -24,17 +24,15 @@ describe('content_editor/components/top_toolbar', () => {
describe.each`
testId | controlProps
+ ${'text-styles'} | ${{}}
${'bold'} | ${{ contentType: 'bold', iconName: 'bold', label: 'Bold text', editorCommand: 'toggleBold' }}
${'italic'} | ${{ contentType: 'italic', iconName: 'italic', label: 'Italic text', editorCommand: 'toggleItalic' }}
- ${'strike'} | ${{ contentType: 'strike', iconName: 'strikethrough', label: 'Strikethrough', editorCommand: 'toggleStrike' }}
- ${'code'} | ${{ contentType: 'code', iconName: 'code', label: 'Code', editorCommand: 'toggleCode' }}
${'blockquote'} | ${{ contentType: 'blockquote', iconName: 'quote', label: 'Insert a quote', editorCommand: 'toggleBlockquote' }}
+ ${'code'} | ${{ contentType: 'code', iconName: 'code', label: 'Code', editorCommand: 'toggleCode' }}
+ ${'link'} | ${{}}
${'bullet-list'} | ${{ contentType: 'bulletList', iconName: 'list-bulleted', label: 'Add a bullet list', editorCommand: 'toggleBulletList' }}
${'ordered-list'} | ${{ contentType: 'orderedList', iconName: 'list-numbered', label: 'Add a numbered list', editorCommand: 'toggleOrderedList' }}
- ${'details'} | ${{ contentType: 'details', iconName: 'details-block', label: 'Add a collapsible section', editorCommand: 'toggleDetails' }}
- ${'code-block'} | ${{ contentType: 'codeBlock', iconName: 'doc-code', label: 'Insert a code block', editorCommand: 'toggleCodeBlock' }}
- ${'text-styles'} | ${{}}
- ${'link'} | ${{}}
+ ${'task-list'} | ${{ contentType: 'taskList', iconName: 'list-task', label: 'Add a task list', editorCommand: 'toggleTaskList' }}
${'image'} | ${{}}
${'table'} | ${{}}
${'more'} | ${{}}
diff --git a/spec/frontend/content_editor/extensions/html_nodes_spec.js b/spec/frontend/content_editor/extensions/html_nodes_spec.js
new file mode 100644
index 00000000000..24c68239025
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/html_nodes_spec.js
@@ -0,0 +1,42 @@
+import HTMLNodes from '~/content_editor/extensions/html_nodes';
+import { createTestEditor, createDocBuilder } from '../test_utils';
+
+describe('content_editor/extensions/html_nodes', () => {
+ let tiptapEditor;
+ let doc;
+ let div;
+ let pre;
+ let p;
+
+ beforeEach(() => {
+ tiptapEditor = createTestEditor({ extensions: [...HTMLNodes] });
+
+ ({
+ builders: { doc, p, pre, div },
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ ...HTMLNodes.reduce(
+ (builders, htmlNode) => ({
+ ...builders,
+ [htmlNode.name]: { nodeType: htmlNode.name },
+ }),
+ {},
+ ),
+ },
+ }));
+ });
+
+ it.each`
+ input | insertedNodes
+ ${'<div><p>foo</p></div>'} | ${() => div(p('foo'))}
+ ${'<pre><p>foo</p></pre>'} | ${() => pre(p('foo'))}
+ `('parses and creates nodes for $input', ({ input, insertedNodes }) => {
+ const expectedDoc = doc(insertedNodes());
+
+ tiptapEditor.commands.setContent(input);
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ expect(tiptapEditor.getHTML()).toEqual(input);
+ });
+});
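Note: the `HTMLNodes.reduce(...)` expression above (repeated in two other specs in this diff) only turns the exported extensions into the `names` config that `createDocBuilder` expects. Assuming `HTMLNodes` exports extensions named 'div' and 'pre', it evaluates to:

const HTMLNodesStandIn = [{ name: 'div' }, { name: 'pre' }]; // stand-in for the real extensions

const names = HTMLNodesStandIn.reduce(
  (builders, htmlNode) => ({ ...builders, [htmlNode.name]: { nodeType: htmlNode.name } }),
  {},
);

// names === { div: { nodeType: 'div' }, pre: { nodeType: 'pre' } }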
diff --git a/spec/frontend/content_editor/extensions/paste_markdown_spec.js b/spec/frontend/content_editor/extensions/paste_markdown_spec.js
index 5d46c2c0650..53efda6aee2 100644
--- a/spec/frontend/content_editor/extensions/paste_markdown_spec.js
+++ b/spec/frontend/content_editor/extensions/paste_markdown_spec.js
@@ -14,7 +14,7 @@ import {
import waitForPromises from 'helpers/wait_for_promises';
import { createTestEditor, createDocBuilder, waitUntilNextDocTransaction } from '../test_utils';
-const CODE_BLOCK_HTML = '<pre lang="javascript">var a = 2;</pre>';
+const CODE_BLOCK_HTML = '<pre class="js-syntax-highlight" lang="javascript">var a = 2;</pre>';
const DIAGRAM_HTML =
'<img data-diagram="nomnoml" data-diagram-src="data:text/plain;base64,WzxmcmFtZT5EZWNvcmF0b3IgcGF0dGVybl0=">';
const FRONTMATTER_HTML = '<pre lang="yaml" data-lang-params="frontmatter">key: value</pre>';
diff --git a/spec/frontend/content_editor/markdown_snapshot_spec.js b/spec/frontend/content_editor/markdown_snapshot_spec.js
new file mode 100644
index 00000000000..63ca66172e6
--- /dev/null
+++ b/spec/frontend/content_editor/markdown_snapshot_spec.js
@@ -0,0 +1,10 @@
+import path from 'path';
+import { describeMarkdownSnapshots } from 'jest/content_editor/markdown_snapshot_spec_helper';
+
+jest.mock('~/emoji');
+
+const glfmSpecificationDir = path.join(__dirname, '..', '..', '..', 'glfm_specification');
+
+// See https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/#markdown-snapshot-testing
+// for documentation on this spec.
+describeMarkdownSnapshots('CE markdown snapshots in ContentEditor', glfmSpecificationDir);
diff --git a/spec/frontend/content_editor/markdown_snapshot_spec_helper.js b/spec/frontend/content_editor/markdown_snapshot_spec_helper.js
new file mode 100644
index 00000000000..05fa8e6a6b2
--- /dev/null
+++ b/spec/frontend/content_editor/markdown_snapshot_spec_helper.js
@@ -0,0 +1,102 @@
+// See https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/#markdown-snapshot-testing
+// for documentation on this spec.
+
+import fs from 'fs';
+import path from 'path';
+import jsYaml from 'js-yaml';
+import { pick } from 'lodash';
+import {
+ IMPLEMENTATION_ERROR_MSG,
+ renderHtmlAndJsonForAllExamples,
+} from './render_html_and_json_for_all_examples';
+
+const filterExamples = (examples) => {
+ const focusedMarkdownExamples = process.env.FOCUSED_MARKDOWN_EXAMPLES?.split(',') || [];
+ if (!focusedMarkdownExamples.length) {
+ return examples;
+ }
+ return pick(examples, focusedMarkdownExamples);
+};
+
+const loadExamples = (dir, fileName) => {
+ const yaml = fs.readFileSync(path.join(dir, fileName));
+ const examples = jsYaml.safeLoad(yaml, {});
+ return filterExamples(examples);
+};
+
+// eslint-disable-next-line jest/no-export
+export const describeMarkdownSnapshots = (description, glfmSpecificationDir) => {
+ let actualHtmlAndJsonExamples;
+ let skipRunningSnapshotWysiwygHtmlTests;
+ let skipRunningSnapshotProsemirrorJsonTests;
+
+ const exampleStatuses = loadExamples(
+ path.join(glfmSpecificationDir, 'input', 'gitlab_flavored_markdown'),
+ 'glfm_example_status.yml',
+ );
+ const glfmExampleSnapshotsDir = path.join(glfmSpecificationDir, 'example_snapshots');
+ const markdownExamples = loadExamples(glfmExampleSnapshotsDir, 'markdown.yml');
+ const expectedHtmlExamples = loadExamples(glfmExampleSnapshotsDir, 'html.yml');
+ const expectedProseMirrorJsonExamples = loadExamples(
+ glfmExampleSnapshotsDir,
+ 'prosemirror_json.yml',
+ );
+
+ beforeAll(async () => {
+ return renderHtmlAndJsonForAllExamples(markdownExamples).then((examples) => {
+ actualHtmlAndJsonExamples = examples;
+ });
+ });
+
+ describe(description, () => {
+ const exampleNames = Object.keys(markdownExamples);
+
+ describe.each(exampleNames)('%s', (name) => {
+ const exampleNamePrefix = 'verifies conversion of GLFM to';
+ skipRunningSnapshotWysiwygHtmlTests =
+ exampleStatuses[name]?.skip_running_snapshot_wysiwyg_html_tests;
+ skipRunningSnapshotProsemirrorJsonTests =
+ exampleStatuses[name]?.skip_running_snapshot_prosemirror_json_tests;
+
+ const markdown = markdownExamples[name];
+
+ if (skipRunningSnapshotWysiwygHtmlTests) {
+ it.todo(`${exampleNamePrefix} HTML: ${skipRunningSnapshotWysiwygHtmlTests}`);
+ } else {
+ it(`${exampleNamePrefix} HTML`, async () => {
+ const expectedHtml = expectedHtmlExamples[name].wysiwyg;
+ const { html: actualHtml } = actualHtmlAndJsonExamples[name];
+
+ // noinspection JSUnresolvedFunction (required to avoid RubyMine type inspection warning, because custom matchers auto-imported via Jest test setup are not automatically resolved - see https://youtrack.jetbrains.com/issue/WEB-42350/matcher-for-jest-is-not-recognized-but-it-is-runable)
+ expect(actualHtml).toMatchExpectedForMarkdown(
+ 'HTML',
+ name,
+ markdown,
+ IMPLEMENTATION_ERROR_MSG,
+ expectedHtml,
+ );
+ });
+ }
+
+ if (skipRunningSnapshotProsemirrorJsonTests) {
+ it.todo(
+ `${exampleNamePrefix} ProseMirror JSON: ${skipRunningSnapshotProsemirrorJsonTests}`,
+ );
+ } else {
+ it(`${exampleNamePrefix} ProseMirror JSON`, async () => {
+ const expectedJson = expectedProseMirrorJsonExamples[name];
+ const { json: actualJson } = actualHtmlAndJsonExamples[name];
+
+ // noinspection JSUnresolvedFunction
+ expect(actualJson).toMatchExpectedForMarkdown(
+ 'JSON',
+ name,
+ markdown,
+ IMPLEMENTATION_ERROR_MSG,
+ expectedJson,
+ );
+ });
+ }
+ });
+ });
+};
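Note: `filterExamples` above narrows the snapshot run to the examples listed in the `FOCUSED_MARKDOWN_EXAMPLES` environment variable via lodash's `pick`. A small self-contained illustration (example names are placeholders, not real GLFM example names):

import { pick } from 'lodash';

const examples = { strong_emphasis: '**bold**', emphasis: '_italic_' };
// e.g. run the suite with FOCUSED_MARKDOWN_EXAMPLES=strong_emphasis
const focused = (process.env.FOCUSED_MARKDOWN_EXAMPLES || '').split(',').filter(Boolean);

const filtered = focused.length ? pick(examples, focused) : examples;
// With the variable set as above: { strong_emphasis: '**bold**' }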
diff --git a/spec/frontend/content_editor/remark_markdown_processing_spec.js b/spec/frontend/content_editor/remark_markdown_processing_spec.js
index 60dc540e192..48adceaab58 100644
--- a/spec/frontend/content_editor/remark_markdown_processing_spec.js
+++ b/spec/frontend/content_editor/remark_markdown_processing_spec.js
@@ -6,6 +6,7 @@ import CodeBlockHighlight from '~/content_editor/extensions/code_block_highlight
import FootnoteDefinition from '~/content_editor/extensions/footnote_definition';
import FootnoteReference from '~/content_editor/extensions/footnote_reference';
import HardBreak from '~/content_editor/extensions/hard_break';
+import HTMLNodes from '~/content_editor/extensions/html_nodes';
import Heading from '~/content_editor/extensions/heading';
import HorizontalRule from '~/content_editor/extensions/horizontal_rule';
import Image from '~/content_editor/extensions/image';
@@ -52,6 +53,7 @@ const tiptapEditor = createTestEditor({
TableCell,
TaskList,
TaskItem,
+ ...HTMLNodes,
],
});
@@ -64,6 +66,7 @@ const {
bulletList,
code,
codeBlock,
+ div,
footnoteDefinition,
footnoteReference,
hardBreak,
@@ -74,6 +77,7 @@ const {
link,
listItem,
orderedList,
+ pre,
strike,
table,
tableRow,
@@ -108,14 +112,21 @@ const {
tableRow: { nodeType: TableRow.name },
taskItem: { nodeType: TaskItem.name },
taskList: { nodeType: TaskList.name },
+ ...HTMLNodes.reduce(
+ (builders, htmlNode) => ({
+ ...builders,
+ [htmlNode.name]: { nodeType: htmlNode.name },
+ }),
+ {},
+ ),
},
});
describe('Client side Markdown processing', () => {
- const deserialize = async (content) => {
+ const deserialize = async (markdown) => {
const { document } = await remarkMarkdownDeserializer().deserialize({
schema: tiptapEditor.schema,
- content,
+ markdown,
});
return document;
@@ -127,8 +138,8 @@ describe('Client side Markdown processing', () => {
pristineDoc: document,
});
- const sourceAttrs = (sourceMapKey, sourceMarkdown) => ({
- sourceMapKey,
+ const source = (sourceMarkdown) => ({
+ sourceMapKey: expect.any(String),
sourceMarkdown,
});
@@ -136,63 +147,48 @@ describe('Client side Markdown processing', () => {
{
markdown: '__bold text__',
expectedDoc: doc(
- paragraph(
- sourceAttrs('0:13', '__bold text__'),
- bold(sourceAttrs('0:13', '__bold text__'), 'bold text'),
- ),
+ paragraph(source('__bold text__'), bold(source('__bold text__'), 'bold text')),
),
},
{
markdown: '**bold text**',
expectedDoc: doc(
- paragraph(
- sourceAttrs('0:13', '**bold text**'),
- bold(sourceAttrs('0:13', '**bold text**'), 'bold text'),
- ),
+ paragraph(source('**bold text**'), bold(source('**bold text**'), 'bold text')),
),
},
{
markdown: '<strong>bold text</strong>',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:26', '<strong>bold text</strong>'),
- bold(sourceAttrs('0:26', '<strong>bold text</strong>'), 'bold text'),
+ source('<strong>bold text</strong>'),
+ bold(source('<strong>bold text</strong>'), 'bold text'),
),
),
},
{
markdown: '<b>bold text</b>',
expectedDoc: doc(
- paragraph(
- sourceAttrs('0:16', '<b>bold text</b>'),
- bold(sourceAttrs('0:16', '<b>bold text</b>'), 'bold text'),
- ),
+ paragraph(source('<b>bold text</b>'), bold(source('<b>bold text</b>'), 'bold text')),
),
},
{
markdown: '_italic text_',
expectedDoc: doc(
- paragraph(
- sourceAttrs('0:13', '_italic text_'),
- italic(sourceAttrs('0:13', '_italic text_'), 'italic text'),
- ),
+ paragraph(source('_italic text_'), italic(source('_italic text_'), 'italic text')),
),
},
{
markdown: '*italic text*',
expectedDoc: doc(
- paragraph(
- sourceAttrs('0:13', '*italic text*'),
- italic(sourceAttrs('0:13', '*italic text*'), 'italic text'),
- ),
+ paragraph(source('*italic text*'), italic(source('*italic text*'), 'italic text')),
),
},
{
markdown: '<em>italic text</em>',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:20', '<em>italic text</em>'),
- italic(sourceAttrs('0:20', '<em>italic text</em>'), 'italic text'),
+ source('<em>italic text</em>'),
+ italic(source('<em>italic text</em>'), 'italic text'),
),
),
},
@@ -200,28 +196,25 @@ describe('Client side Markdown processing', () => {
markdown: '<i>italic text</i>',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:18', '<i>italic text</i>'),
- italic(sourceAttrs('0:18', '<i>italic text</i>'), 'italic text'),
+ source('<i>italic text</i>'),
+ italic(source('<i>italic text</i>'), 'italic text'),
),
),
},
{
markdown: '`inline code`',
expectedDoc: doc(
- paragraph(
- sourceAttrs('0:13', '`inline code`'),
- code(sourceAttrs('0:13', '`inline code`'), 'inline code'),
- ),
+ paragraph(source('`inline code`'), code(source('`inline code`'), 'inline code')),
),
},
{
markdown: '**`inline code bold`**',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:22', '**`inline code bold`**'),
+ source('**`inline code bold`**'),
bold(
- sourceAttrs('0:22', '**`inline code bold`**'),
- code(sourceAttrs('2:20', '`inline code bold`'), 'inline code bold'),
+ source('**`inline code bold`**'),
+ code(source('`inline code bold`'), 'inline code bold'),
),
),
),
@@ -230,10 +223,10 @@ describe('Client side Markdown processing', () => {
markdown: '_`inline code italics`_',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:23', '_`inline code italics`_'),
+ source('_`inline code italics`_'),
italic(
- sourceAttrs('0:23', '_`inline code italics`_'),
- code(sourceAttrs('1:22', '`inline code italics`'), 'inline code italics'),
+ source('_`inline code italics`_'),
+ code(source('`inline code italics`'), 'inline code italics'),
),
),
),
@@ -246,8 +239,8 @@ describe('Client side Markdown processing', () => {
`,
expectedDoc: doc(
paragraph(
- sourceAttrs('0:28', '<i class="foo">\n *bar*\n</i>'),
- italic(sourceAttrs('0:28', '<i class="foo">\n *bar*\n</i>'), '\n *bar*\n'),
+ source('<i class="foo">\n *bar*\n</i>'),
+ italic(source('<i class="foo">\n *bar*\n</i>'), '\n *bar*\n'),
),
),
},
@@ -259,8 +252,8 @@ describe('Client side Markdown processing', () => {
`,
expectedDoc: doc(
paragraph(
- sourceAttrs('0:27', '<img src="bar" alt="foo" />'),
- image({ ...sourceAttrs('0:27', '<img src="bar" alt="foo" />'), alt: 'foo', src: 'bar' }),
+ source('<img src="bar" alt="foo" />'),
+ image({ ...source('<img src="bar" alt="foo" />'), alt: 'foo', src: 'bar' }),
),
),
},
@@ -273,15 +266,12 @@ describe('Client side Markdown processing', () => {
`,
expectedDoc: doc(
bulletList(
- sourceAttrs('0:13', '- List item 1'),
- listItem(
- sourceAttrs('0:13', '- List item 1'),
- paragraph(sourceAttrs('2:13', 'List item 1'), 'List item 1'),
- ),
+ source('- List item 1'),
+ listItem(source('- List item 1'), paragraph(source('List item 1'), 'List item 1')),
),
paragraph(
- sourceAttrs('15:42', '<img src="bar" alt="foo" />'),
- image({ ...sourceAttrs('15:42', '<img src="bar" alt="foo" />'), alt: 'foo', src: 'bar' }),
+ source('<img src="bar" alt="foo" />'),
+ image({ ...source('<img src="bar" alt="foo" />'), alt: 'foo', src: 'bar' }),
),
),
},
@@ -289,10 +279,10 @@ describe('Client side Markdown processing', () => {
markdown: '[GitLab](https://gitlab.com "Go to GitLab")',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:43', '[GitLab](https://gitlab.com "Go to GitLab")'),
+ source('[GitLab](https://gitlab.com "Go to GitLab")'),
link(
{
- ...sourceAttrs('0:43', '[GitLab](https://gitlab.com "Go to GitLab")'),
+ ...source('[GitLab](https://gitlab.com "Go to GitLab")'),
href: 'https://gitlab.com',
title: 'Go to GitLab',
},
@@ -305,12 +295,12 @@ describe('Client side Markdown processing', () => {
markdown: '**[GitLab](https://gitlab.com "Go to GitLab")**',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:47', '**[GitLab](https://gitlab.com "Go to GitLab")**'),
+ source('**[GitLab](https://gitlab.com "Go to GitLab")**'),
bold(
- sourceAttrs('0:47', '**[GitLab](https://gitlab.com "Go to GitLab")**'),
+ source('**[GitLab](https://gitlab.com "Go to GitLab")**'),
link(
{
- ...sourceAttrs('2:45', '[GitLab](https://gitlab.com "Go to GitLab")'),
+ ...source('[GitLab](https://gitlab.com "Go to GitLab")'),
href: 'https://gitlab.com',
title: 'Go to GitLab',
},
@@ -324,10 +314,10 @@ describe('Client side Markdown processing', () => {
markdown: 'www.commonmark.org',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:18', 'www.commonmark.org'),
+ source('www.commonmark.org'),
link(
{
- ...sourceAttrs('0:18', 'www.commonmark.org'),
+ ...source('www.commonmark.org'),
href: 'http://www.commonmark.org',
},
'www.commonmark.org',
@@ -339,11 +329,11 @@ describe('Client side Markdown processing', () => {
markdown: 'Visit www.commonmark.org/help for more information.',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:51', 'Visit www.commonmark.org/help for more information.'),
+ source('Visit www.commonmark.org/help for more information.'),
'Visit ',
link(
{
- ...sourceAttrs('6:29', 'www.commonmark.org/help'),
+ ...source('www.commonmark.org/help'),
href: 'http://www.commonmark.org/help',
},
'www.commonmark.org/help',
@@ -356,11 +346,11 @@ describe('Client side Markdown processing', () => {
markdown: 'hello@mail+xyz.example isn’t valid, but hello+xyz@mail.example is.',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:66', 'hello@mail+xyz.example isn’t valid, but hello+xyz@mail.example is.'),
+ source('hello@mail+xyz.example isn’t valid, but hello+xyz@mail.example is.'),
'hello@mail+xyz.example isn’t valid, but ',
link(
{
- ...sourceAttrs('40:62', 'hello+xyz@mail.example'),
+ ...source('hello+xyz@mail.example'),
href: 'mailto:hello+xyz@mail.example',
},
'hello+xyz@mail.example',
@@ -373,11 +363,12 @@ describe('Client side Markdown processing', () => {
markdown: '[https://gitlab.com>',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:20', '[https://gitlab.com>'),
+ source('[https://gitlab.com>'),
'[',
link(
{
- ...sourceAttrs(),
+ sourceMapKey: null,
+ sourceMarkdown: null,
href: 'https://gitlab.com',
},
'https://gitlab.com',
@@ -392,9 +383,9 @@ This is a paragraph with a\\
hard line break`,
expectedDoc: doc(
paragraph(
- sourceAttrs('0:43', 'This is a paragraph with a\\\nhard line break'),
+ source('This is a paragraph with a\\\nhard line break'),
'This is a paragraph with a',
- hardBreak(sourceAttrs('26:28', '\\\n')),
+ hardBreak(source('\\\n')),
'\nhard line break',
),
),
@@ -403,9 +394,9 @@ hard line break`,
markdown: '![GitLab Logo](https://gitlab.com/logo.png "GitLab Logo")',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:57', '![GitLab Logo](https://gitlab.com/logo.png "GitLab Logo")'),
+ source('![GitLab Logo](https://gitlab.com/logo.png "GitLab Logo")'),
image({
- ...sourceAttrs('0:57', '![GitLab Logo](https://gitlab.com/logo.png "GitLab Logo")'),
+ ...source('![GitLab Logo](https://gitlab.com/logo.png "GitLab Logo")'),
alt: 'GitLab Logo',
src: 'https://gitlab.com/logo.png',
title: 'GitLab Logo',
@@ -415,49 +406,43 @@ hard line break`,
},
{
markdown: '---',
- expectedDoc: doc(horizontalRule(sourceAttrs('0:3', '---'))),
+ expectedDoc: doc(horizontalRule(source('---'))),
},
{
markdown: '***',
- expectedDoc: doc(horizontalRule(sourceAttrs('0:3', '***'))),
+ expectedDoc: doc(horizontalRule(source('***'))),
},
{
markdown: '___',
- expectedDoc: doc(horizontalRule(sourceAttrs('0:3', '___'))),
+ expectedDoc: doc(horizontalRule(source('___'))),
},
{
markdown: '<hr>',
- expectedDoc: doc(horizontalRule(sourceAttrs('0:4', '<hr>'))),
+ expectedDoc: doc(horizontalRule(source('<hr>'))),
},
{
markdown: '# Heading 1',
- expectedDoc: doc(heading({ ...sourceAttrs('0:11', '# Heading 1'), level: 1 }, 'Heading 1')),
+ expectedDoc: doc(heading({ ...source('# Heading 1'), level: 1 }, 'Heading 1')),
},
{
markdown: '## Heading 2',
- expectedDoc: doc(heading({ ...sourceAttrs('0:12', '## Heading 2'), level: 2 }, 'Heading 2')),
+ expectedDoc: doc(heading({ ...source('## Heading 2'), level: 2 }, 'Heading 2')),
},
{
markdown: '### Heading 3',
- expectedDoc: doc(heading({ ...sourceAttrs('0:13', '### Heading 3'), level: 3 }, 'Heading 3')),
+ expectedDoc: doc(heading({ ...source('### Heading 3'), level: 3 }, 'Heading 3')),
},
{
markdown: '#### Heading 4',
- expectedDoc: doc(
- heading({ ...sourceAttrs('0:14', '#### Heading 4'), level: 4 }, 'Heading 4'),
- ),
+ expectedDoc: doc(heading({ ...source('#### Heading 4'), level: 4 }, 'Heading 4')),
},
{
markdown: '##### Heading 5',
- expectedDoc: doc(
- heading({ ...sourceAttrs('0:15', '##### Heading 5'), level: 5 }, 'Heading 5'),
- ),
+ expectedDoc: doc(heading({ ...source('##### Heading 5'), level: 5 }, 'Heading 5')),
},
{
markdown: '###### Heading 6',
- expectedDoc: doc(
- heading({ ...sourceAttrs('0:16', '###### Heading 6'), level: 6 }, 'Heading 6'),
- ),
+ expectedDoc: doc(heading({ ...source('###### Heading 6'), level: 6 }, 'Heading 6')),
},
{
markdown: `
@@ -465,9 +450,7 @@ Heading
one
======
`,
- expectedDoc: doc(
- heading({ ...sourceAttrs('0:18', 'Heading\none\n======'), level: 1 }, 'Heading\none'),
- ),
+ expectedDoc: doc(heading({ ...source('Heading\none\n======'), level: 1 }, 'Heading\none')),
},
{
markdown: `
@@ -475,9 +458,7 @@ Heading
two
-------
`,
- expectedDoc: doc(
- heading({ ...sourceAttrs('0:19', 'Heading\ntwo\n-------'), level: 2 }, 'Heading\ntwo'),
- ),
+ expectedDoc: doc(heading({ ...source('Heading\ntwo\n-------'), level: 2 }, 'Heading\ntwo')),
},
{
markdown: `
@@ -486,15 +467,9 @@ two
`,
expectedDoc: doc(
bulletList(
- sourceAttrs('0:27', '- List item 1\n- List item 2'),
- listItem(
- sourceAttrs('0:13', '- List item 1'),
- paragraph(sourceAttrs('2:13', 'List item 1'), 'List item 1'),
- ),
- listItem(
- sourceAttrs('14:27', '- List item 2'),
- paragraph(sourceAttrs('16:27', 'List item 2'), 'List item 2'),
- ),
+ source('- List item 1\n- List item 2'),
+ listItem(source('- List item 1'), paragraph(source('List item 1'), 'List item 1')),
+ listItem(source('- List item 2'), paragraph(source('List item 2'), 'List item 2')),
),
),
},
@@ -505,15 +480,9 @@ two
`,
expectedDoc: doc(
bulletList(
- sourceAttrs('0:27', '* List item 1\n* List item 2'),
- listItem(
- sourceAttrs('0:13', '* List item 1'),
- paragraph(sourceAttrs('2:13', 'List item 1'), 'List item 1'),
- ),
- listItem(
- sourceAttrs('14:27', '* List item 2'),
- paragraph(sourceAttrs('16:27', 'List item 2'), 'List item 2'),
- ),
+ source('* List item 1\n* List item 2'),
+ listItem(source('* List item 1'), paragraph(source('List item 1'), 'List item 1')),
+ listItem(source('* List item 2'), paragraph(source('List item 2'), 'List item 2')),
),
),
},
@@ -524,15 +493,9 @@ two
`,
expectedDoc: doc(
bulletList(
- sourceAttrs('0:27', '+ List item 1\n+ List item 2'),
- listItem(
- sourceAttrs('0:13', '+ List item 1'),
- paragraph(sourceAttrs('2:13', 'List item 1'), 'List item 1'),
- ),
- listItem(
- sourceAttrs('14:27', '+ List item 2'),
- paragraph(sourceAttrs('16:27', 'List item 2'), 'List item 2'),
- ),
+ source('+ List item 1\n+ List item 2'),
+ listItem(source('+ List item 1'), paragraph(source('List item 1'), 'List item 1')),
+ listItem(source('+ List item 2'), paragraph(source('List item 2'), 'List item 2')),
),
),
},
@@ -543,15 +506,9 @@ two
`,
expectedDoc: doc(
orderedList(
- sourceAttrs('0:29', '1. List item 1\n1. List item 2'),
- listItem(
- sourceAttrs('0:14', '1. List item 1'),
- paragraph(sourceAttrs('3:14', 'List item 1'), 'List item 1'),
- ),
- listItem(
- sourceAttrs('15:29', '1. List item 2'),
- paragraph(sourceAttrs('18:29', 'List item 2'), 'List item 2'),
- ),
+ source('1. List item 1\n1. List item 2'),
+ listItem(source('1. List item 1'), paragraph(source('List item 1'), 'List item 1')),
+ listItem(source('1. List item 2'), paragraph(source('List item 2'), 'List item 2')),
),
),
},
@@ -562,15 +519,9 @@ two
`,
expectedDoc: doc(
orderedList(
- sourceAttrs('0:29', '1. List item 1\n2. List item 2'),
- listItem(
- sourceAttrs('0:14', '1. List item 1'),
- paragraph(sourceAttrs('3:14', 'List item 1'), 'List item 1'),
- ),
- listItem(
- sourceAttrs('15:29', '2. List item 2'),
- paragraph(sourceAttrs('18:29', 'List item 2'), 'List item 2'),
- ),
+ source('1. List item 1\n2. List item 2'),
+ listItem(source('1. List item 1'), paragraph(source('List item 1'), 'List item 1')),
+ listItem(source('2. List item 2'), paragraph(source('List item 2'), 'List item 2')),
),
),
},
@@ -581,15 +532,9 @@ two
`,
expectedDoc: doc(
orderedList(
- sourceAttrs('0:29', '1) List item 1\n2) List item 2'),
- listItem(
- sourceAttrs('0:14', '1) List item 1'),
- paragraph(sourceAttrs('3:14', 'List item 1'), 'List item 1'),
- ),
- listItem(
- sourceAttrs('15:29', '2) List item 2'),
- paragraph(sourceAttrs('18:29', 'List item 2'), 'List item 2'),
- ),
+ source('1) List item 1\n2) List item 2'),
+ listItem(source('1) List item 1'), paragraph(source('List item 1'), 'List item 1')),
+ listItem(source('2) List item 2'), paragraph(source('List item 2'), 'List item 2')),
),
),
},
@@ -600,15 +545,15 @@ two
`,
expectedDoc: doc(
bulletList(
- sourceAttrs('0:33', '- List item 1\n - Sub list item 1'),
+ source('- List item 1\n - Sub list item 1'),
listItem(
- sourceAttrs('0:33', '- List item 1\n - Sub list item 1'),
- paragraph(sourceAttrs('2:13', 'List item 1'), 'List item 1'),
+ source('- List item 1\n - Sub list item 1'),
+ paragraph(source('List item 1'), 'List item 1'),
bulletList(
- sourceAttrs('16:33', '- Sub list item 1'),
+ source('- Sub list item 1'),
listItem(
- sourceAttrs('16:33', '- Sub list item 1'),
- paragraph(sourceAttrs('18:33', 'Sub list item 1'), 'Sub list item 1'),
+ source('- Sub list item 1'),
+ paragraph(source('Sub list item 1'), 'Sub list item 1'),
),
),
),
@@ -624,19 +569,13 @@ two
`,
expectedDoc: doc(
bulletList(
- sourceAttrs(
- '0:66',
- '- List item 1 paragraph 1\n\n List item 1 paragraph 2\n- List item 2',
- ),
- listItem(
- sourceAttrs('0:52', '- List item 1 paragraph 1\n\n List item 1 paragraph 2'),
- paragraph(sourceAttrs('2:25', 'List item 1 paragraph 1'), 'List item 1 paragraph 1'),
- paragraph(sourceAttrs('29:52', 'List item 1 paragraph 2'), 'List item 1 paragraph 2'),
- ),
+ source('- List item 1 paragraph 1\n\n List item 1 paragraph 2\n- List item 2'),
listItem(
- sourceAttrs('53:66', '- List item 2'),
- paragraph(sourceAttrs('55:66', 'List item 2'), 'List item 2'),
+ source('- List item 1 paragraph 1\n\n List item 1 paragraph 2'),
+ paragraph(source('List item 1 paragraph 1'), 'List item 1 paragraph 1'),
+ paragraph(source('List item 1 paragraph 2'), 'List item 1 paragraph 2'),
),
+ listItem(source('- List item 2'), paragraph(source('List item 2'), 'List item 2')),
),
),
},
@@ -646,13 +585,13 @@ two
`,
expectedDoc: doc(
bulletList(
- sourceAttrs('0:41', '- List item with an image ![bar](foo.png)'),
+ source('- List item with an image ![bar](foo.png)'),
listItem(
- sourceAttrs('0:41', '- List item with an image ![bar](foo.png)'),
+ source('- List item with an image ![bar](foo.png)'),
paragraph(
- sourceAttrs('2:41', 'List item with an image ![bar](foo.png)'),
+ source('List item with an image ![bar](foo.png)'),
'List item with an image',
- image({ ...sourceAttrs('26:41', '![bar](foo.png)'), alt: 'bar', src: 'foo.png' }),
+ image({ ...source('![bar](foo.png)'), alt: 'bar', src: 'foo.png' }),
),
),
),
@@ -664,8 +603,8 @@ two
`,
expectedDoc: doc(
blockquote(
- sourceAttrs('0:22', '> This is a blockquote'),
- paragraph(sourceAttrs('2:22', 'This is a blockquote'), 'This is a blockquote'),
+ source('> This is a blockquote'),
+ paragraph(source('This is a blockquote'), 'This is a blockquote'),
),
),
},
@@ -676,17 +615,11 @@ two
`,
expectedDoc: doc(
blockquote(
- sourceAttrs('0:31', '> - List item 1\n> - List item 2'),
+ source('> - List item 1\n> - List item 2'),
bulletList(
- sourceAttrs('2:31', '- List item 1\n> - List item 2'),
- listItem(
- sourceAttrs('2:15', '- List item 1'),
- paragraph(sourceAttrs('4:15', 'List item 1'), 'List item 1'),
- ),
- listItem(
- sourceAttrs('18:31', '- List item 2'),
- paragraph(sourceAttrs('20:31', 'List item 2'), 'List item 2'),
- ),
+ source('- List item 1\n> - List item 2'),
+ listItem(source('- List item 1'), paragraph(source('List item 1'), 'List item 1')),
+ listItem(source('- List item 2'), paragraph(source('List item 2'), 'List item 2')),
),
),
),
@@ -699,10 +632,10 @@ code block
`,
expectedDoc: doc(
- paragraph(sourceAttrs('0:10', 'code block'), 'code block'),
+ paragraph(source('code block'), 'code block'),
codeBlock(
{
- ...sourceAttrs('12:42', " const fn = () => 'GitLab';"),
+ ...source(" const fn = () => 'GitLab';"),
class: 'code highlight',
language: null,
},
@@ -719,7 +652,7 @@ const fn = () => 'GitLab';
expectedDoc: doc(
codeBlock(
{
- ...sourceAttrs('0:44', "```javascript\nconst fn = () => 'GitLab';\n```"),
+ ...source("```javascript\nconst fn = () => 'GitLab';\n```"),
class: 'code highlight',
language: 'javascript',
},
@@ -736,7 +669,7 @@ const fn = () => 'GitLab';
expectedDoc: doc(
codeBlock(
{
- ...sourceAttrs('0:44', "~~~javascript\nconst fn = () => 'GitLab';\n~~~"),
+ ...source("~~~javascript\nconst fn = () => 'GitLab';\n~~~"),
class: 'code highlight',
language: 'javascript',
},
@@ -752,7 +685,7 @@ const fn = () => 'GitLab';
expectedDoc: doc(
codeBlock(
{
- ...sourceAttrs('0:7', '```\n```'),
+ ...source('```\n```'),
class: 'code highlight',
language: null,
},
@@ -770,7 +703,7 @@ const fn = () => 'GitLab';
expectedDoc: doc(
codeBlock(
{
- ...sourceAttrs('0:45', "```javascript\nconst fn = () => 'GitLab';\n\n```"),
+ ...source("```javascript\nconst fn = () => 'GitLab';\n\n```"),
class: 'code highlight',
language: 'javascript',
},
@@ -782,8 +715,8 @@ const fn = () => 'GitLab';
markdown: '~~Strikedthrough text~~',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:23', '~~Strikedthrough text~~'),
- strike(sourceAttrs('0:23', '~~Strikedthrough text~~'), 'Strikedthrough text'),
+ source('~~Strikedthrough text~~'),
+ strike(source('~~Strikedthrough text~~'), 'Strikedthrough text'),
),
),
},
@@ -791,8 +724,8 @@ const fn = () => 'GitLab';
markdown: '<del>Strikedthrough text</del>',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:30', '<del>Strikedthrough text</del>'),
- strike(sourceAttrs('0:30', '<del>Strikedthrough text</del>'), 'Strikedthrough text'),
+ source('<del>Strikedthrough text</del>'),
+ strike(source('<del>Strikedthrough text</del>'), 'Strikedthrough text'),
),
),
},
@@ -800,11 +733,8 @@ const fn = () => 'GitLab';
markdown: '<strike>Strikedthrough text</strike>',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:36', '<strike>Strikedthrough text</strike>'),
- strike(
- sourceAttrs('0:36', '<strike>Strikedthrough text</strike>'),
- 'Strikedthrough text',
- ),
+ source('<strike>Strikedthrough text</strike>'),
+ strike(source('<strike>Strikedthrough text</strike>'), 'Strikedthrough text'),
),
),
},
@@ -812,8 +742,8 @@ const fn = () => 'GitLab';
markdown: '<s>Strikedthrough text</s>',
expectedDoc: doc(
paragraph(
- sourceAttrs('0:26', '<s>Strikedthrough text</s>'),
- strike(sourceAttrs('0:26', '<s>Strikedthrough text</s>'), 'Strikedthrough text'),
+ source('<s>Strikedthrough text</s>'),
+ strike(source('<s>Strikedthrough text</s>'), 'Strikedthrough text'),
),
),
},
@@ -826,21 +756,21 @@ const fn = () => 'GitLab';
taskList(
{
numeric: false,
- ...sourceAttrs('0:45', '- [ ] task list item 1\n- [ ] task list item 2'),
+ ...source('- [ ] task list item 1\n- [ ] task list item 2'),
},
taskItem(
{
checked: false,
- ...sourceAttrs('0:22', '- [ ] task list item 1'),
+ ...source('- [ ] task list item 1'),
},
- paragraph(sourceAttrs('6:22', 'task list item 1'), 'task list item 1'),
+ paragraph(source('task list item 1'), 'task list item 1'),
),
taskItem(
{
checked: false,
- ...sourceAttrs('23:45', '- [ ] task list item 2'),
+ ...source('- [ ] task list item 2'),
},
- paragraph(sourceAttrs('29:45', 'task list item 2'), 'task list item 2'),
+ paragraph(source('task list item 2'), 'task list item 2'),
),
),
),
@@ -854,21 +784,21 @@ const fn = () => 'GitLab';
taskList(
{
numeric: false,
- ...sourceAttrs('0:45', '- [x] task list item 1\n- [x] task list item 2'),
+ ...source('- [x] task list item 1\n- [x] task list item 2'),
},
taskItem(
{
checked: true,
- ...sourceAttrs('0:22', '- [x] task list item 1'),
+ ...source('- [x] task list item 1'),
},
- paragraph(sourceAttrs('6:22', 'task list item 1'), 'task list item 1'),
+ paragraph(source('task list item 1'), 'task list item 1'),
),
taskItem(
{
checked: true,
- ...sourceAttrs('23:45', '- [x] task list item 2'),
+ ...source('- [x] task list item 2'),
},
- paragraph(sourceAttrs('29:45', 'task list item 2'), 'task list item 2'),
+ paragraph(source('task list item 2'), 'task list item 2'),
),
),
),
@@ -882,21 +812,21 @@ const fn = () => 'GitLab';
taskList(
{
numeric: true,
- ...sourceAttrs('0:47', '1. [ ] task list item 1\n2. [ ] task list item 2'),
+ ...source('1. [ ] task list item 1\n2. [ ] task list item 2'),
},
taskItem(
{
checked: false,
- ...sourceAttrs('0:23', '1. [ ] task list item 1'),
+ ...source('1. [ ] task list item 1'),
},
- paragraph(sourceAttrs('7:23', 'task list item 1'), 'task list item 1'),
+ paragraph(source('task list item 1'), 'task list item 1'),
),
taskItem(
{
checked: false,
- ...sourceAttrs('24:47', '2. [ ] task list item 2'),
+ ...source('2. [ ] task list item 2'),
},
- paragraph(sourceAttrs('31:47', 'task list item 2'), 'task list item 2'),
+ paragraph(source('task list item 2'), 'task list item 2'),
),
),
),
@@ -909,16 +839,16 @@ const fn = () => 'GitLab';
`,
expectedDoc: doc(
table(
- sourceAttrs('0:29', '| a | b |\n|---|---|\n| c | d |'),
+ source('| a | b |\n|---|---|\n| c | d |'),
tableRow(
- sourceAttrs('0:9', '| a | b |'),
- tableHeader(sourceAttrs('0:5', '| a |'), paragraph(sourceAttrs('2:3', 'a'), 'a')),
- tableHeader(sourceAttrs('5:9', ' b |'), paragraph(sourceAttrs('6:7', 'b'), 'b')),
+ source('| a | b |'),
+ tableHeader(source('| a |'), paragraph(source('a'), 'a')),
+ tableHeader(source(' b |'), paragraph(source('b'), 'b')),
),
tableRow(
- sourceAttrs('20:29', '| c | d |'),
- tableCell(sourceAttrs('20:25', '| c |'), paragraph(sourceAttrs('22:23', 'c'), 'c')),
- tableCell(sourceAttrs('25:29', ' d |'), paragraph(sourceAttrs('26:27', 'd'), 'd')),
+ source('| c | d |'),
+ tableCell(source('| c |'), paragraph(source('c'), 'c')),
+ tableCell(source(' d |'), paragraph(source('d'), 'd')),
),
),
),
@@ -936,30 +866,29 @@ const fn = () => 'GitLab';
`,
expectedDoc: doc(
table(
- sourceAttrs(
- '0:132',
+ source(
'<table>\n <tr>\n <th colspan="2" rowspan="5">Header</th>\n </tr>\n <tr>\n <td colspan="2" rowspan="5">Body</td>\n </tr>\n</table>',
),
tableRow(
- sourceAttrs('10:66', '<tr>\n <th colspan="2" rowspan="5">Header</th>\n </tr>'),
+ source('<tr>\n <th colspan="2" rowspan="5">Header</th>\n </tr>'),
tableHeader(
{
- ...sourceAttrs('19:58', '<th colspan="2" rowspan="5">Header</th>'),
+ ...source('<th colspan="2" rowspan="5">Header</th>'),
colspan: 2,
rowspan: 5,
},
- paragraph(sourceAttrs('47:53', 'Header'), 'Header'),
+ paragraph(source('Header'), 'Header'),
),
),
tableRow(
- sourceAttrs('69:123', '<tr>\n <td colspan="2" rowspan="5">Body</td>\n </tr>'),
+ source('<tr>\n <td colspan="2" rowspan="5">Body</td>\n </tr>'),
tableCell(
{
- ...sourceAttrs('78:115', '<td colspan="2" rowspan="5">Body</td>'),
+ ...source('<td colspan="2" rowspan="5">Body</td>'),
colspan: 2,
rowspan: 5,
},
- paragraph(sourceAttrs('106:110', 'Body'), 'Body'),
+ paragraph(source('Body'), 'Body'),
),
),
),
@@ -977,24 +906,177 @@ Paragraph
`,
expectedDoc: doc(
paragraph(
- sourceAttrs('0:30', 'This is a footnote [^footnote]'),
+ source('This is a footnote [^footnote]'),
'This is a footnote ',
footnoteReference({
- ...sourceAttrs('19:30', '[^footnote]'),
+ ...source('[^footnote]'),
identifier: 'footnote',
label: 'footnote',
}),
),
- paragraph(sourceAttrs('32:41', 'Paragraph'), 'Paragraph'),
+ paragraph(source('Paragraph'), 'Paragraph'),
footnoteDefinition(
{
- ...sourceAttrs('43:75', '[^footnote]: Footnote definition'),
+ ...source('[^footnote]: Footnote definition'),
identifier: 'footnote',
label: 'footnote',
},
- paragraph(sourceAttrs('56:75', 'Footnote definition'), 'Footnote definition'),
+ paragraph(source('Footnote definition'), 'Footnote definition'),
+ ),
+ paragraph(source('Paragraph'), 'Paragraph'),
+ ),
+ },
+ {
+ markdown: `
+<div>div</div>
+`,
+ expectedDoc: doc(div(source('<div>div</div>'), paragraph(source('div'), 'div'))),
+ },
+ {
+ markdown: `
+[![moon](moon.jpg)](/uri)
+`,
+ expectedDoc: doc(
+ paragraph(
+ source('[![moon](moon.jpg)](/uri)'),
+ link(
+ { ...source('[![moon](moon.jpg)](/uri)'), href: '/uri' },
+ image({ ...source('![moon](moon.jpg)'), src: 'moon.jpg', alt: 'moon' }),
+ ),
+ ),
+ ),
+ },
+ {
+ markdown: `
+<del>
+
+*foo*
+
+</del>
+`,
+ expectedDoc: doc(
+ paragraph(
+ source('*foo*'),
+ strike(source('<del>\n\n*foo*\n\n</del>'), italic(source('*foo*'), 'foo')),
+ ),
+ ),
+ expectedMarkdown: '*foo*',
+ },
+ {
+ markdown: `
+~[moon](moon.jpg) and [sun](sun.jpg)~
+`,
+ expectedDoc: doc(
+ paragraph(
+ source('~[moon](moon.jpg) and [sun](sun.jpg)~'),
+ strike(
+ source('~[moon](moon.jpg) and [sun](sun.jpg)~'),
+ link({ ...source('[moon](moon.jpg)'), href: 'moon.jpg' }, 'moon'),
+ ),
+ strike(source('~[moon](moon.jpg) and [sun](sun.jpg)~'), ' and '),
+ strike(
+ source('~[moon](moon.jpg) and [sun](sun.jpg)~'),
+ link({ ...source('[sun](sun.jpg)'), href: 'sun.jpg' }, 'sun'),
+ ),
+ ),
+ ),
+ },
+ {
+ markdown: `
+<del>
+
+**Paragraph 1**
+
+_Paragraph 2_
+
+</del>
+ `,
+ expectedDoc: doc(
+ paragraph(
+ source('**Paragraph 1**'),
+ strike(
+ source('<del>\n\n**Paragraph 1**\n\n_Paragraph 2_\n\n</del>'),
+ bold(source('**Paragraph 1**'), 'Paragraph 1'),
+ ),
+ ),
+ paragraph(
+ source('_Paragraph 2_'),
+ strike(
+ source('<del>\n\n**Paragraph 1**\n\n_Paragraph 2_\n\n</del>'),
+ italic(source('_Paragraph 2_'), 'Paragraph 2'),
+ ),
+ ),
+ ),
+ expectedMarkdown: `**Paragraph 1**
+
+_Paragraph 2_`,
+ },
+ /* TODO
+ * Implement proper editing support for HTML comments in the Content Editor
+ * https://gitlab.com/gitlab-org/gitlab/-/issues/342173
+ */
+ {
+ markdown: '<!-- HTML comment -->',
+ expectedDoc: doc(paragraph()),
+ expectedMarkdown: '',
+ },
+ {
+ markdown: `
+<![CDATA[
+function matchwo(a,b)
+{
+ if (a < b && a < 0) then {
+ return 1;
+
+ } else {
+
+ return 0;
+ }
+}
+]]>
+ `,
+ expectedDoc: doc(paragraph()),
+ expectedMarkdown: '',
+ },
+ {
+ markdown: `
+<!-- foo -->*bar*
+*baz*
+ `,
+ expectedDoc: doc(
+ paragraph(source('*bar*'), '*bar*\n'),
+ paragraph(source('*baz*'), italic(source('*baz*'), 'baz')),
+ ),
+ expectedMarkdown: `*bar*
+
+*baz*`,
+ },
+ {
+ markdown: `
+<table><tr><td>
+<pre>
+**Hello**,
+
+_world_.
+</pre>
+</td></tr></table>
+`,
+ expectedDoc: doc(
+ table(
+ source('<table><tr><td>\n<pre>\n**Hello**,\n\n_world_.\n</pre>\n</td></tr></table>'),
+ tableRow(
+ source('<tr><td>\n<pre>\n**Hello**,\n\n_world_.\n</pre>\n</td></tr>'),
+ tableCell(
+ source('<td>\n<pre>\n**Hello**,\n\n_world_.\n</pre>\n</td>'),
+ pre(
+ source('<pre>\n**Hello**,\n\n_world_.\n</pre>'),
+ paragraph(source('**Hello**,'), '**Hello**,\n'),
+ paragraph(source('_world_.\n'), italic(source('_world_'), 'world'), '.\n'),
+ ),
+ paragraph(),
+ ),
+ ),
),
- paragraph(sourceAttrs('77:86', 'Paragraph'), 'Paragraph'),
),
},
];
@@ -1002,12 +1084,75 @@ Paragraph
const runOnly = examples.find((example) => example.only === true);
const runExamples = runOnly ? [runOnly] : examples;
- it.each(runExamples)('processes %s correctly', async ({ markdown, expectedDoc }) => {
- const trimmed = markdown.trim();
- const document = await deserialize(trimmed);
+ it.each(runExamples)(
+ 'processes %s correctly',
+ async ({ markdown, expectedDoc, expectedMarkdown }) => {
+ const trimmed = markdown.trim();
+ const document = await deserialize(trimmed);
- expect(expectedDoc).not.toBeFalsy();
- expect(document.toJSON()).toEqual(expectedDoc.toJSON());
- expect(serialize(document)).toEqual(trimmed);
- });
+ expect(expectedDoc).not.toBeFalsy();
+ expect(document.toJSON()).toEqual(expectedDoc.toJSON());
+ expect(serialize(document)).toEqual(expectedMarkdown ?? trimmed);
+ },
+ );
+
+ /**
+ * DISCLAIMER: THIS IS A SECURITY-ORIENTED TEST THAT ENSURES
+ * THE CLIENT-SIDE PARSER IGNORES DANGEROUS TAGS THAT ARE NOT
+ * EXPLICITLY SUPPORTED.
+ *
+ * PLEASE CONSIDER THIS INFORMATION WHILE MODIFYING THESE TESTS
+ */
+ it.each([
+ {
+ markdown: `
+<script>
+alert("Hello world")
+</script>
+ `,
+ expectedHtml: '<p></p>',
+ },
+ {
+ markdown: `
+<foo>Hello</foo>
+ `,
+ expectedHtml: '<p></p>',
+ },
+ {
+ markdown: `
+<h1 class="heading-with-class">Header</h1>
+ `,
+ expectedHtml: '<h1>Header</h1>',
+ },
+ {
+ markdown: `
+<a id="link-id">Header</a> and other text
+ `,
+ expectedHtml:
+ '<p><a target="_blank" rel="noopener noreferrer nofollow">Header</a> and other text</p>',
+ },
+ {
+ markdown: `
+<style>
+body {
+ display: none;
+}
+</style>
+ `,
+ expectedHtml: '<p></p>',
+ },
+ {
+ markdown: '<div style="transform">div</div>',
+ expectedHtml: '<div><p>div</p></div>',
+ },
+ ])(
+ 'removes unknown tags and unsupported attributes from HTML output',
+ async ({ markdown, expectedHtml }) => {
+ const document = await deserialize(markdown);
+
+ tiptapEditor.commands.setContent(document.toJSON());
+
+ expect(tiptapEditor.getHTML()).toEqual(expectedHtml);
+ },
+ );
});
diff --git a/spec/frontend/content_editor/render_html_and_json_for_all_examples.js b/spec/frontend/content_editor/render_html_and_json_for_all_examples.js
new file mode 100644
index 00000000000..116a26cf7d5
--- /dev/null
+++ b/spec/frontend/content_editor/render_html_and_json_for_all_examples.js
@@ -0,0 +1,115 @@
+import { DOMSerializer } from 'prosemirror-model';
+// TODO: DRY up duplication with spec/frontend/content_editor/services/markdown_serializer_spec.js
+// See https://gitlab.com/groups/gitlab-org/-/epics/7719#plan
+import Blockquote from '~/content_editor/extensions/blockquote';
+import Bold from '~/content_editor/extensions/bold';
+import BulletList from '~/content_editor/extensions/bullet_list';
+import Code from '~/content_editor/extensions/code';
+import CodeBlockHighlight from '~/content_editor/extensions/code_block_highlight';
+import DescriptionItem from '~/content_editor/extensions/description_item';
+import DescriptionList from '~/content_editor/extensions/description_list';
+import Details from '~/content_editor/extensions/details';
+import DetailsContent from '~/content_editor/extensions/details_content';
+import Emoji from '~/content_editor/extensions/emoji';
+import Figure from '~/content_editor/extensions/figure';
+import FigureCaption from '~/content_editor/extensions/figure_caption';
+import FootnoteDefinition from '~/content_editor/extensions/footnote_definition';
+import FootnoteReference from '~/content_editor/extensions/footnote_reference';
+import FootnotesSection from '~/content_editor/extensions/footnotes_section';
+import HardBreak from '~/content_editor/extensions/hard_break';
+import Heading from '~/content_editor/extensions/heading';
+import HorizontalRule from '~/content_editor/extensions/horizontal_rule';
+import HTMLNodes from '~/content_editor/extensions/html_nodes';
+import Image from '~/content_editor/extensions/image';
+import InlineDiff from '~/content_editor/extensions/inline_diff';
+import Italic from '~/content_editor/extensions/italic';
+import Link from '~/content_editor/extensions/link';
+import ListItem from '~/content_editor/extensions/list_item';
+import OrderedList from '~/content_editor/extensions/ordered_list';
+import Strike from '~/content_editor/extensions/strike';
+import Table from '~/content_editor/extensions/table';
+import TableCell from '~/content_editor/extensions/table_cell';
+import TableHeader from '~/content_editor/extensions/table_header';
+import TableRow from '~/content_editor/extensions/table_row';
+import TaskItem from '~/content_editor/extensions/task_item';
+import TaskList from '~/content_editor/extensions/task_list';
+import createMarkdownDeserializer from '~/content_editor/services/remark_markdown_deserializer';
+import { createTestEditor } from 'jest/content_editor/test_utils';
+
+const tiptapEditor = createTestEditor({
+ extensions: [
+ Blockquote,
+ Bold,
+ BulletList,
+ Code,
+ CodeBlockHighlight,
+ DescriptionItem,
+ DescriptionList,
+ Details,
+ DetailsContent,
+ Emoji,
+ FootnoteDefinition,
+ FootnoteReference,
+ FootnotesSection,
+ Figure,
+ FigureCaption,
+ HardBreak,
+ Heading,
+ HorizontalRule,
+ ...HTMLNodes,
+ Image,
+ InlineDiff,
+ Italic,
+ Link,
+ ListItem,
+ OrderedList,
+ Strike,
+ Table,
+ TableCell,
+ TableHeader,
+ TableRow,
+ TaskItem,
+ TaskList,
+ ],
+});
+
+export const IMPLEMENTATION_ERROR_MSG = 'Error - check implementation';
+
+async function renderMarkdownToHTMLAndJSON(markdown, schema, deserializer) {
+ let prosemirrorDocument;
+ try {
+ const { document } = await deserializer.deserialize({ schema, markdown });
+ prosemirrorDocument = document;
+ } catch (e) {
+ const errorMsg = `${IMPLEMENTATION_ERROR_MSG}:\n${e.message}`;
+ return {
+ html: errorMsg,
+ json: errorMsg,
+ };
+ }
+
+ const documentFragment = DOMSerializer.fromSchema(schema).serializeFragment(
+ prosemirrorDocument.content,
+ );
+ const htmlString = Array.from(documentFragment.children)
+ .map((el) => el.outerHTML)
+ .join('\n');
+
+ const json = prosemirrorDocument.toJSON();
+ const jsonString = JSON.stringify(json, null, 2);
+ return { html: htmlString, json: jsonString };
+}
+
+export function renderHtmlAndJsonForAllExamples(markdownExamples) {
+ const { schema } = tiptapEditor;
+ const deserializer = createMarkdownDeserializer();
+ const exampleNames = Object.keys(markdownExamples);
+
+ return exampleNames.reduce(async (promisedExamples, exampleName) => {
+ const markdown = markdownExamples[exampleName];
+ const htmlAndJson = await renderMarkdownToHTMLAndJSON(markdown, schema, deserializer);
+ const examples = await promisedExamples;
+ examples[exampleName] = htmlAndJson;
+ return examples;
+ }, Promise.resolve({}));
+}
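Note: the final reduce folds per-example results into one object keyed by example name. A behaviorally similar sketch written with `Promise.all`, shown only to clarify the shape of the returned value (not part of the patch):

async function renderAllExamplesSketch(markdownExamples, render) {
  const names = Object.keys(markdownExamples);
  const results = await Promise.all(names.map((name) => render(markdownExamples[name])));
  // Produces { [exampleName]: { html, json } }, the same shape the specs consume.
  return Object.fromEntries(names.map((name, i) => [name, results[i]]));
}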
diff --git a/spec/frontend/content_editor/services/markdown_serializer_spec.js b/spec/frontend/content_editor/services/markdown_serializer_spec.js
index 13e9efaea59..509cda3046c 100644
--- a/spec/frontend/content_editor/services/markdown_serializer_spec.js
+++ b/spec/frontend/content_editor/services/markdown_serializer_spec.js
@@ -7,7 +7,6 @@ import DescriptionItem from '~/content_editor/extensions/description_item';
import DescriptionList from '~/content_editor/extensions/description_list';
import Details from '~/content_editor/extensions/details';
import DetailsContent from '~/content_editor/extensions/details_content';
-import Division from '~/content_editor/extensions/division';
import Emoji from '~/content_editor/extensions/emoji';
import Figure from '~/content_editor/extensions/figure';
import FigureCaption from '~/content_editor/extensions/figure_caption';
@@ -16,6 +15,8 @@ import FootnoteReference from '~/content_editor/extensions/footnote_reference';
import HardBreak from '~/content_editor/extensions/hard_break';
import Heading from '~/content_editor/extensions/heading';
import HorizontalRule from '~/content_editor/extensions/horizontal_rule';
+import HTMLMarks from '~/content_editor/extensions/html_marks';
+import HTMLNodes from '~/content_editor/extensions/html_nodes';
import Image from '~/content_editor/extensions/image';
import InlineDiff from '~/content_editor/extensions/inline_diff';
import Italic from '~/content_editor/extensions/italic';
@@ -48,7 +49,6 @@ const tiptapEditor = createTestEditor({
DescriptionList,
Details,
DetailsContent,
- Division,
Emoji,
FootnoteDefinition,
FootnoteReference,
@@ -71,6 +71,8 @@ const tiptapEditor = createTestEditor({
TableRow,
TaskItem,
TaskList,
+ ...HTMLMarks,
+ ...HTMLNodes,
],
});
@@ -84,7 +86,7 @@ const {
codeBlock,
details,
detailsContent,
- division,
+ div,
descriptionItem,
descriptionList,
emoji,
@@ -120,7 +122,6 @@ const {
codeBlock: { nodeType: CodeBlockHighlight.name },
details: { nodeType: Details.name },
detailsContent: { nodeType: DetailsContent.name },
- division: { nodeType: Division.name },
descriptionItem: { nodeType: DescriptionItem.name },
descriptionList: { nodeType: DescriptionList.name },
emoji: { markType: Emoji.name },
@@ -145,6 +146,13 @@ const {
tableRow: { nodeType: TableRow.name },
taskItem: { nodeType: TaskItem.name },
taskList: { nodeType: TaskList.name },
+ ...HTMLNodes.reduce(
+ (builders, htmlNode) => ({
+ ...builders,
+ [htmlNode.name]: { nodeType: htmlNode.name },
+ }),
+ {},
+ ),
},
});
@@ -725,8 +733,8 @@ _inception_
it('correctly renders div', () => {
expect(
serialize(
- division(paragraph('just a paragraph in a div')),
- division(paragraph('just some ', bold('styled'), ' ', italic('content'), ' in a div')),
+ div(paragraph('just a paragraph in a div')),
+ div(paragraph('just some ', bold('styled'), ' ', italic('content'), ' in a div')),
),
).toBe(
'<div>just a paragraph in a div</div>\n<div>\n\njust some **styled** _content_ in a div\n\n</div>',
@@ -1169,7 +1177,7 @@ Oranges are orange [^1]
};
it.each`
- mark | content | modifiedContent | editAction
+ mark | markdown | modifiedMarkdown | editAction
${'bold'} | ${'**bold**'} | ${'**bold modified**'} | ${defaultEditAction}
${'bold'} | ${'__bold__'} | ${'__bold modified__'} | ${defaultEditAction}
${'bold'} | ${'<strong>bold</strong>'} | ${'<strong>bold modified</strong>'} | ${defaultEditAction}
@@ -1205,10 +1213,10 @@ Oranges are orange [^1]
${'taskList'} | ${'2) [x] task list item'} | ${'2) [x] task list item modified'} | ${defaultEditAction}
`(
- 'preserves original $mark syntax when sourceMarkdown is available for $content',
+ 'preserves original $mark syntax when sourceMarkdown is available for $markdown',
- async ({ content, modifiedContent, editAction }) => {
+ async ({ markdown, modifiedMarkdown, editAction }) => {
const { document } = await remarkMarkdownDeserializer().deserialize({
schema: tiptapEditor.schema,
- content,
+ markdown,
});
editAction(document);
@@ -1218,7 +1226,7 @@ Oranges are orange [^1]
doc: tiptapEditor.state.doc,
});
- expect(serialized).toEqual(modifiedContent);
+ expect(serialized).toEqual(modifiedMarkdown);
},
);
});
diff --git a/spec/frontend/content_editor/services/markdown_sourcemap_spec.js b/spec/frontend/content_editor/services/markdown_sourcemap_spec.js
index 8a304c73163..2efc73ddef8 100644
--- a/spec/frontend/content_editor/services/markdown_sourcemap_spec.js
+++ b/spec/frontend/content_editor/services/markdown_sourcemap_spec.js
@@ -77,7 +77,7 @@ describe('content_editor/services/markdown_sourcemap', () => {
render: () => BULLET_LIST_HTML,
}).deserialize({
schema: tiptapEditor.schema,
- content: BULLET_LIST_MARKDOWN,
+ markdown: BULLET_LIST_MARKDOWN,
});
const expected = doc(
diff --git a/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js b/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js
index 2001f5c1441..bd4ed950f9d 100644
--- a/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js
+++ b/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js
@@ -1,3 +1,4 @@
+import { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
@@ -148,6 +149,7 @@ describe('custom metrics form fields component', () => {
it('expect loading message to display', async () => {
const queryInput = wrapper.find(`input[name="${queryInputName}"]`);
queryInput.setValue('query');
+ await nextTick();
expect(wrapper.text()).toContain('Validating query');
});
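
The added `await nextTick()` matters because Vue batches DOM patches: `setValue` updates the model synchronously, but the rendered text only changes on the next tick. A self-contained sketch of the same pattern, using a throwaway component instead of the real form fields:

import { nextTick } from 'vue';
import { mount } from '@vue/test-utils';

// Hypothetical component used only for this illustration.
const QueryField = {
  data: () => ({ query: '' }),
  template: `<div><input name="query" v-model="query" /><span v-if="query">Validating query</span></div>`,
};

it('shows the message only after the next DOM update', async () => {
  const wrapper = mount(QueryField);
  wrapper.find('input[name="query"]').setValue('query');
  await nextTick(); // flush the queued re-render before asserting on text
  expect(wrapper.text()).toContain('Validating query');
});
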
diff --git a/spec/frontend/cycle_analytics/path_navigation_spec.js b/spec/frontend/cycle_analytics/path_navigation_spec.js
index e8c4ebd3a38..fa9eadbd071 100644
--- a/spec/frontend/cycle_analytics/path_navigation_spec.js
+++ b/spec/frontend/cycle_analytics/path_navigation_spec.js
@@ -85,7 +85,7 @@ describe('Project PathNavigation', () => {
const result = findPathNavigationTitles();
transformedProjectStagePathData.forEach(({ title, metric }, index) => {
expect(result[index]).toContain(title);
- expect(result[index]).toContain(metric);
+ expect(result[index]).toContain(metric.toString());
});
});
diff --git a/spec/frontend/cycle_analytics/value_stream_metrics_spec.js b/spec/frontend/cycle_analytics/value_stream_metrics_spec.js
index df86b10cba3..23e41f35b00 100644
--- a/spec/frontend/cycle_analytics/value_stream_metrics_spec.js
+++ b/spec/frontend/cycle_analytics/value_stream_metrics_spec.js
@@ -55,10 +55,10 @@ describe('ValueStreamMetrics', () => {
describe('with successful requests', () => {
beforeEach(() => {
mockGetValueStreamSummaryMetrics = jest.fn().mockResolvedValue({ data: metricsData });
- wrapper = createComponent();
});
it('will display a loader with pending requests', async () => {
+ wrapper = createComponent();
await nextTick();
expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(true);
@@ -66,6 +66,7 @@ describe('ValueStreamMetrics', () => {
describe('with data loaded', () => {
beforeEach(async () => {
+ wrapper = createComponent();
await waitForPromises();
});
diff --git a/spec/frontend/design_management/components/design_sidebar_spec.js b/spec/frontend/design_management/components/design_sidebar_spec.js
index 40968d9204a..f13796138bd 100644
--- a/spec/frontend/design_management/components/design_sidebar_spec.js
+++ b/spec/frontend/design_management/components/design_sidebar_spec.js
@@ -1,7 +1,6 @@
-import { GlCollapse, GlPopover } from '@gitlab/ui';
+import { GlAccordionItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
-import Cookies from '~/lib/utils/cookies';
import DesignDiscussion from '~/design_management/components/design_notes/design_discussion.vue';
import DesignNoteSignedOut from '~/design_management/components/design_notes/design_note_signed_out.vue';
import DesignSidebar from '~/design_management/components/design_sidebar.vue';
@@ -27,8 +26,6 @@ const $route = {
},
};
-const cookieKey = 'hide_design_resolved_comments_popover';
-
const mutate = jest.fn().mockResolvedValue();
describe('Design management design sidebar component', () => {
@@ -40,9 +37,7 @@ describe('Design management design sidebar component', () => {
const findUnresolvedDiscussions = () => wrapper.findAll('[data-testid="unresolved-discussion"]');
const findResolvedDiscussions = () => wrapper.findAll('[data-testid="resolved-discussion"]');
const findParticipants = () => wrapper.find(Participants);
- const findCollapsible = () => wrapper.find(GlCollapse);
- const findToggleResolvedCommentsButton = () => wrapper.find('[data-testid="resolved-comments"]');
- const findPopover = () => wrapper.find(GlPopover);
+ const findResolvedCommentsToggle = () => wrapper.find(GlAccordionItem);
const findNewDiscussionDisclaimer = () =>
wrapper.find('[data-testid="new-discussion-disclaimer"]');
@@ -61,7 +56,6 @@ describe('Design management design sidebar component', () => {
mutate,
},
},
- stubs: { GlPopover },
provide: {
registerPath: '/users/sign_up?redirect_to_referer=yes',
signInPath: '/users/sign_in?redirect_to_referer=yes',
@@ -119,7 +113,6 @@ describe('Design management design sidebar component', () => {
describe('when has discussions', () => {
beforeEach(() => {
- Cookies.set(cookieKey, true);
createComponent();
});
@@ -131,26 +124,23 @@ describe('Design management design sidebar component', () => {
expect(findResolvedDiscussions()).toHaveLength(1);
});
- it('has resolved comments collapsible collapsed', () => {
- expect(findCollapsible().attributes('visible')).toBeUndefined();
+ it('has resolved comments accordion item collapsed', () => {
+ expect(findResolvedCommentsToggle().props('visible')).toBe(false);
});
- it('emits toggleResolveComments event on resolve comments button click', () => {
- findToggleResolvedCommentsButton().vm.$emit('click');
+ it('emits toggleResolveComments event on resolve comments button click', async () => {
+ findResolvedCommentsToggle().vm.$emit('input', true);
+ await nextTick();
expect(wrapper.emitted('toggleResolvedComments')).toHaveLength(1);
});
- it('opens a collapsible when resolvedDiscussionsExpanded prop changes to true', async () => {
- expect(findCollapsible().attributes('visible')).toBeUndefined();
+ it('opens the accordion item when resolvedDiscussionsExpanded prop changes to true', async () => {
+ expect(findResolvedCommentsToggle().props('visible')).toBe(false);
wrapper.setProps({
resolvedDiscussionsExpanded: true,
});
await nextTick();
- expect(findCollapsible().attributes('visible')).toBe('true');
- });
-
- it('does not popover about resolved comments', () => {
- expect(findPopover().exists()).toBe(false);
+ expect(findResolvedCommentsToggle().props('visible')).toBe(true);
});
it('sends a mutation to set an active discussion when clicking on a discussion', () => {
@@ -232,36 +222,6 @@ describe('Design management design sidebar component', () => {
});
});
- describe('when showing resolved discussions for the first time', () => {
- beforeEach(() => {
- Cookies.set(cookieKey, false);
- createComponent();
- });
-
- it('renders a popover if we show resolved comments collapsible for the first time', () => {
- expect(findPopover().exists()).toBe(true);
- });
-
- it('scrolls to resolved threads link', () => {
- expect(scrollIntoViewMock).toHaveBeenCalled();
- });
-
- it('dismisses a popover on the outside click', async () => {
- wrapper.trigger('click');
- await nextTick();
- expect(findPopover().exists()).toBe(false);
- });
-
- it(`sets a ${cookieKey} cookie on clicking outside the popover`, () => {
- jest.spyOn(Cookies, 'set');
- wrapper.trigger('click');
- expect(Cookies.set).toHaveBeenCalledWith(cookieKey, 'true', {
- expires: 365 * 10,
- secure: false,
- });
- });
- });
-
describe('when user is not logged in', () => {
const findDesignNoteSignedOut = () => wrapper.findComponent(DesignNoteSignedOut);
@@ -292,7 +252,6 @@ describe('Design management design sidebar component', () => {
describe('design has discussions', () => {
beforeEach(() => {
- Cookies.set(cookieKey, true);
createComponent();
});
diff --git a/spec/frontend/design_management/components/image_spec.js b/spec/frontend/design_management/components/image_spec.js
index e27b2bc9fa5..65ee0ae6238 100644
--- a/spec/frontend/design_management/components/image_spec.js
+++ b/spec/frontend/design_management/components/image_spec.js
@@ -1,6 +1,7 @@
import { GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import DesignImage from '~/design_management/components/image.vue';
describe('Design management large image component', () => {
@@ -15,6 +16,10 @@ describe('Design management large image component', () => {
wrapper.setData(data);
}
+ beforeEach(() => {
+ stubPerformanceWebAPI();
+ });
+
afterEach(() => {
wrapper.destroy();
});
diff --git a/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
index be736184e60..9997f02cd01 100644
--- a/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
@@ -7,6 +7,8 @@ exports[`Design management index page designs renders error 1`] = `
>
<!---->
+ <!---->
+
<div
class="gl-mt-6"
>
@@ -39,6 +41,8 @@ exports[`Design management index page designs renders loading icon 1`] = `
>
<!---->
+ <!---->
+
<div
class="gl-mt-6"
>
diff --git a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
index 0f2857821ea..3177a5e016c 100644
--- a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
@@ -88,57 +88,26 @@ exports[`Design management design index page renders design index 1`] = `
signinpath=""
/>
- <gl-button-stub
- buttontextclasses=""
- category="primary"
- class="link-inherit-color gl-text-body gl-text-decoration-none gl-font-weight-bold gl-mb-4"
- data-testid="resolved-comments"
- icon="chevron-right"
- id="resolved-comments"
- size="medium"
- variant="link"
+ <gl-accordion-stub
+ class="gl-mb-5"
+ headerlevel="3"
>
- Resolved Comments (1)
-
- </gl-button-stub>
-
- <gl-popover-stub
- container="popovercontainer"
- cssclasses=""
- placement="top"
- show="true"
- target="resolved-comments"
- title="Resolved Comments"
- >
- <p>
-
- Comments you resolve can be viewed and unresolved by going to the "Resolved Comments" section below
-
- </p>
-
- <a
- href="https://docs.gitlab.com/ee/user/project/issues/design_management.html#resolve-design-threads"
- rel="noopener noreferrer"
- target="_blank"
+ <gl-accordion-item-stub
+ headerclass="gl-mb-5!"
+ title="Resolved Comments (1)"
>
- Learn more about resolving comments
- </a>
- </gl-popover-stub>
-
- <gl-collapse-stub
- class="gl-mt-3"
- >
- <design-discussion-stub
- data-testid="resolved-discussion"
- designid="gid::/gitlab/Design/1"
- discussion="[object Object]"
- discussionwithopenform=""
- markdownpreviewpath="/project-path/preview_markdown?target_type=Issue"
- noteableid="gid::/gitlab/Design/1"
- registerpath=""
- signinpath=""
- />
- </gl-collapse-stub>
+ <design-discussion-stub
+ data-testid="resolved-discussion"
+ designid="gid::/gitlab/Design/1"
+ discussion="[object Object]"
+ discussionwithopenform=""
+ markdownpreviewpath="/project-path/preview_markdown?target_type=Issue"
+ noteableid="gid::/gitlab/Design/1"
+ registerpath=""
+ signinpath=""
+ />
+ </gl-accordion-item-stub>
+ </gl-accordion-stub>
</div>
</div>
diff --git a/spec/frontend/design_management/pages/index_spec.js b/spec/frontend/design_management/pages/index_spec.js
index 087655d10f7..21be7bd148b 100644
--- a/spec/frontend/design_management/pages/index_spec.js
+++ b/spec/frontend/design_management/pages/index_spec.js
@@ -1,5 +1,4 @@
import { GlEmptyState } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo, { ApolloMutation } from 'vue-apollo';
@@ -9,6 +8,7 @@ import VueDraggable from 'vuedraggable';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import permissionsQuery from 'shared_queries/design_management/design_permissions.query.graphql';
import getDesignListQuery from 'shared_queries/design_management/get_design_list.query.graphql';
import DeleteButton from '~/design_management/components/delete_button.vue';
@@ -23,6 +23,7 @@ import * as utils from '~/design_management/utils/design_management_utils';
import {
EXISTING_DESIGN_DROP_MANY_FILES_MESSAGE,
EXISTING_DESIGN_DROP_INVALID_FILENAME_MESSAGE,
+ UPLOAD_DESIGN_ERROR,
} from '~/design_management/utils/error_messages';
import {
DESIGN_TRACKING_PAGE_NAME,
@@ -101,20 +102,20 @@ describe('Design management index page', () => {
let moveDesignHandler;
const findDesignCheckboxes = () => wrapper.findAll('.design-checkbox');
- const findSelectAllButton = () => wrapper.find('[data-testid="select-all-designs-button"');
- const findToolbar = () => wrapper.find('.qa-selector-toolbar');
- const findDesignCollectionIsCopying = () =>
- wrapper.find('[data-testid="design-collection-is-copying"');
- const findDeleteButton = () => wrapper.find(DeleteButton);
- const findDropzone = () => wrapper.findAll(DesignDropzone).at(0);
+ const findSelectAllButton = () => wrapper.findByTestId('select-all-designs-button');
+ const findToolbar = () => wrapper.findByTestId('design-selector-toolbar');
+ const findDesignCollectionIsCopying = () => wrapper.findByTestId('design-collection-is-copying');
+ const findDeleteButton = () => wrapper.findComponent(DeleteButton);
+ const findDropzone = () => wrapper.findAllComponents(DesignDropzone).at(0);
const dropzoneClasses = () => findDropzone().classes();
- const findDropzoneWrapper = () => wrapper.find('[data-testid="design-dropzone-wrapper"]');
- const findFirstDropzoneWithDesign = () => wrapper.findAll(DesignDropzone).at(1);
- const findDesignsWrapper = () => wrapper.find('[data-testid="designs-root"]');
+ const findDropzoneWrapper = () => wrapper.findByTestId('design-dropzone-wrapper');
+ const findFirstDropzoneWithDesign = () => wrapper.findAllComponents(DesignDropzone).at(1);
+ const findDesignsWrapper = () => wrapper.findByTestId('designs-root');
const findDesigns = () => wrapper.findAll(Design);
const draggableAttributes = () => wrapper.find(VueDraggable).vm.$attrs;
- const findDesignUploadButton = () => wrapper.find('[data-testid="design-upload-button"]');
- const findDesignToolbarWrapper = () => wrapper.find('[data-testid="design-toolbar-wrapper"]');
+ const findDesignUploadButton = () => wrapper.findByTestId('design-upload-button');
+ const findDesignToolbarWrapper = () => wrapper.findByTestId('design-toolbar-wrapper');
+ const findDesignUpdateAlert = () => wrapper.findByTestId('design-update-alert');
async function moveDesigns(localWrapper) {
await waitForPromises();
@@ -149,7 +150,7 @@ describe('Design management index page', () => {
mutate,
};
- wrapper = shallowMount(Index, {
+ wrapper = shallowMountExtended(Index, {
data() {
return {
allVersions,
@@ -185,7 +186,7 @@ describe('Design management index page', () => {
];
fakeApollo = createMockApollo(requestHandlers, {}, { addTypename: true });
- wrapper = shallowMount(Index, {
+ wrapper = shallowMountExtended(Index, {
apolloProvider: fakeApollo,
router,
stubs: { VueDraggable },
@@ -412,7 +413,8 @@ describe('Design management index page', () => {
await nextTick();
expect(wrapper.vm.filesToBeSaved).toEqual([]);
expect(wrapper.vm.isSaving).toBeFalsy();
- expect(createFlash).toHaveBeenCalled();
+ expect(findDesignUpdateAlert().exists()).toBe(true);
+ expect(findDesignUpdateAlert().text()).toBe(UPLOAD_DESIGN_ERROR);
});
it('does not call mutation if createDesign is false', () => {
@@ -431,19 +433,23 @@ describe('Design management index page', () => {
wrapper.vm.onUploadDesign(new Array(MAXIMUM_FILE_UPLOAD_LIMIT).fill(mockDesigns[0]));
- expect(createFlash).not.toHaveBeenCalled();
+ expect(findDesignUpdateAlert().exists()).toBe(false);
});
- it('warns when too many files are uploaded', () => {
+ it('warns when too many files are uploaded', async () => {
createComponent();
wrapper.vm.onUploadDesign(new Array(MAXIMUM_FILE_UPLOAD_LIMIT + 1).fill(mockDesigns[0]));
+ await nextTick();
- expect(createFlash).toHaveBeenCalled();
+ expect(findDesignUpdateAlert().exists()).toBe(true);
+ expect(findDesignUpdateAlert().text()).toBe(
+ 'The maximum number of designs allowed to be uploaded is 10. Please try again.',
+ );
});
});
- it('flashes warning if designs are skipped', async () => {
+ it('displays warning if designs are skipped', async () => {
createComponent({
mockMutate: () =>
Promise.resolve({
@@ -458,11 +464,8 @@ describe('Design management index page', () => {
]);
await uploadDesign;
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({
- message: 'Upload skipped. test.jpg did not change.',
- types: 'warning',
- });
+ expect(findDesignUpdateAlert().exists()).toBe(true);
+ expect(findDesignUpdateAlert().text()).toBe('Upload skipped. test.jpg did not change.');
});
describe('dragging onto an existing design', () => {
@@ -495,13 +498,17 @@ describe('Design management index page', () => {
description | eventPayload | message
${'> 1 file'} | ${[{ name: 'test' }, { name: 'test-2' }]} | ${EXISTING_DESIGN_DROP_MANY_FILES_MESSAGE}
${'different filename'} | ${[{ name: 'wrong-name' }]} | ${EXISTING_DESIGN_DROP_INVALID_FILENAME_MESSAGE}
- `('calls createFlash when upload has $description', ({ eventPayload, message }) => {
- const designDropzone = findFirstDropzoneWithDesign();
- designDropzone.vm.$emit('change', eventPayload);
-
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({ message });
- });
+ `(
+ 'displays GlAlert component when upload has $description',
+ async ({ eventPayload, message }) => {
+ expect(findDesignUpdateAlert().exists()).toBe(false);
+ const designDropzone = findFirstDropzoneWithDesign();
+ await designDropzone.vm.$emit('change', eventPayload);
+
+ expect(findDesignUpdateAlert().exists()).toBe(true);
+ expect(findDesignUpdateAlert().text()).toBe(message);
+ },
+ );
});
describe('tracking', () => {
@@ -804,7 +811,7 @@ describe('Design management index page', () => {
expect(createFlash).toHaveBeenCalledWith({ message: 'Houston, we have a problem' });
});
- it('displays flash if mutation had a non-recoverable error', async () => {
+ it('displays alert if mutation had a non-recoverable error', async () => {
createComponentWithApollo({
moveHandler: jest.fn().mockRejectedValue('Error'),
});
@@ -812,9 +819,10 @@ describe('Design management index page', () => {
await moveDesigns(wrapper);
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
- message: 'Something went wrong when reordering designs. Please try again',
- });
+ expect(findDesignUpdateAlert().exists()).toBe(true);
+ expect(findDesignUpdateAlert().text()).toBe(
+ 'Something went wrong when reordering designs. Please try again',
+ );
});
});
});
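
A recurring change in this spec is swapping hand-written `[data-testid="..."]` selectors (two of which were missing their closing bracket) for the `findByTestId` helper exposed by `shallowMountExtended`. A small sketch of the equivalence, using a placeholder component:

import { shallowMountExtended } from 'helpers/vue_test_utils_helper';

// Placeholder component standing in for the design index page toolbar.
const Toolbar = {
  template: `<button data-testid="select-all-designs-button">Select all</button>`,
};

const wrapper = shallowMountExtended(Toolbar);

// These two lookups are equivalent; the helper just builds the selector for you.
expect(wrapper.find('[data-testid="select-all-designs-button"]').exists()).toBe(true);
expect(wrapper.findByTestId('select-all-designs-button').exists()).toBe(true);
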
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index 76e4a944d87..96f2ac1692c 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -18,6 +18,7 @@ import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
import axios from '~/lib/utils/axios_utils';
import * as urlUtils from '~/lib/utils/url_utility';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import createDiffsStore from '../create_diffs_store';
import diffsMockData from '../mock_data/merge_request_diffs';
@@ -79,6 +80,7 @@ describe('diffs/components/app', () => {
}
beforeEach(() => {
+ stubPerformanceWebAPI();
// setup globals (needed for component to mount :/)
window.mrTabs = {
resetViewContainer: jest.fn(),
diff --git a/spec/frontend/diffs/components/collapsed_files_warning_spec.js b/spec/frontend/diffs/components/collapsed_files_warning_spec.js
index 8cc342e45a7..cc4f13ab0cf 100644
--- a/spec/frontend/diffs/components/collapsed_files_warning_spec.js
+++ b/spec/frontend/diffs/components/collapsed_files_warning_spec.js
@@ -6,7 +6,7 @@ import { EVT_EXPAND_ALL_FILES } from '~/diffs/constants';
import eventHub from '~/diffs/event_hub';
import createStore from '~/diffs/store/modules';
-import file from '../mock_data/diff_file';
+import { getDiffFileMock } from '../mock_data/diff_file';
const propsData = {
limited: true,
@@ -15,7 +15,7 @@ const propsData = {
};
async function files(store, count) {
- const copies = Array(count).fill(file);
+ const copies = Array(count).fill(getDiffFileMock());
store.state.diffs.diffFiles.push(...copies);
await nextTick();
diff --git a/spec/frontend/diffs/components/diff_code_quality_spec.js b/spec/frontend/diffs/components/diff_code_quality_spec.js
new file mode 100644
index 00000000000..81a817c47dc
--- /dev/null
+++ b/spec/frontend/diffs/components/diff_code_quality_spec.js
@@ -0,0 +1,66 @@
+import { GlIcon } from '@gitlab/ui';
+import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import DiffCodeQuality from '~/diffs/components/diff_code_quality.vue';
+import { SEVERITY_CLASSES, SEVERITY_ICONS } from '~/reports/codequality_report/constants';
+import { multipleFindingsArr } from '../mock_data/diff_code_quality';
+
+let wrapper;
+
+const findIcon = () => wrapper.findComponent(GlIcon);
+
+describe('DiffCodeQuality', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const createWrapper = (codeQuality, mountFunction = mountExtended) => {
+ return mountFunction(DiffCodeQuality, {
+ propsData: {
+ expandedLines: [],
+ line: 1,
+ codeQuality,
+ },
+ });
+ };
+
+ it('hides details and throws hideCodeQualityFindings event on close click', async () => {
+ wrapper = createWrapper(multipleFindingsArr);
+ expect(wrapper.findByTestId('diff-codequality').exists()).toBe(true);
+
+ await wrapper.findByTestId('diff-codequality-close').trigger('click');
+
+ expect(wrapper.emitted('hideCodeQualityFindings').length).toBe(1);
+ expect(wrapper.emitted().hideCodeQualityFindings[0][0]).toBe(wrapper.props('line'));
+ });
+
+ it('renders correct amount of list items for codequality array and their description', async () => {
+ wrapper = createWrapper(multipleFindingsArr);
+ const listItems = wrapper.findAll('li');
+
+ expect(wrapper.findAll('li').length).toBe(3);
+
+ listItems.wrappers.map((e, i) => {
+ return expect(e.text()).toEqual(multipleFindingsArr[i].description);
+ });
+ });
+
+ it.each`
+ severity
+ ${'info'}
+ ${'minor'}
+ ${'major'}
+ ${'critical'}
+ ${'blocker'}
+ ${'unknown'}
+ `('shows icon for $severity degradation', ({ severity }) => {
+ wrapper = createWrapper([{ severity }], shallowMountExtended);
+
+ expect(findIcon().exists()).toBe(true);
+
+ expect(findIcon().attributes()).toMatchObject({
+ class: `codequality-severity-icon ${SEVERITY_CLASSES[severity]}`,
+ name: SEVERITY_ICONS[severity],
+ size: '12',
+ });
+ });
+});
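
The new spec leans on Jest's tagged-template `it.each` to generate one test per severity row, with `$severity` interpolated into each title. A tiny self-contained example of the same table form; the mapping object below is hypothetical, not the real SEVERITY_ICONS constant:

it.each`
  severity     | icon
  ${'info'}    | ${'information-o'}
  ${'blocker'} | ${'cancel'}
`('shows $icon for $severity', ({ severity, icon }) => {
  const icons = { info: 'information-o', blocker: 'cancel' }; // hypothetical mapping
  expect(icons[severity]).toBe(icon);
});
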
diff --git a/spec/frontend/diffs/components/diff_content_spec.js b/spec/frontend/diffs/components/diff_content_spec.js
index 7d2afe105a5..6844e6e497a 100644
--- a/spec/frontend/diffs/components/diff_content_spec.js
+++ b/spec/frontend/diffs/components/diff_content_spec.js
@@ -10,7 +10,7 @@ import { diffViewerModes } from '~/ide/constants';
import NoteForm from '~/notes/components/note_form.vue';
import NoPreviewViewer from '~/vue_shared/components/diff_viewer/viewers/no_preview.vue';
import NotDiffableViewer from '~/vue_shared/components/diff_viewer/viewers/not_diffable.vue';
-import diffFileMockData from '../mock_data/diff_file';
+import { getDiffFileMock } from '../mock_data/diff_file';
Vue.use(Vuex);
@@ -28,7 +28,7 @@ describe('DiffContent', () => {
const getCommentFormForDiffFileGetterMock = jest.fn();
const defaultProps = {
- diffFile: JSON.parse(JSON.stringify(diffFileMockData)),
+ diffFile: getDiffFileMock(),
};
const createComponent = ({ props, state, provide } = {}) => {
@@ -70,7 +70,7 @@ describe('DiffContent', () => {
isInlineView: isInlineViewGetterMock,
isParallelView: isParallelViewGetterMock,
getCommentFormForDiffFile: getCommentFormForDiffFileGetterMock,
- diffLines: () => () => [...diffFileMockData.parallel_diff_lines],
+ diffLines: () => () => [...getDiffFileMock().parallel_diff_lines],
fileLineCodequality: () => () => [],
},
actions: {
diff --git a/spec/frontend/diffs/components/diff_expansion_cell_spec.js b/spec/frontend/diffs/components/diff_expansion_cell_spec.js
index 5ff0728b358..34bb73ccf26 100644
--- a/spec/frontend/diffs/components/diff_expansion_cell_spec.js
+++ b/spec/frontend/diffs/components/diff_expansion_cell_spec.js
@@ -1,10 +1,9 @@
import { mount } from '@vue/test-utils';
-import { cloneDeep } from 'lodash';
import DiffExpansionCell from '~/diffs/components/diff_expansion_cell.vue';
import { INLINE_DIFF_VIEW_TYPE } from '~/diffs/constants';
import { getPreviousLineIndex } from '~/diffs/store/utils';
import { createStore } from '~/mr_notes/stores';
-import diffFileMockData from '../mock_data/diff_file';
+import { getDiffFileMock } from '../mock_data/diff_file';
const EXPAND_UP_CLASS = '.js-unfold';
const EXPAND_DOWN_CLASS = '.js-unfold-down';
@@ -26,7 +25,7 @@ function makeLoadMoreLinesPayload({
isExpandDown = false,
}) {
return {
- endpoint: diffFileMockData.context_lines_path,
+ endpoint: getDiffFileMock().context_lines_path,
params: {
since: sinceLine,
to: toLine,
@@ -57,7 +56,7 @@ describe('DiffExpansionCell', () => {
let store;
beforeEach(() => {
- mockFile = cloneDeep(diffFileMockData);
+ mockFile = getDiffFileMock();
mockLine = getLine(mockFile, INLINE_DIFF_VIEW_TYPE, 8);
store = createStore();
store.state.diffs.diffFiles = [mockFile];
@@ -117,102 +116,102 @@ describe('DiffExpansionCell', () => {
});
describe('any row', () => {
- [
- { diffViewType: INLINE_DIFF_VIEW_TYPE, lineIndex: 8, file: cloneDeep(diffFileMockData) },
- ].forEach(({ diffViewType, file, lineIndex }) => {
- describe(`with diffViewType (${diffViewType})`, () => {
- beforeEach(() => {
- mockLine = getLine(mockFile, diffViewType, lineIndex);
- store.state.diffs.diffFiles = [{ ...mockFile, ...file }];
- store.state.diffs.diffViewType = diffViewType;
- });
-
- it('does not initially dispatch anything', () => {
- expect(store.dispatch).not.toHaveBeenCalled();
- });
-
- it('on expand all clicked, dispatch loadMoreLines', () => {
- const oldLineNumber = mockLine.meta_data.old_pos;
- const newLineNumber = mockLine.meta_data.new_pos;
- const previousIndex = getPreviousLineIndex(mockFile, {
- oldLineNumber,
- newLineNumber,
+ [{ diffViewType: INLINE_DIFF_VIEW_TYPE, lineIndex: 8, file: getDiffFileMock() }].forEach(
+ ({ diffViewType, file, lineIndex }) => {
+ describe(`with diffViewType (${diffViewType})`, () => {
+ beforeEach(() => {
+ mockLine = getLine(mockFile, diffViewType, lineIndex);
+ store.state.diffs.diffFiles = [{ ...mockFile, ...file }];
+ store.state.diffs.diffViewType = diffViewType;
});
- const wrapper = createComponent({ file, lineCountBetween: 10 });
-
- findExpandAll(wrapper).trigger('click');
+ it('does not initially dispatch anything', () => {
+ expect(store.dispatch).not.toHaveBeenCalled();
+ });
- expect(store.dispatch).toHaveBeenCalledWith(
- 'diffs/loadMoreLines',
- makeLoadMoreLinesPayload({
- fileHash: mockFile.file_hash,
- toLine: newLineNumber - 1,
- sinceLine: previousIndex,
+ it('on expand all clicked, dispatch loadMoreLines', () => {
+ const oldLineNumber = mockLine.meta_data.old_pos;
+ const newLineNumber = mockLine.meta_data.new_pos;
+ const previousIndex = getPreviousLineIndex(mockFile, {
oldLineNumber,
- }),
- );
- });
+ newLineNumber,
+ });
+
+ const wrapper = createComponent({ file, lineCountBetween: 10 });
+
+ findExpandAll(wrapper).trigger('click');
+
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'diffs/loadMoreLines',
+ makeLoadMoreLinesPayload({
+ fileHash: mockFile.file_hash,
+ toLine: newLineNumber - 1,
+ sinceLine: previousIndex,
+ oldLineNumber,
+ }),
+ );
+ });
- it('on expand up clicked, dispatch loadMoreLines', () => {
- mockLine.meta_data.old_pos = 200;
- mockLine.meta_data.new_pos = 200;
+ it('on expand up clicked, dispatch loadMoreLines', () => {
+ mockLine.meta_data.old_pos = 200;
+ mockLine.meta_data.new_pos = 200;
- const oldLineNumber = mockLine.meta_data.old_pos;
- const newLineNumber = mockLine.meta_data.new_pos;
+ const oldLineNumber = mockLine.meta_data.old_pos;
+ const newLineNumber = mockLine.meta_data.new_pos;
- const wrapper = createComponent({ file });
+ const wrapper = createComponent({ file });
- findExpandUp(wrapper).trigger('click');
+ findExpandUp(wrapper).trigger('click');
- expect(store.dispatch).toHaveBeenCalledWith(
- 'diffs/loadMoreLines',
- makeLoadMoreLinesPayload({
- fileHash: mockFile.file_hash,
- toLine: newLineNumber - 1,
- sinceLine: 179,
- oldLineNumber,
- diffViewType,
- unfold: true,
- }),
- );
- });
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'diffs/loadMoreLines',
+ makeLoadMoreLinesPayload({
+ fileHash: mockFile.file_hash,
+ toLine: newLineNumber - 1,
+ sinceLine: 179,
+ oldLineNumber,
+ diffViewType,
+ unfold: true,
+ }),
+ );
+ });
- it('on expand down clicked, dispatch loadMoreLines', () => {
- mockFile[lineSources[diffViewType]][lineIndex + 1] = cloneDeep(
- mockFile[lineSources[diffViewType]][lineIndex],
- );
- const nextLine = getLine(mockFile, diffViewType, lineIndex + 1);
-
- nextLine.meta_data.old_pos = 300;
- nextLine.meta_data.new_pos = 300;
- mockLine.meta_data.old_pos = 200;
- mockLine.meta_data.new_pos = 200;
-
- const wrapper = createComponent({ file });
-
- findExpandDown(wrapper).trigger('click');
-
- expect(store.dispatch).toHaveBeenCalledWith('diffs/loadMoreLines', {
- endpoint: diffFileMockData.context_lines_path,
- params: {
- since: 1,
- to: 21, // the load amount, plus 1 line
- offset: 0,
- unfold: true,
- bottom: true,
- },
- lineNumbers: {
- // when expanding down, these are based on the previous line, 0, in this case
- oldLineNumber: 0,
- newLineNumber: 0,
- },
- nextLineNumbers: { old_line: 200, new_line: 200 },
- fileHash: mockFile.file_hash,
- isExpandDown: true,
+ it('on expand down clicked, dispatch loadMoreLines', () => {
+ mockFile[lineSources[diffViewType]][lineIndex + 1] = getDiffFileMock()[
+ lineSources[diffViewType]
+ ][lineIndex];
+ const nextLine = getLine(mockFile, diffViewType, lineIndex + 1);
+
+ nextLine.meta_data.old_pos = 300;
+ nextLine.meta_data.new_pos = 300;
+ mockLine.meta_data.old_pos = 200;
+ mockLine.meta_data.new_pos = 200;
+
+ const wrapper = createComponent({ file });
+
+ findExpandDown(wrapper).trigger('click');
+
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/loadMoreLines', {
+ endpoint: mockFile.context_lines_path,
+ params: {
+ since: 1,
+ to: 21, // the load amount, plus 1 line
+ offset: 0,
+ unfold: true,
+ bottom: true,
+ },
+ lineNumbers: {
+ // when expanding down, these are based on the previous line, 0, in this case
+ oldLineNumber: 0,
+ newLineNumber: 0,
+ },
+ nextLineNumbers: { old_line: 200, new_line: 200 },
+ fileHash: mockFile.file_hash,
+ isExpandDown: true,
+ });
});
});
- });
- });
+ },
+ );
});
});
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index a0aa4c784bf..9e8d9e1ca29 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -20,7 +20,7 @@ import axios from '~/lib/utils/axios_utils';
import { scrollToElement } from '~/lib/utils/common_utils';
import httpStatus from '~/lib/utils/http_status';
import createNotesStore from '~/notes/stores/modules';
-import diffFileMockDataReadable from '../mock_data/diff_file';
+import { getDiffFileMock } from '../mock_data/diff_file';
import diffFileMockDataUnreadable from '../mock_data/diff_file_unreadable';
jest.mock('~/lib/utils/common_utils');
@@ -106,7 +106,7 @@ const findLoader = (wrapper) => wrapper.find('[data-testid="loader-icon"]');
const findToggleButton = (wrapper) => wrapper.find('[data-testid="expand-button"]');
const toggleFile = (wrapper) => findDiffHeader(wrapper).vm.$emit('toggleFile');
-const getReadableFile = () => JSON.parse(JSON.stringify(diffFileMockDataReadable));
+const getReadableFile = () => getDiffFileMock();
const getUnreadableFile = () => JSON.parse(JSON.stringify(diffFileMockDataUnreadable));
const makeFileAutomaticallyCollapsed = (store, index = 0) =>
diff --git a/spec/frontend/diffs/components/diff_line_note_form_spec.js b/spec/frontend/diffs/components/diff_line_note_form_spec.js
index b59043168b8..542d61c4680 100644
--- a/spec/frontend/diffs/components/diff_line_note_form_spec.js
+++ b/spec/frontend/diffs/components/diff_line_note_form_spec.js
@@ -1,200 +1,207 @@
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import Vuex from 'vuex';
+import Autosave from '~/autosave';
import DiffLineNoteForm from '~/diffs/components/diff_line_note_form.vue';
-import { createStore } from '~/mr_notes/stores';
+import { createModules } from '~/mr_notes/stores';
import NoteForm from '~/notes/components/note_form.vue';
+import MultilineCommentForm from '~/notes/components/multiline_comment_form.vue';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import { noteableDataMock } from 'jest/notes/mock_data';
-import diffFileMockData from '../mock_data/diff_file';
+import { getDiffFileMock } from '../mock_data/diff_file';
-jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal', () => {
- return {
- confirmAction: jest.fn(),
- };
-});
+jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal');
+jest.mock('~/autosave');
describe('DiffLineNoteForm', () => {
let wrapper;
let diffFile;
let diffLines;
- const getDiffFileMock = () => ({ ...diffFileMockData });
+ let actions;
+ let store;
- const createComponent = (args = {}) => {
- diffFile = getDiffFileMock();
- diffLines = diffFile.highlighted_diff_lines;
- const store = createStore();
+ const getSelectedLine = () => {
+ const lineCode = diffLines[1].line_code;
+ return diffFile.highlighted_diff_lines.find((l) => l.line_code === lineCode);
+ };
+
+ const createStore = (state) => {
+ const modules = createModules();
+ modules.diffs.actions = {
+ ...modules.diffs.actions,
+ saveDiffDiscussion: jest.fn(() => Promise.resolve()),
+ };
+ modules.diffs.getters = {
+ ...modules.diffs.getters,
+ diffCompareDropdownTargetVersions: jest.fn(),
+ diffCompareDropdownSourceVersions: jest.fn(),
+ selectedSourceIndex: jest.fn(),
+ };
+ modules.notes.getters = {
+ ...modules.notes.getters,
+ noteableType: jest.fn(),
+ };
+ actions = modules.diffs.actions;
+
+ store = new Vuex.Store({ modules });
store.state.notes.userData.id = 1;
store.state.notes.noteableData = noteableDataMock;
+
+ store.replaceState({ ...store.state, ...state });
+ };
+
+ const createComponent = ({ props, state } = {}) => {
+ wrapper?.destroy();
+ diffFile = getDiffFileMock();
+ diffLines = diffFile.highlighted_diff_lines;
+
+ createStore(state);
store.state.diffs.diffFiles = [diffFile];
- store.replaceState({ ...store.state, ...args.state });
+ const propsData = {
+ diffFileHash: diffFile.file_hash,
+ diffLines,
+ line: diffLines[1],
+ range: { start: diffLines[0], end: diffLines[1] },
+ noteTargetLine: diffLines[1],
+ ...props,
+ };
- return shallowMount(DiffLineNoteForm, {
+ wrapper = shallowMount(DiffLineNoteForm, {
store,
- propsData: {
- ...{
- diffFileHash: diffFile.file_hash,
- diffLines,
- line: diffLines[1],
- range: { start: diffLines[0], end: diffLines[1] },
- noteTargetLine: diffLines[1],
- },
- ...(args.props || {}),
- },
+ propsData,
});
};
const findNoteForm = () => wrapper.findComponent(NoteForm);
+ const findCommentForm = () => wrapper.findComponent(MultilineCommentForm);
- describe('methods', () => {
- beforeEach(() => {
- wrapper = createComponent();
- });
-
- describe('handleCancelCommentForm', () => {
- afterEach(() => {
- confirmAction.mockReset();
- });
-
- it('should ask for confirmation when shouldConfirm and isDirty passed as truthy', () => {
- confirmAction.mockResolvedValueOnce(false);
-
- findNoteForm().vm.$emit('cancelForm', true, true);
-
- expect(confirmAction).toHaveBeenCalled();
- });
+ beforeEach(() => {
+ Autosave.mockClear();
+ createComponent();
+ });
- it('should only ask for confirmation once', () => {
- // Never resolve so we can test what happens when triggered while "confirmAction" is loading
- confirmAction.mockImplementation(() => new Promise(() => {}));
+ it('shows note form', () => {
+ expect(wrapper.find(NoteForm).exists()).toBe(true);
+ });
- findNoteForm().vm.$emit('cancelForm', true, true);
- findNoteForm().vm.$emit('cancelForm', true, true);
+ it('passes the provided range of lines to comment form', () => {
+ expect(findCommentForm().props('lineRange')).toMatchObject({
+ start: diffLines[0],
+ end: diffLines[1],
+ });
+ });
- expect(confirmAction).toHaveBeenCalledTimes(1);
- });
+ it('respects empty range when passing a range of lines', () => {
+ createComponent({ props: { range: null } });
+ expect(findCommentForm().props('lineRange')).toMatchObject({
+ start: diffLines[1],
+ end: diffLines[1],
+ });
+ });
- it('should not ask for confirmation when one of the params false', () => {
- confirmAction.mockResolvedValueOnce(false);
+ it('should init autosave', () => {
+ expect(Autosave).toHaveBeenCalledWith({}, [
+ 'Note',
+ 'Issue',
+ 98,
+ undefined,
+ 'DiffNote',
+ undefined,
+ '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_2',
+ ]);
+ });
- findNoteForm().vm.$emit('cancelForm', true, false);
+ describe('when cancelling form', () => {
+ afterEach(() => {
+ confirmAction.mockReset();
+ });
- expect(confirmAction).not.toHaveBeenCalled();
+ it('should only ask for confirmation once', () => {
+ let finalizePromise;
+ confirmAction.mockImplementation(
+ () =>
+ new Promise((resolve) => {
+ finalizePromise = resolve;
+ }),
+ );
- findNoteForm().vm.$emit('cancelForm', false, true);
+ findNoteForm().vm.$emit('cancelForm', true, true);
+ findNoteForm().vm.$emit('cancelForm', true, true);
- expect(confirmAction).not.toHaveBeenCalled();
- });
+ expect(confirmAction).toHaveBeenCalledTimes(1);
+ finalizePromise();
+ });
- it('should call cancelCommentForm with lineCode', async () => {
+ describe('with confirmation', () => {
+ beforeEach(() => {
confirmAction.mockResolvedValueOnce(true);
- jest.spyOn(wrapper.vm, 'cancelCommentForm').mockImplementation(() => {});
- jest.spyOn(wrapper.vm, 'resetAutoSave').mockImplementation(() => {});
+ });
+ it('should ask form confirmation and hide form for a line', async () => {
findNoteForm().vm.$emit('cancelForm', true, true);
-
await nextTick();
-
expect(confirmAction).toHaveBeenCalled();
-
await nextTick();
- expect(wrapper.vm.cancelCommentForm).toHaveBeenCalledWith({
- lineCode: diffLines[1].line_code,
- fileHash: wrapper.vm.diffFileHash,
- });
- expect(wrapper.vm.resetAutoSave).toHaveBeenCalled();
+ expect(getSelectedLine().hasForm).toBe(false);
+ expect(Autosave.mock.instances[0].reset).toHaveBeenCalled();
});
});
- describe('saveNoteForm', () => {
- it('should call saveNote action with proper params', async () => {
- const saveDiffDiscussionSpy = jest
- .spyOn(wrapper.vm, 'saveDiffDiscussion')
- .mockReturnValue(Promise.resolve());
-
- const lineRange = {
- start: {
- line_code: wrapper.vm.commentLineStart.line_code,
- type: wrapper.vm.commentLineStart.type,
- new_line: 2,
- old_line: null,
- },
- end: {
- line_code: wrapper.vm.line.line_code,
- type: wrapper.vm.line.type,
- new_line: 2,
- old_line: null,
- },
- };
-
- const formData = {
- ...wrapper.vm.formData,
- lineRange,
- };
-
- await wrapper.vm.handleSaveNote('note body');
- expect(saveDiffDiscussionSpy).toHaveBeenCalledWith({
- note: 'note body',
- formData,
- });
+ describe('without confirmation', () => {
+ beforeEach(() => {
+ confirmAction.mockResolvedValueOnce(false);
});
- });
- });
- describe('created', () => {
- it('should use the provided `range` of lines', () => {
- wrapper = createComponent();
+ it('should ask for confirmation when shouldConfirm and isDirty passed as truthy', () => {
+ findNoteForm().vm.$emit('cancelForm', true, true);
- expect(wrapper.vm.lines.start).toBe(diffLines[0]);
- expect(wrapper.vm.lines.end).toBe(diffLines[1]);
- });
+ expect(confirmAction).toHaveBeenCalled();
+ });
- it("should fill the internal `lines` data with the provided `line` if there's no provided `range", () => {
- wrapper = createComponent({ props: { range: null } });
+ it('should not ask for confirmation when one of the params false', () => {
+ findNoteForm().vm.$emit('cancelForm', true, false);
- expect(wrapper.vm.lines.start).toBe(diffLines[1]);
- expect(wrapper.vm.lines.end).toBe(diffLines[1]);
- });
- });
+ expect(confirmAction).not.toHaveBeenCalled();
- describe('mounted', () => {
- it('should init autosave', () => {
- const key = 'autosave/Note/Issue/98//DiffNote//1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_2';
- wrapper = createComponent();
+ findNoteForm().vm.$emit('cancelForm', false, true);
- expect(wrapper.vm.autosave).toBeDefined();
- expect(wrapper.vm.autosave.key).toEqual(key);
+ expect(confirmAction).not.toHaveBeenCalled();
+ });
});
+ });
- it('should set selectedCommentPosition', () => {
- wrapper = createComponent();
- let startLineCode = wrapper.vm.commentLineStart.line_code;
- let lineCode = wrapper.vm.line.line_code;
-
- expect(startLineCode).toEqual(lineCode);
- wrapper.destroy();
-
- const state = {
- notes: {
- selectedCommentPosition: {
- start: {
- line_code: 'test',
- },
- },
+ describe('saving note', () => {
+ it('should save original line', async () => {
+ const lineRange = {
+ start: {
+ line_code: diffLines[1].line_code,
+ type: diffLines[1].type,
+ new_line: 2,
+ old_line: null,
+ },
+ end: {
+ line_code: diffLines[1].line_code,
+ type: diffLines[1].type,
+ new_line: 2,
+ old_line: null,
},
};
- wrapper = createComponent({ state });
- startLineCode = wrapper.vm.commentLineStart.line_code;
- lineCode = state.notes.selectedCommentPosition.start.line_code;
- expect(startLineCode).toEqual(lineCode);
+ await findNoteForm().vm.$emit('handleFormUpdate', 'note body');
+ expect(actions.saveDiffDiscussion.mock.calls[0][1].formData).toMatchObject({
+ lineRange,
+ });
});
- });
- describe('template', () => {
- it('should have note form', () => {
- wrapper = createComponent();
- expect(wrapper.find(NoteForm).exists()).toBe(true);
+ it('should save selected line from the store', async () => {
+ const lineCode = 'test';
+ store.state.notes.selectedCommentPosition = { start: { line_code: lineCode } };
+ createComponent({ state: store.state });
+ await findNoteForm().vm.$emit('handleFormUpdate', 'note body');
+ expect(actions.saveDiffDiscussion.mock.calls[0][1].formData.lineRange.start.line_code).toBe(
+ lineCode,
+ );
});
});
});
diff --git a/spec/frontend/diffs/components/diff_row_spec.js b/spec/frontend/diffs/components/diff_row_spec.js
index 4c5ce429c9d..be81508213b 100644
--- a/spec/frontend/diffs/components/diff_row_spec.js
+++ b/spec/frontend/diffs/components/diff_row_spec.js
@@ -6,7 +6,7 @@ import DiffRow from '~/diffs/components/diff_row.vue';
import { mapParallel } from '~/diffs/components/diff_row_utils';
import diffsModule from '~/diffs/store/modules';
import { findInteropAttributes } from '../find_interop_attributes';
-import diffFileMockData from '../mock_data/diff_file';
+import { getDiffFileMock } from '../mock_data/diff_file';
const showCommentForm = jest.fn();
const enterdragging = jest.fn();
@@ -210,6 +210,7 @@ describe('DiffRow', () => {
});
describe('sets coverage title and class', () => {
+ const diffFileMockData = getDiffFileMock();
const thisLine = diffFileMockData.parallel_diff_lines[2];
const rightLine = diffFileMockData.parallel_diff_lines[2].right;
diff --git a/spec/frontend/diffs/components/diff_stats_spec.js b/spec/frontend/diffs/components/diff_stats_spec.js
index 4ef1ec55cb0..09fe69e97de 100644
--- a/spec/frontend/diffs/components/diff_stats_spec.js
+++ b/spec/frontend/diffs/components/diff_stats_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import DiffStats from '~/diffs/components/diff_stats.vue';
-import mockDiffFile from '../mock_data/diff_file';
+import { getDiffFileMock } from '../mock_data/diff_file';
const TEST_ADDED_LINES = 100;
const TEST_REMOVED_LINES = 200;
@@ -48,6 +48,7 @@ describe('diff_stats', () => {
const getBytesContainer = () => wrapper.find('.diff-stats > div:first-child');
beforeEach(() => {
+ const mockDiffFile = getDiffFileMock();
file = {
...mockDiffFile,
viewer: {
diff --git a/spec/frontend/diffs/components/diff_view_spec.js b/spec/frontend/diffs/components/diff_view_spec.js
index dfbe30e460b..15923a1c6de 100644
--- a/spec/frontend/diffs/components/diff_view_spec.js
+++ b/spec/frontend/diffs/components/diff_view_spec.js
@@ -1,7 +1,9 @@
import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import DiffView from '~/diffs/components/diff_view.vue';
+import DiffCodeQuality from '~/diffs/components/diff_code_quality.vue';
+import { diffCodeQuality } from '../mock_data/diff_code_quality';
describe('DiffView', () => {
const DiffExpansionCell = { template: `<div/>` };
@@ -12,7 +14,7 @@ describe('DiffView', () => {
const setSelectedCommentPosition = jest.fn();
const getDiffRow = (wrapper) => wrapper.findComponent(DiffRow).vm;
- const createWrapper = (props) => {
+ const createWrapper = (props, provide = {}) => {
Vue.use(Vuex);
const batchComments = {
@@ -46,9 +48,33 @@ describe('DiffView', () => {
...props,
};
const stubs = { DiffExpansionCell, DiffRow, DiffCommentCell, DraftNote };
- return shallowMount(DiffView, { propsData, store, stubs });
+ return shallowMount(DiffView, { propsData, store, stubs, provide });
};
+ it('does not render a codeQuality diff view when there is no finding', () => {
+ const wrapper = createWrapper();
+ expect(wrapper.findComponent(DiffCodeQuality).exists()).toBe(false);
+ });
+
+ it('does render a codeQuality diff view with the correct props when there is a finding & refactorCodeQualityInlineFindings flag is true ', async () => {
+ const wrapper = createWrapper(diffCodeQuality, {
+ glFeatures: { refactorCodeQualityInlineFindings: true },
+ });
+ wrapper.findComponent(DiffRow).vm.$emit('toggleCodeQualityFindings', 2);
+ await nextTick();
+ expect(wrapper.findComponent(DiffCodeQuality).exists()).toBe(true);
+ expect(wrapper.findComponent(DiffCodeQuality).props().codeQuality.length).not.toBe(0);
+ });
+
+ it('does not render a codeQuality diff view when there is a finding & refactorCodeQualityInlineFindings flag is false ', async () => {
+ const wrapper = createWrapper(diffCodeQuality, {
+ glFeatures: { refactorCodeQualityInlineFindings: false },
+ });
+ wrapper.findComponent(DiffRow).vm.$emit('toggleCodeQualityFindings', 2);
+ await nextTick();
+ expect(wrapper.findComponent(DiffCodeQuality).exists()).toBe(false);
+ });
+
it.each`
type | side | container | sides | total
${'parallel'} | ${'left'} | ${'.old'} | ${{ left: { lineDraft: {}, renderDiscussion: true }, right: { lineDraft: {}, renderDiscussion: true } }} | ${2}
diff --git a/spec/frontend/diffs/components/settings_dropdown_spec.js b/spec/frontend/diffs/components/settings_dropdown_spec.js
index 693fc5bfd8f..2ec11ba86fd 100644
--- a/spec/frontend/diffs/components/settings_dropdown_spec.js
+++ b/spec/frontend/diffs/components/settings_dropdown_spec.js
@@ -1,6 +1,5 @@
import { mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import SettingsDropdown from '~/diffs/components/settings_dropdown.vue';
@@ -139,9 +138,7 @@ describe('Diff settings dropdown component', () => {
const checkbox = wrapper.findByTestId('show-whitespace');
const { checked } = checkbox.element;
- checkbox.trigger('click');
-
- await nextTick();
+ await checkbox.setChecked(false);
expect(store.dispatch).toHaveBeenCalledWith('diffs/setShowWhitespace', {
showWhitespace: !checked,
@@ -182,9 +179,7 @@ describe('Diff settings dropdown component', () => {
Object.assign(origStore.state.diffs, { viewDiffsFileByFile: start }),
});
- getFileByFileCheckbox(wrapper).trigger('click');
-
- await nextTick();
+ await getFileByFileCheckbox(wrapper).setChecked(setting);
expect(store.dispatch).toHaveBeenCalledWith('diffs/setFileByFile', {
fileByFile: setting,
diff --git a/spec/frontend/diffs/components/tree_list_spec.js b/spec/frontend/diffs/components/tree_list_spec.js
index 963805f4792..931a9562d36 100644
--- a/spec/frontend/diffs/components/tree_list_spec.js
+++ b/spec/frontend/diffs/components/tree_list_spec.js
@@ -50,6 +50,19 @@ describe('Diffs tree list component', () => {
type: 'blob',
parentPath: 'app',
},
+ 'test.rb': {
+ addedLines: 0,
+ changed: true,
+ deleted: false,
+ fileHash: 'test',
+ key: 'test.rb',
+ name: 'test.rb',
+ path: 'app/test.rb',
+ removedLines: 0,
+ tempFile: true,
+ type: 'blob',
+ parentPath: 'app',
+ },
app: {
key: 'app',
path: 'app',
@@ -85,6 +98,23 @@ describe('Diffs tree list component', () => {
createComponent();
});
+ describe('search by file extension', () => {
+ it.each`
+ extension | itemSize
+ ${'*.md'} | ${0}
+ ${'*.js'} | ${1}
+ ${'index.js'} | ${1}
+ ${'app/*.js'} | ${1}
+ ${'*.js, *.rb'} | ${2}
+ `('it returns $itemSize item for $extension', async ({ extension, itemSize }) => {
+ wrapper.find('[data-testid="diff-tree-search"]').setValue(extension);
+
+ await nextTick();
+
+ expect(getFileRows()).toHaveLength(itemSize);
+ });
+ });
+
it('renders tree', () => {
expect(getFileRows()).toHaveLength(2);
expect(getFileRows().at(0).html()).toContain('index.js');
@@ -120,7 +150,7 @@ describe('Diffs tree list component', () => {
wrapper.vm.$store.state.diffs.renderTreeList = false;
await nextTick();
- expect(getFileRows()).toHaveLength(1);
+ expect(getFileRows()).toHaveLength(2);
});
it('renders file paths when renderTreeList is false', async () => {
diff --git a/spec/frontend/diffs/mock_data/diff_code_quality.js b/spec/frontend/diffs/mock_data/diff_code_quality.js
new file mode 100644
index 00000000000..2ca421a20b4
--- /dev/null
+++ b/spec/frontend/diffs/mock_data/diff_code_quality.js
@@ -0,0 +1,62 @@
+export const multipleFindingsArr = [
+ {
+ severity: 'minor',
+ description: 'Unexpected Debugger Statement.',
+ line: 2,
+ },
+ {
+ severity: 'major',
+ description:
+ 'Function `aVeryLongFunction` has 52 lines of code (exceeds 25 allowed). Consider refactoring.',
+ line: 3,
+ },
+ {
+ severity: 'minor',
+ description: 'Arrow function has too many statements (52). Maximum allowed is 30.',
+ line: 3,
+ },
+];
+
+export const multipleFindings = {
+ filePath: 'index.js',
+ codequality: multipleFindingsArr,
+};
+
+export const singularFinding = {
+ filePath: 'index.js',
+ codequality: [multipleFindingsArr[0]],
+};
+
+export const diffCodeQuality = {
+ diffFile: { file_hash: '123' },
+ diffLines: [
+ {
+ left: {
+ type: 'old',
+ old_line: 1,
+ new_line: null,
+ codequality: [],
+ lineDraft: {},
+ },
+ },
+ {
+ left: {
+ type: null,
+ old_line: 2,
+ new_line: 1,
+ codequality: [],
+ lineDraft: {},
+ },
+ },
+ {
+ left: {
+ type: 'new',
+ old_line: null,
+ new_line: 2,
+
+ codequality: [multipleFindingsArr[0]],
+ lineDraft: {},
+ },
+ },
+ ],
+};
diff --git a/spec/frontend/diffs/mock_data/diff_file.js b/spec/frontend/diffs/mock_data/diff_file.js
index 9ebcd5ef26b..dd200b0248c 100644
--- a/spec/frontend/diffs/mock_data/diff_file.js
+++ b/spec/frontend/diffs/mock_data/diff_file.js
@@ -1,4 +1,4 @@
-export default {
+export const getDiffFileMock = () => ({
submodule: false,
submodule_link: null,
blob: {
@@ -305,4 +305,4 @@ export default {
],
discussions: [],
renderingLines: false,
-};
+});
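
The mock now exports a factory instead of a single shared object, so each spec gets a fresh copy and the `cloneDeep` / `JSON.parse(JSON.stringify(...))` calls scattered through the callers can go away. A trimmed-down sketch of the idea, keeping only a few of the fields visible in this diff:

// Before: one default-exported instance, so mutations leaked between specs.
// After: a factory returns a new object per call.
export const getDiffFileMock = () => ({
  file_hash: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a',
  highlighted_diff_lines: [],
  parallel_diff_lines: [],
  discussions: [],
  renderingLines: false,
});

// Each caller can mutate its own copy safely:
const mockFile = getDiffFileMock();
mockFile.highlighted_diff_lines.push({ line_code: `${mockFile.file_hash}_1_2` });
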
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index cc595e58dda..346e43e5a72 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -3,7 +3,7 @@ import Cookies from '~/lib/utils/cookies';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import { TEST_HOST } from 'helpers/test_constants';
import testAction from 'helpers/vuex_action_helper';
-import mockDiffFile from 'jest/diffs/mock_data/diff_file';
+import { getDiffFileMock } from 'jest/diffs/mock_data/diff_file';
import {
DIFF_VIEW_COOKIE_NAME,
INLINE_DIFF_VIEW_TYPE,
@@ -754,7 +754,7 @@ describe('DiffsStoreActions', () => {
it('dispatches actions', () => {
const commitId = 'something';
const formData = {
- diffFile: { ...mockDiffFile },
+ diffFile: getDiffFileMock(),
noteableData: {},
};
const note = {};
diff --git a/spec/frontend/diffs/store/mutations_spec.js b/spec/frontend/diffs/store/mutations_spec.js
index 57e623b843d..031e4fe2be2 100644
--- a/spec/frontend/diffs/store/mutations_spec.js
+++ b/spec/frontend/diffs/store/mutations_spec.js
@@ -3,7 +3,7 @@ import createState from '~/diffs/store/modules/diff_state';
import * as types from '~/diffs/store/mutation_types';
import mutations from '~/diffs/store/mutations';
import * as utils from '~/diffs/store/utils';
-import diffFileMockData from '../mock_data/diff_file';
+import { getDiffFileMock } from '../mock_data/diff_file';
describe('DiffsStoreMutations', () => {
describe('SET_BASE_CONFIG', () => {
@@ -71,6 +71,7 @@ describe('DiffsStoreMutations', () => {
describe('SET_DIFF_METADATA', () => {
it('should overwrite state with the camelCased data that is passed in', () => {
+ const diffFileMockData = getDiffFileMock();
const state = {
diffFiles: [],
};
@@ -94,7 +95,7 @@ describe('DiffsStoreMutations', () => {
it('should set diff data batch type properly', () => {
const state = { diffFiles: [] };
const diffMock = {
- diff_files: [diffFileMockData],
+ diff_files: [getDiffFileMock()],
};
mutations[types.SET_DIFF_DATA_BATCH](state, diffMock);
diff --git a/spec/frontend/diffs/store/utils_spec.js b/spec/frontend/diffs/store/utils_spec.js
index 6f55f76d7b5..8852c6c62c5 100644
--- a/spec/frontend/diffs/store/utils_spec.js
+++ b/spec/frontend/diffs/store/utils_spec.js
@@ -1,4 +1,3 @@
-import { clone } from 'lodash';
import {
LINE_POSITION_LEFT,
LINE_POSITION_RIGHT,
@@ -14,10 +13,9 @@ import {
import * as utils from '~/diffs/store/utils';
import { MERGE_REQUEST_NOTEABLE_TYPE } from '~/notes/constants';
import { noteableDataMock } from 'jest/notes/mock_data';
-import diffFileMockData from '../mock_data/diff_file';
+import { getDiffFileMock } from '../mock_data/diff_file';
import { diffMetadata } from '../mock_data/diff_metadata';
-const getDiffFileMock = () => JSON.parse(JSON.stringify(diffFileMockData));
const getDiffMetadataMock = () => JSON.parse(JSON.stringify(diffMetadata));
describe('DiffsStoreUtils', () => {
@@ -47,7 +45,7 @@ describe('DiffsStoreUtils', () => {
let diffFile;
beforeEach(() => {
- diffFile = { ...clone(diffFileMockData) };
+ diffFile = getDiffFileMock();
});
it('should return the correct previous line number', () => {
diff --git a/spec/frontend/diffs/utils/diff_file_spec.js b/spec/frontend/diffs/utils/diff_file_spec.js
index 778897be3ba..b062a156216 100644
--- a/spec/frontend/diffs/utils/diff_file_spec.js
+++ b/spec/frontend/diffs/utils/diff_file_spec.js
@@ -6,7 +6,7 @@ import {
match,
} from '~/diffs/utils/diff_file';
import { diffViewerModes } from '~/ide/constants';
-import mockDiffFile from '../mock_data/diff_file';
+import { getDiffFileMock } from '../mock_data/diff_file';
function getDiffFiles() {
const loadFull = 'namespace/project/-/merge_requests/12345/diff_for_path?file_identifier=abc';
@@ -210,7 +210,7 @@ describe('diff_file utilities', () => {
];
const validFile = [
'computes the correct stats from a file',
- mockDiffFile,
+ getDiffFileMock(),
{
changed: 1024,
percent: 100,
@@ -223,7 +223,7 @@ describe('diff_file utilities', () => {
const negativeChange = [
'computed the correct states from a file with a negative size change',
{
- ...mockDiffFile,
+ ...getDiffFileMock(),
new_size: 0,
old_size: 1024,
},
diff --git a/spec/frontend/dirty_submit/dirty_submit_form_spec.js b/spec/frontend/dirty_submit/dirty_submit_form_spec.js
index bcbe824bd9f..f24bb7374a3 100644
--- a/spec/frontend/dirty_submit/dirty_submit_form_spec.js
+++ b/spec/frontend/dirty_submit/dirty_submit_form_spec.js
@@ -55,7 +55,6 @@ describe('DirtySubmitForm', () => {
describe('throttling tests', () => {
beforeEach(() => {
throttle.mockImplementation(lodash.throttle);
- jest.useFakeTimers();
});
afterEach(() => {
diff --git a/spec/frontend/emoji/awards_app/store/actions_spec.js b/spec/frontend/emoji/awards_app/store/actions_spec.js
index 0761256ed23..cd3dfab30d4 100644
--- a/spec/frontend/emoji/awards_app/store/actions_spec.js
+++ b/spec/frontend/emoji/awards_app/store/actions_spec.js
@@ -8,10 +8,6 @@ jest.mock('@sentry/browser');
jest.mock('~/vue_shared/plugins/global_toast');
describe('Awards app actions', () => {
- afterEach(() => {
- window.gon = {};
- });
-
describe('setInitialData', () => {
it('commits SET_INITIAL_DATA', async () => {
await testAction(
@@ -52,8 +48,6 @@ describe('Awards app actions', () => {
});
it('commits FETCH_AWARDS_SUCCESS', async () => {
- window.gon.current_user_id = 1;
-
await testAction(
actions.fetchAwards,
'1',
@@ -62,10 +56,6 @@ describe('Awards app actions', () => {
[{ type: 'fetchAwards', payload: '2' }],
);
});
-
- it('does not commit FETCH_AWARDS_SUCCESS when user signed out', async () => {
- await testAction(actions.fetchAwards, '1', { path: '/awards' }, [], []);
- });
});
});
@@ -75,8 +65,6 @@ describe('Awards app actions', () => {
});
it('calls Sentry.captureException', async () => {
- window.gon = { current_user_id: 1 };
-
await testAction(actions.fetchAwards, null, { path: '/awards' }, [], [], () => {
expect(Sentry.captureException).toHaveBeenCalled();
});
diff --git a/spec/frontend/environment.js b/spec/frontend/environment.js
index 8465b57c660..dc1c1dfbe4a 100644
--- a/spec/frontend/environment.js
+++ b/spec/frontend/environment.js
@@ -67,12 +67,6 @@ class CustomEnvironment extends JSDOMEnvironment {
// Expose the jsdom (created in super class) to the global so that we can call reconfigure({ url: '' }) to properly set `window.location`
this.global.jsdom = this.dom;
- Object.assign(this.global.performance, {
- mark: () => null,
- measure: () => null,
- getEntriesByName: () => [],
- });
-
//
// Monaco-related environment variables
//
diff --git a/spec/frontend/environments/canary_update_modal_spec.js b/spec/frontend/environments/canary_update_modal_spec.js
index 22d13558a84..16792dcda1e 100644
--- a/spec/frontend/environments/canary_update_modal_spec.js
+++ b/spec/frontend/environments/canary_update_modal_spec.js
@@ -47,7 +47,7 @@ describe('/environments/components/canary_update_modal.vue', () => {
modalId: 'confirm-canary-change',
actionPrimary: {
text: 'Change ratio',
- attributes: [{ variant: 'info' }],
+ attributes: [{ variant: 'confirm' }],
},
actionCancel: { text: 'Cancel' },
});
diff --git a/spec/frontend/environments/confirm_rollback_modal_spec.js b/spec/frontend/environments/confirm_rollback_modal_spec.js
index b8dcb7c0d08..c4763933468 100644
--- a/spec/frontend/environments/confirm_rollback_modal_spec.js
+++ b/spec/frontend/environments/confirm_rollback_modal_spec.js
@@ -2,6 +2,7 @@ import { GlModal, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+import { trimText } from 'helpers/text_helper';
import ConfirmRollbackModal from '~/environments/components/confirm_rollback_modal.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
import eventHub from '~/environments/event_hub';
@@ -76,9 +77,9 @@ describe('Confirm Rollback Modal Component', () => {
expect(modal.attributes('title')).toContain('Rollback');
expect(modal.attributes('title')).toContain('test');
- expect(modal.props('actionPrimary').text).toBe('Rollback');
+ expect(modal.props('actionPrimary').text).toBe('Rollback environment');
expect(modal.props('actionPrimary').attributes).toEqual(primaryPropsAttrs);
- expect(modal.text()).toContain('commit abc0123');
+ expect(trimText(modal.text())).toContain('commit abc0123');
expect(modal.text()).toContain('Are you sure you want to continue?');
});
@@ -95,8 +96,8 @@ describe('Confirm Rollback Modal Component', () => {
expect(modal.attributes('title')).toContain('Re-deploy');
expect(modal.attributes('title')).toContain('test');
- expect(modal.props('actionPrimary').text).toBe('Re-deploy');
- expect(modal.text()).toContain('commit abc0123');
+ expect(modal.props('actionPrimary').text).toBe('Re-deploy environment');
+ expect(trimText(modal.text())).toContain('commit abc0123');
expect(modal.text()).toContain('Are you sure you want to continue?');
});
@@ -156,7 +157,7 @@ describe('Confirm Rollback Modal Component', () => {
);
const modal = component.find(GlModal);
- expect(modal.text()).toContain('commit abc0123');
+ expect(trimText(modal.text())).toContain('commit abc0123');
expect(modal.text()).toContain('Are you sure you want to continue?');
});
@@ -180,7 +181,7 @@ describe('Confirm Rollback Modal Component', () => {
expect(modal.attributes('title')).toContain('Rollback');
expect(modal.attributes('title')).toContain('test');
- expect(modal.props('actionPrimary').text).toBe('Rollback');
+ expect(modal.props('actionPrimary').text).toBe('Rollback environment');
expect(modal.props('actionPrimary').attributes).toEqual(primaryPropsAttrs);
});
@@ -204,7 +205,7 @@ describe('Confirm Rollback Modal Component', () => {
expect(modal.attributes('title')).toContain('Re-deploy');
expect(modal.attributes('title')).toContain('test');
- expect(modal.props('actionPrimary').text).toBe('Re-deploy');
+ expect(modal.props('actionPrimary').text).toBe('Re-deploy environment');
});
it('should commit the "rollback" mutation when "ok" is clicked', async () => {
diff --git a/spec/frontend/environments/deploy_board_component_spec.js b/spec/frontend/environments/deploy_board_component_spec.js
index 6bf87f7b07f..4d63648dd48 100644
--- a/spec/frontend/environments/deploy_board_component_spec.js
+++ b/spec/frontend/environments/deploy_board_component_spec.js
@@ -3,11 +3,9 @@ import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import CanaryIngress from '~/environments/components/canary_ingress.vue';
import DeployBoard from '~/environments/components/deploy_board.vue';
-import { deployBoardMockData, environment } from './mock_data';
+import { deployBoardMockData } from './mock_data';
import { rolloutStatus } from './graphql/mock_data';
-const logsPath = `gitlab-org/gitlab-test/-/logs?environment_name=${environment.name}`;
-
describe('Deploy Board', () => {
let wrapper;
@@ -17,7 +15,6 @@ describe('Deploy Board', () => {
deployBoardData: deployBoardMockData,
isLoading: false,
isEmpty: false,
- logsPath,
...props,
},
});
@@ -132,7 +129,6 @@ describe('Deploy Board', () => {
deployBoardData: {},
isLoading: false,
isEmpty: true,
- logsPath,
});
return nextTick();
});
@@ -151,7 +147,6 @@ describe('Deploy Board', () => {
deployBoardData: {},
isLoading: true,
isEmpty: false,
- logsPath,
});
return nextTick();
});
@@ -167,7 +162,6 @@ describe('Deploy Board', () => {
wrapper = createComponent({
isLoading: false,
isEmpty: false,
- logsPath: environment.log_path,
deployBoardData: deployBoardMockData,
});
({ statuses } = wrapper.vm);
diff --git a/spec/frontend/environments/environment_item_spec.js b/spec/frontend/environments/environment_item_spec.js
index 0761d04229c..1c86a66d9b8 100644
--- a/spec/frontend/environments/environment_item_spec.js
+++ b/spec/frontend/environments/environment_item_spec.js
@@ -68,7 +68,7 @@ describe('Environment item', () => {
describe('With deployment', () => {
it('should render deployment internal id', () => {
expect(wrapper.find('.deployment-column span').text()).toContain(
- environment.last_deployment.iid,
+ environment.last_deployment.iid.toString(),
);
expect(wrapper.find('.deployment-column span').text()).toContain('#');
@@ -400,7 +400,7 @@ describe('Environment item', () => {
});
it('should render the number of children in a badge', () => {
- expect(wrapper.find('.folder-name .badge').text()).toContain(folder.size);
+ expect(wrapper.find('.folder-name .badge').text()).toContain(folder.size.toString());
});
it('should not render the "Upcoming deployment" column', () => {
diff --git a/spec/frontend/environments/environment_table_spec.js b/spec/frontend/environments/environment_table_spec.js
index 666e87c748e..aff6b1327f0 100644
--- a/spec/frontend/environments/environment_table_spec.js
+++ b/spec/frontend/environments/environment_table_spec.js
@@ -64,7 +64,6 @@ describe('Environment table', () => {
name: 'review',
size: 1,
environment_path: 'url',
- logs_path: 'url',
id: 1,
hasDeployBoard: true,
deployBoardData: deployBoardMockData,
@@ -92,7 +91,6 @@ describe('Environment table', () => {
name: 'review',
size: 1,
environment_path: 'url',
- logs_path: 'url',
id: 1,
isFolder: true,
isOpen: true,
@@ -161,7 +159,6 @@ describe('Environment table', () => {
name: 'review',
size: 1,
environment_path: 'url',
- logs_path: 'url',
id: 1,
hasDeployBoard: true,
deployBoardData: deployBoardMockData,
diff --git a/spec/frontend/environments/environments_app_spec.js b/spec/frontend/environments/environments_app_spec.js
index 91b75c850bd..57f98c81124 100644
--- a/spec/frontend/environments/environments_app_spec.js
+++ b/spec/frontend/environments/environments_app_spec.js
@@ -204,9 +204,9 @@ describe('~/environments/components/environments_app.vue', () => {
const [available, stopped] = wrapper.findAllByRole('tab').wrappers;
expect(available.text()).toContain(__('Available'));
- expect(available.text()).toContain(resolvedEnvironmentsApp.availableCount);
+ expect(available.text()).toContain(resolvedEnvironmentsApp.availableCount.toString());
expect(stopped.text()).toContain(__('Stopped'));
- expect(stopped.text()).toContain(resolvedEnvironmentsApp.stoppedCount);
+ expect(stopped.text()).toContain(resolvedEnvironmentsApp.stoppedCount.toString());
});
it('should change the requested scope on tab change', async () => {
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index 5e0f0ca9bef..23d448f3964 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -398,6 +398,30 @@ describe('ErrorTrackingList', () => {
});
describe('When pagination is required', () => {
+ describe('and previous cursor is not available', () => {
+ beforeEach(async () => {
+ store.state.list.loading = false;
+ delete store.state.list.pagination.previous;
+ mountComponent();
+ });
+
+ it('disables Prev button in the pagination', async () => {
+ expect(findPagination().props('prevPage')).toBe(null);
+ expect(findPagination().props('nextPage')).not.toBe(null);
+ });
+ });
+ describe('and next cursor is not available', () => {
+ beforeEach(async () => {
+ store.state.list.loading = false;
+ delete store.state.list.pagination.next;
+ mountComponent();
+ });
+
+ it('disables Next button in the pagination', async () => {
+ expect(findPagination().props('prevPage')).not.toBe(null);
+ expect(findPagination().props('nextPage')).toBe(null);
+ });
+ });
describe('and the user is not on the first page', () => {
describe('and the previous button is clicked', () => {
beforeEach(async () => {
diff --git a/spec/frontend/fixtures/api_deploy_keys.rb b/spec/frontend/fixtures/api_deploy_keys.rb
index 7027b8c975b..5ffc726f086 100644
--- a/spec/frontend/fixtures/api_deploy_keys.rb
+++ b/spec/frontend/fixtures/api_deploy_keys.rb
@@ -11,6 +11,7 @@ RSpec.describe API::DeployKeys, '(JavaScript fixtures)', type: :request do
let_it_be(:project2) { create(:project) }
let_it_be(:deploy_key) { create(:deploy_key, public: true) }
let_it_be(:deploy_key2) { create(:deploy_key, public: true) }
+ let_it_be(:deploy_key_without_fingerprint) { create(:deploy_key, :without_md5_fingerprint, public: true) }
let_it_be(:deploy_keys_project) { create(:deploy_keys_project, :write_access, project: project, deploy_key: deploy_key) }
let_it_be(:deploy_keys_project2) { create(:deploy_keys_project, :write_access, project: project2, deploy_key: deploy_key) }
let_it_be(:deploy_keys_project3) { create(:deploy_keys_project, :write_access, project: project, deploy_key: deploy_key2) }
diff --git a/spec/frontend/fixtures/blob.rb b/spec/frontend/fixtures/blob.rb
index af548823886..b2bbdd2749e 100644
--- a/spec/frontend/fixtures/blob.rb
+++ b/spec/frontend/fixtures/blob.rb
@@ -12,7 +12,6 @@ RSpec.describe Projects::BlobController, '(JavaScript fixtures)', type: :control
render_views
before do
- stub_feature_flags(refactor_blob_viewer: false) # This fixture is only used by the legacy (non-refactored) blob viewer
sign_in(user)
allow(SecureRandom).to receive(:hex).and_return('securerandomhex:thereisnospoon')
end
diff --git a/spec/frontend/fixtures/deploy_keys.rb b/spec/frontend/fixtures/deploy_keys.rb
index bed6c798793..154084e0181 100644
--- a/spec/frontend/fixtures/deploy_keys.rb
+++ b/spec/frontend/fixtures/deploy_keys.rb
@@ -27,9 +27,9 @@ RSpec.describe Projects::DeployKeysController, '(JavaScript fixtures)', type: :c
render_views
it 'deploy_keys/keys.json' do
- create(:rsa_deploy_key_2048, public: true)
- project_key = create(:deploy_key, key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQCdMHEHyhRjbhEZVddFn6lTWdgEy5Q6Bz4nwGB76xWZI5YT/1WJOMEW+sL5zYd31kk7sd3FJ5L9ft8zWMWrr/iWXQikC2cqZK24H1xy+ZUmrRuJD4qGAaIVoyyzBL+avL+lF8J5lg6YSw8gwJY/lX64/vnJHUlWw2n5BF8IFOWhiw== dummy@gitlab.com')
- internal_key = create(:deploy_key, key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQDNd/UJWhPrpb+b/G5oL109y57yKuCxE+WUGJGYaj7WQKsYRJmLYh1mgjrl+KVyfsWpq4ylOxIfFSnN9xBBFN8mlb0Fma5DC7YsSsibJr3MZ19ZNBprwNcdogET7aW9I0In7Wu5f2KqI6e5W/spJHCy4JVxzVMUvk6Myab0LnJ2iQ== dummy@gitlab.com')
+ create(:rsa_deploy_key_5120, public: true)
+ project_key = create(:deploy_key)
+ internal_key = create(:deploy_key)
create(:deploy_keys_project, project: project, deploy_key: project_key)
create(:deploy_keys_project, project: project2, deploy_key: internal_key)
create(:deploy_keys_project, project: project3, deploy_key: project_key)
diff --git a/spec/frontend/fixtures/jobs.rb b/spec/frontend/fixtures/jobs.rb
index 3cc87432655..2e15eefdce6 100644
--- a/spec/frontend/fixtures/jobs.rb
+++ b/spec/frontend/fixtures/jobs.rb
@@ -2,40 +2,94 @@
require 'spec_helper'
-RSpec.describe Projects::JobsController, '(JavaScript fixtures)', type: :controller do
+RSpec.describe 'Jobs (JavaScript fixtures)' do
+ include ApiHelpers
include JavaScriptFixturesHelpers
+ include GraphqlHelpers
let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
let(:project) { create(:project, :repository, namespace: namespace, path: 'builds-project') }
let(:user) { project.first_owner }
let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id) }
- let!(:build_with_artifacts) { create(:ci_build, :success, :artifacts, :trace_artifact, pipeline: pipeline, stage: 'test', artifacts_expire_at: Time.now + 18.months) }
- let!(:failed_build) { create(:ci_build, :failed, pipeline: pipeline, stage: 'build') }
- let!(:pending_build) { create(:ci_build, :pending, pipeline: pipeline, stage: 'deploy') }
- let!(:delayed_job) do
- create(:ci_build, :scheduled,
- pipeline: pipeline,
- name: 'delayed job',
- stage: 'test')
+
+ after do
+ remove_repository(project)
end
- render_views
+ describe Projects::JobsController, type: :controller do
+ let!(:delayed) { create(:ci_build, :scheduled, pipeline: pipeline, name: 'delayed job') }
- before do
- sign_in(user)
- end
+ before do
+ sign_in(user)
+ end
- after do
- remove_repository(project)
+ it 'jobs/delayed.json' do
+ get :show, params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: delayed.to_param
+ }, format: :json
+
+ expect(response).to be_successful
+ end
end
- it 'jobs/delayed.json' do
- get :show, params: {
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: delayed_job.to_param
- }, format: :json
+ describe GraphQL::Query, type: :request do
+ let(:artifact) { create(:ci_job_artifact, file_type: :archive, file_format: :zip) }
+
+ let!(:build) { create(:ci_build, :success, name: 'build', pipeline: pipeline) }
+ let!(:cancelable) { create(:ci_build, :cancelable, name: 'cancelable', pipeline: pipeline) }
+ let!(:created_by_tag) { create(:ci_build, :success, name: 'created_by_tag', tag: true, pipeline: pipeline) }
+ let!(:pending) { create(:ci_build, :pending, name: 'pending', pipeline: pipeline) }
+ let!(:playable) { create(:ci_build, :playable, name: 'playable', pipeline: pipeline) }
+ let!(:retryable) { create(:ci_build, :retryable, name: 'retryable', pipeline: pipeline) }
+ let!(:scheduled) { create(:ci_build, :scheduled, name: 'scheduled', pipeline: pipeline) }
+ let!(:with_artifact) { create(:ci_build, :success, name: 'with_artifact', job_artifacts: [artifact], pipeline: pipeline) }
+ let!(:with_coverage) { create(:ci_build, :success, name: 'with_coverage', coverage: 40.0, pipeline: pipeline) }
+
+ fixtures_path = 'graphql/jobs/'
+ get_jobs_query = 'get_jobs.query.graphql'
+ full_path = 'frontend-fixtures/builds-project'
+
+ let_it_be(:query) do
+ get_graphql_query_as_string("jobs/components/table/graphql/queries/#{get_jobs_query}")
+ end
+
+ it "#{fixtures_path}#{get_jobs_query}.json" do
+ post_graphql(query, current_user: user, variables: {
+ fullPath: full_path
+ })
+
+ expect_graphql_errors_to_be_empty
+ end
+
+ it "#{fixtures_path}#{get_jobs_query}.as_guest.json" do
+ guest = create(:user)
+ project.add_guest(guest)
+
+ post_graphql(query, current_user: guest, variables: {
+ fullPath: full_path
+ })
+
+ expect_graphql_errors_to_be_empty
+ end
+
+ it "#{fixtures_path}#{get_jobs_query}.paginated.json" do
+ post_graphql(query, current_user: user, variables: {
+ fullPath: full_path,
+ first: 2
+ })
+
+ expect_graphql_errors_to_be_empty
+ end
+
+ it "#{fixtures_path}#{get_jobs_query}.empty.json" do
+ post_graphql(query, current_user: user, variables: {
+ fullPath: full_path,
+ first: 0
+ })
- expect(response).to be_successful
+ expect_graphql_errors_to_be_empty
+ end
end
end
diff --git a/spec/frontend/fixtures/runner.rb b/spec/frontend/fixtures/runner.rb
index a79982fa647..36281af0219 100644
--- a/spec/frontend/fixtures/runner.rb
+++ b/spec/frontend/fixtures/runner.rb
@@ -29,7 +29,7 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
before do
allow(Gitlab::Ci::RunnerUpgradeCheck.instance)
.to receive(:check_runner_upgrade_status)
- .and_return(:not_available)
+ .and_return({ not_available: nil })
end
describe do
@@ -39,19 +39,19 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
describe GraphQL::Query, type: :request do
- admin_runners_query = 'list/admin_runners.query.graphql'
+ all_runners_query = 'list/all_runners.query.graphql'
let_it_be(:query) do
- get_graphql_query_as_string("#{query_path}#{admin_runners_query}")
+ get_graphql_query_as_string("#{query_path}#{all_runners_query}")
end
- it "#{fixtures_path}#{admin_runners_query}.json" do
+ it "#{fixtures_path}#{all_runners_query}.json" do
post_graphql(query, current_user: admin, variables: {})
expect_graphql_errors_to_be_empty
end
- it "#{fixtures_path}#{admin_runners_query}.paginated.json" do
+ it "#{fixtures_path}#{all_runners_query}.paginated.json" do
post_graphql(query, current_user: admin, variables: { first: 2 })
expect_graphql_errors_to_be_empty
@@ -59,13 +59,13 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
describe GraphQL::Query, type: :request do
- admin_runners_count_query = 'list/admin_runners_count.query.graphql'
+ all_runners_count_query = 'list/all_runners_count.query.graphql'
let_it_be(:query) do
- get_graphql_query_as_string("#{query_path}#{admin_runners_count_query}")
+ get_graphql_query_as_string("#{query_path}#{all_runners_count_query}")
end
- it "#{fixtures_path}#{admin_runners_count_query}.json" do
+ it "#{fixtures_path}#{all_runners_count_query}.json" do
post_graphql(query, current_user: admin, variables: {})
expect_graphql_errors_to_be_empty
diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js
index 552377e3381..072cf34d0ef 100644
--- a/spec/frontend/gfm_auto_complete_spec.js
+++ b/spec/frontend/gfm_auto_complete_spec.js
@@ -738,7 +738,7 @@ describe('GfmAutoComplete', () => {
$textarea.trigger('focus').val(text).caret('pos', -1);
$textarea.trigger('keyup');
- return new Promise(window.requestAnimationFrame);
+ jest.runOnlyPendingTimers();
};
const getDropdownItems = () => {
@@ -747,10 +747,11 @@ describe('GfmAutoComplete', () => {
return [].map.call(items, (item) => item.textContent.trim());
};
- const expectLabels = ({ input, output }) =>
- triggerDropdown(input).then(() => {
- expect(getDropdownItems()).toEqual(output.map((label) => label.title));
- });
+ const expectLabels = ({ input, output }) => {
+ triggerDropdown(input);
+
+ expect(getDropdownItems()).toEqual(output.map((label) => label.title));
+ };
describe('with no labels assigned', () => {
beforeEach(() => {
diff --git a/spec/frontend/gitlab_pages/new/pages/pages_pipeline_wizard_spec.js b/spec/frontend/gitlab_pages/new/pages/pages_pipeline_wizard_spec.js
new file mode 100644
index 00000000000..685b5144a95
--- /dev/null
+++ b/spec/frontend/gitlab_pages/new/pages/pages_pipeline_wizard_spec.js
@@ -0,0 +1,102 @@
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import PagesPipelineWizard, { i18n } from '~/gitlab_pages/components/pages_pipeline_wizard.vue';
+import PipelineWizard from '~/pipeline_wizard/pipeline_wizard.vue';
+import pagesTemplate from '~/pipeline_wizard/templates/pages.yml';
+import pagesMarkOnboardingComplete from '~/gitlab_pages/queries/mark_onboarding_complete.graphql';
+import { redirectTo } from '~/lib/utils/url_utility';
+
+Vue.use(VueApollo);
+
+jest.mock('~/lib/utils/url_utility');
+
+describe('PagesPipelineWizard', () => {
+ const markOnboardingCompleteMutationHandler = jest.fn();
+ let wrapper;
+ const props = {
+ projectPath: '/user/repo',
+ defaultBranch: 'main',
+ redirectToWhenDone: './',
+ };
+
+ const findPipelineWizardWrapper = () => wrapper.findComponent(PipelineWizard);
+ const createMockApolloProvider = () => {
+ return createMockApollo([
+ [
+ pagesMarkOnboardingComplete,
+ markOnboardingCompleteMutationHandler.mockResolvedValue({
+ data: {
+ pagesMarkOnboardingComplete: {
+ onboardingComplete: true,
+ errors: [],
+ },
+ },
+ }),
+ ],
+ ]);
+ };
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(PagesPipelineWizard, {
+ apolloProvider: createMockApolloProvider(),
+ propsData: props,
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('shows the pipeline wizard', () => {
+ expect(findPipelineWizardWrapper().exists()).toBe(true);
+ });
+
+ it('passes the appropriate props', () => {
+ const pipelineWizardWrapperProps = findPipelineWizardWrapper().props();
+
+ expect(pipelineWizardWrapperProps.template).toBe(pagesTemplate);
+ expect(pipelineWizardWrapperProps.projectPath).toBe(props.projectPath);
+ expect(pipelineWizardWrapperProps.defaultBranch).toBe(props.defaultBranch);
+ });
+
+ describe('after the steps are complete', () => {
+ const mockDone = () => findPipelineWizardWrapper().vm.$emit('done');
+
+ it('shows a loading screen during the update', async () => {
+ mockDone();
+
+ await nextTick();
+
+ const loadingScreenWrapper = wrapper.findByTestId('onboarding-mutation-loading');
+ expect(loadingScreenWrapper.exists()).toBe(true);
+ expect(loadingScreenWrapper.text()).toBe(i18n.loadingMessage);
+ });
+
+ it('calls pagesMarkOnboardingComplete mutation when done', async () => {
+ mockDone();
+
+ await waitForPromises();
+
+ expect(markOnboardingCompleteMutationHandler).toHaveBeenCalledWith({
+ input: {
+ projectPath: props.projectPath,
+ },
+ });
+ });
+
+ it('navigates to the path defined in redirectToWhenDone when done', async () => {
+ mockDone();
+
+ await waitForPromises();
+
+ expect(redirectTo).toHaveBeenCalledWith(props.redirectToWhenDone);
+ });
+ });
+});
diff --git a/spec/frontend/google_cloud/components/app_spec.js b/spec/frontend/google_cloud/components/app_spec.js
deleted file mode 100644
index 0cafe6d3b9d..00000000000
--- a/spec/frontend/google_cloud/components/app_spec.js
+++ /dev/null
@@ -1,77 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { mapValues } from 'lodash';
-import App from '~/google_cloud/components/app.vue';
-import Home from '~/google_cloud/components/home.vue';
-import IncubationBanner from '~/google_cloud/components/incubation_banner.vue';
-import ServiceAccountsForm from '~/google_cloud/components/service_accounts_form.vue';
-import GcpError from '~/google_cloud/components/errors/gcp_error.vue';
-import NoGcpProjects from '~/google_cloud/components/errors/no_gcp_projects.vue';
-
-const BASE_FEEDBACK_URL =
- 'https://gitlab.com/gitlab-org/incubation-engineering/five-minute-production/feedback/-/issues/new';
-const SCREEN_COMPONENTS = {
- Home,
- ServiceAccountsForm,
- GcpError,
- NoGcpProjects,
-};
-const SERVICE_ACCOUNTS_FORM_PROPS = {
- gcpProjects: [1, 2, 3],
- refs: [4, 5, 6],
- cancelPath: '',
-};
-const HOME_PROPS = {
- serviceAccounts: [{}, {}],
- gcpRegions: [{}, {}],
- createServiceAccountUrl: '#url-create-service-account',
- configureGcpRegionsUrl: '#url-configure-gcp-regions',
- emptyIllustrationUrl: '#url-empty-illustration',
- enableCloudRunUrl: '#url-enable-cloud-run',
- enableCloudStorageUrl: '#enableCloudStorageUrl',
- revokeOauthUrl: '#revokeOauthUrl',
-};
-
-describe('google_cloud App component', () => {
- let wrapper;
-
- const findIncubationBanner = () => wrapper.findComponent(IncubationBanner);
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe.each`
- screen | extraProps | componentName
- ${'gcp_error'} | ${{ error: 'mock_gcp_client_error' }} | ${'GcpError'}
- ${'no_gcp_projects'} | ${{}} | ${'NoGcpProjects'}
- ${'service_accounts_form'} | ${SERVICE_ACCOUNTS_FORM_PROPS} | ${'ServiceAccountsForm'}
- ${'home'} | ${HOME_PROPS} | ${'Home'}
- `('for screen=$screen', ({ screen, extraProps, componentName }) => {
- const component = SCREEN_COMPONENTS[componentName];
-
- beforeEach(() => {
- wrapper = shallowMount(App, { propsData: { screen, ...extraProps } });
- });
-
- it(`renders only ${componentName}`, () => {
- const existences = mapValues(SCREEN_COMPONENTS, (x) => wrapper.findComponent(x).exists());
-
- expect(existences).toEqual({
- ...mapValues(SCREEN_COMPONENTS, () => false),
- [componentName]: true,
- });
- });
-
- it(`renders the ${componentName} with props`, () => {
- expect(wrapper.findComponent(component).props()).toEqual(extraProps);
- });
-
- it('renders incubation banner', () => {
- expect(findIncubationBanner().props()).toEqual({
- shareFeedbackUrl: `${BASE_FEEDBACK_URL}?issuable_template=general_feedback`,
- reportBugUrl: `${BASE_FEEDBACK_URL}?issuable_template=report_bug`,
- featureRequestUrl: `${BASE_FEEDBACK_URL}?issuable_template=feature_request`,
- });
- });
- });
-});
diff --git a/spec/frontend/google_cloud/components/errors/gcp_error_spec.js b/spec/frontend/google_cloud/components/errors/gcp_error_spec.js
deleted file mode 100644
index 4062a8b902a..00000000000
--- a/spec/frontend/google_cloud/components/errors/gcp_error_spec.js
+++ /dev/null
@@ -1,34 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlAlert } from '@gitlab/ui';
-import GcpError from '~/google_cloud/components/errors/gcp_error.vue';
-
-describe('GcpError component', () => {
- let wrapper;
-
- const findAlert = () => wrapper.findComponent(GlAlert);
- const findBlockquote = () => wrapper.find('blockquote');
-
- const propsData = { error: 'IAM and CloudResourceManager API disabled' };
-
- beforeEach(() => {
- wrapper = shallowMount(GcpError, { propsData });
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('contains alert', () => {
- expect(findAlert().exists()).toBe(true);
- });
-
- it('contains relevant text', () => {
- const alertText = findAlert().text();
- expect(findAlert().props('title')).toBe(GcpError.i18n.title);
- expect(alertText).toContain(GcpError.i18n.description);
- });
-
- it('contains error stacktrace', () => {
- expect(findBlockquote().text()).toBe(propsData.error);
- });
-});
diff --git a/spec/frontend/google_cloud/components/errors/no_gcp_projects_spec.js b/spec/frontend/google_cloud/components/errors/no_gcp_projects_spec.js
deleted file mode 100644
index e1e20377880..00000000000
--- a/spec/frontend/google_cloud/components/errors/no_gcp_projects_spec.js
+++ /dev/null
@@ -1,33 +0,0 @@
-import { mount } from '@vue/test-utils';
-import { GlAlert, GlButton } from '@gitlab/ui';
-import NoGcpProjects from '~/google_cloud/components/errors/no_gcp_projects.vue';
-
-describe('NoGcpProjects component', () => {
- let wrapper;
-
- const findAlert = () => wrapper.findComponent(GlAlert);
- const findButton = () => wrapper.findComponent(GlButton);
-
- beforeEach(() => {
- wrapper = mount(NoGcpProjects);
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('contains alert', () => {
- expect(findAlert().exists()).toBe(true);
- });
-
- it('contains relevant text', () => {
- expect(findAlert().props('title')).toBe(NoGcpProjects.i18n.title);
- expect(findAlert().text()).toContain(NoGcpProjects.i18n.description);
- });
-
- it('contains create gcp project button', () => {
- const button = findButton();
- expect(button.text()).toBe(NoGcpProjects.i18n.createLabel);
- expect(button.attributes('href')).toBe('https://console.cloud.google.com/projectcreate');
- });
-});
diff --git a/spec/frontend/google_cloud/components/google_cloud_menu_spec.js b/spec/frontend/google_cloud/components/google_cloud_menu_spec.js
new file mode 100644
index 00000000000..4809ea37045
--- /dev/null
+++ b/spec/frontend/google_cloud/components/google_cloud_menu_spec.js
@@ -0,0 +1,40 @@
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import GoogleCloudMenu from '~/google_cloud/components/google_cloud_menu.vue';
+
+describe('google_cloud/components/google_cloud_menu', () => {
+ let wrapper;
+
+ const props = {
+ active: 'configuration',
+ configurationUrl: 'configuration-url',
+ deploymentsUrl: 'deployments-url',
+ databasesUrl: 'databases-url',
+ };
+
+ beforeEach(() => {
+ wrapper = mountExtended(GoogleCloudMenu, { propsData: props });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('contains active configuration link', () => {
+ const link = wrapper.findByTestId('configurationLink');
+ expect(link.text()).toBe(GoogleCloudMenu.i18n.configuration.title);
+ expect(link.attributes('href')).toBe(props.configurationUrl);
+ expect(link.element.classList.contains('gl-tab-nav-item-active')).toBe(true);
+ });
+
+ it('contains deployments link', () => {
+ const link = wrapper.findByTestId('deploymentsLink');
+ expect(link.text()).toBe(GoogleCloudMenu.i18n.deployments.title);
+ expect(link.attributes('href')).toBe(props.deploymentsUrl);
+ });
+
+ it('contains databases link', () => {
+ const link = wrapper.findByTestId('databasesLink');
+ expect(link.text()).toBe(GoogleCloudMenu.i18n.databases.title);
+ expect(link.attributes('href')).toBe(props.databasesUrl);
+ });
+});
diff --git a/spec/frontend/google_cloud/components/home_spec.js b/spec/frontend/google_cloud/components/home_spec.js
deleted file mode 100644
index 42e3d72577d..00000000000
--- a/spec/frontend/google_cloud/components/home_spec.js
+++ /dev/null
@@ -1,66 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlTab, GlTabs } from '@gitlab/ui';
-import Home from '~/google_cloud/components/home.vue';
-import ServiceAccountsList from '~/google_cloud/components/service_accounts_list.vue';
-
-describe('google_cloud Home component', () => {
- let wrapper;
-
- const findTabs = () => wrapper.findComponent(GlTabs);
- const findTabItems = () => findTabs().findAllComponents(GlTab);
- const findTabItemsModel = () =>
- findTabs()
- .findAllComponents(GlTab)
- .wrappers.map((x) => ({
- title: x.attributes('title'),
- disabled: x.attributes('disabled'),
- }));
-
- const TEST_HOME_PROPS = {
- serviceAccounts: [{}, {}],
- gcpRegions: [{}, {}],
- createServiceAccountUrl: '#url-create-service-account',
- configureGcpRegionsUrl: '#url-configure-gcp-regions',
- emptyIllustrationUrl: '#url-empty-illustration',
- enableCloudRunUrl: '#url-enable-cloud-run',
- enableCloudStorageUrl: '#enableCloudStorageUrl',
- revokeOauthUrl: '#revokeOauthUrl',
- };
-
- beforeEach(() => {
- const propsData = {
- screen: 'home',
- ...TEST_HOME_PROPS,
- };
- wrapper = shallowMount(Home, { propsData });
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('google_cloud App tabs', () => {
- it('should contain tabs', () => {
- expect(findTabs().exists()).toBe(true);
- });
-
- it('should contain three tab items', () => {
- expect(findTabItemsModel()).toEqual([
- { title: 'Configuration', disabled: undefined },
- { title: 'Deployments', disabled: undefined },
- { title: 'Services', disabled: '' },
- ]);
- });
-
- describe('configuration tab', () => {
- it('should contain service accounts component', () => {
- const serviceAccounts = findTabItems().at(0).findComponent(ServiceAccountsList);
- expect(serviceAccounts.props()).toEqual({
- list: TEST_HOME_PROPS.serviceAccounts,
- createUrl: TEST_HOME_PROPS.createServiceAccountUrl,
- emptyIllustrationUrl: TEST_HOME_PROPS.emptyIllustrationUrl,
- });
- });
- });
- });
-});
diff --git a/spec/frontend/google_cloud/components/incubation_banner_spec.js b/spec/frontend/google_cloud/components/incubation_banner_spec.js
index 89517be4ef1..09a4d92dca2 100644
--- a/spec/frontend/google_cloud/components/incubation_banner_spec.js
+++ b/spec/frontend/google_cloud/components/incubation_banner_spec.js
@@ -2,7 +2,7 @@ import { mount } from '@vue/test-utils';
import { GlAlert, GlLink } from '@gitlab/ui';
import IncubationBanner from '~/google_cloud/components/incubation_banner.vue';
-describe('IncubationBanner component', () => {
+describe('google_cloud/components/incubation_banner', () => {
let wrapper;
const findAlert = () => wrapper.findComponent(GlAlert);
@@ -12,12 +12,7 @@ describe('IncubationBanner component', () => {
const findShareFeedbackLink = () => findLinks().at(2);
beforeEach(() => {
- const propsData = {
- shareFeedbackUrl: 'url_general_feedback',
- reportBugUrl: 'url_report_bug',
- featureRequestUrl: 'url_feature_request',
- };
- wrapper = mount(IncubationBanner, { propsData });
+ wrapper = mount(IncubationBanner);
});
afterEach(() => {
@@ -41,20 +36,26 @@ describe('IncubationBanner component', () => {
it('contains feature request link', () => {
const link = findFeatureRequestLink();
+ const expected =
+ 'https://gitlab.com/gitlab-org/incubation-engineering/five-minute-production/feedback/-/issues/new?issuable_template=feature_request';
expect(link.text()).toBe('request a feature');
- expect(link.attributes('href')).toBe('url_feature_request');
+ expect(link.attributes('href')).toBe(expected);
});
it('contains report bug link', () => {
const link = findReportBugLink();
+ const expected =
+ 'https://gitlab.com/gitlab-org/incubation-engineering/five-minute-production/feedback/-/issues/new?issuable_template=report_bug';
expect(link.text()).toBe('report a bug');
- expect(link.attributes('href')).toBe('url_report_bug');
+ expect(link.attributes('href')).toBe(expected);
});
it('contains share feedback link', () => {
const link = findShareFeedbackLink();
+ const expected =
+ 'https://gitlab.com/gitlab-org/incubation-engineering/five-minute-production/feedback/-/issues/new?issuable_template=general_feedback';
expect(link.text()).toBe('share feedback');
- expect(link.attributes('href')).toBe('url_general_feedback');
+ expect(link.attributes('href')).toBe(expected);
});
});
});
diff --git a/spec/frontend/google_cloud/components/revoke_oauth_spec.js b/spec/frontend/google_cloud/components/revoke_oauth_spec.js
index 87580dbf6de..faaec07fc35 100644
--- a/spec/frontend/google_cloud/components/revoke_oauth_spec.js
+++ b/spec/frontend/google_cloud/components/revoke_oauth_spec.js
@@ -5,7 +5,7 @@ import RevokeOauth, {
GOOGLE_CLOUD_REVOKE_DESCRIPTION,
} from '~/google_cloud/components/revoke_oauth.vue';
-describe('RevokeOauth component', () => {
+describe('google_cloud/components/revoke_oauth', () => {
let wrapper;
const findTitle = () => wrapper.find('h2');
diff --git a/spec/frontend/google_cloud/configuration/panel_spec.js b/spec/frontend/google_cloud/configuration/panel_spec.js
new file mode 100644
index 00000000000..79eb4cb4918
--- /dev/null
+++ b/spec/frontend/google_cloud/configuration/panel_spec.js
@@ -0,0 +1,65 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import Panel from '~/google_cloud/configuration/panel.vue';
+import IncubationBanner from '~/google_cloud/components/incubation_banner.vue';
+import GoogleCloudMenu from '~/google_cloud/components/google_cloud_menu.vue';
+import ServiceAccountsList from '~/google_cloud/service_accounts/list.vue';
+import GcpRegionsList from '~/google_cloud/gcp_regions/list.vue';
+import RevokeOauth from '~/google_cloud/components/revoke_oauth.vue';
+
+describe('google_cloud/configuration/panel', () => {
+ let wrapper;
+
+ const props = {
+ configurationUrl: 'configuration-url',
+ deploymentsUrl: 'deployments-url',
+ databasesUrl: 'databases-url',
+ serviceAccounts: [],
+ createServiceAccountUrl: 'create-service-account-url',
+ emptyIllustrationUrl: 'empty-illustration-url',
+ gcpRegions: [],
+ configureGcpRegionsUrl: 'configure-gcp-regions-url',
+ revokeOauthUrl: 'revoke-oauth-url',
+ };
+
+ beforeEach(() => {
+ wrapper = shallowMountExtended(Panel, { propsData: props });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('contains incubation banner', () => {
+ const target = wrapper.findComponent(IncubationBanner);
+ expect(target.exists()).toBe(true);
+ });
+
+ it('contains google cloud menu with `configuration` active', () => {
+ const target = wrapper.findComponent(GoogleCloudMenu);
+ expect(target.exists()).toBe(true);
+ expect(target.props('active')).toBe('configuration');
+ expect(target.props('configurationUrl')).toBe(props.configurationUrl);
+ expect(target.props('deploymentsUrl')).toBe(props.deploymentsUrl);
+ expect(target.props('databasesUrl')).toBe(props.databasesUrl);
+ });
+
+ it('contains service accounts list', () => {
+ const target = wrapper.findComponent(ServiceAccountsList);
+ expect(target.exists()).toBe(true);
+ expect(target.props('list')).toBe(props.serviceAccounts);
+ expect(target.props('createUrl')).toBe(props.createServiceAccountUrl);
+ expect(target.props('emptyIllustrationUrl')).toBe(props.emptyIllustrationUrl);
+ });
+
+ it('contains gcp regions list', () => {
+ const target = wrapper.findComponent(GcpRegionsList);
+ expect(target.props('list')).toBe(props.gcpRegions);
+ expect(target.props('createUrl')).toBe(props.configureGcpRegionsUrl);
+ expect(target.props('emptyIllustrationUrl')).toBe(props.emptyIllustrationUrl);
+ });
+
+ it('contains revoke oauth', () => {
+ const target = wrapper.findComponent(RevokeOauth);
+ expect(target.props('url')).toBe(props.revokeOauthUrl);
+ });
+});
diff --git a/spec/frontend/google_cloud/databases/cloudsql/create_instance_form_spec.js b/spec/frontend/google_cloud/databases/cloudsql/create_instance_form_spec.js
new file mode 100644
index 00000000000..48e4b0ca1ad
--- /dev/null
+++ b/spec/frontend/google_cloud/databases/cloudsql/create_instance_form_spec.js
@@ -0,0 +1,103 @@
+import { GlFormCheckbox } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import InstanceForm from '~/google_cloud/databases/cloudsql/create_instance_form.vue';
+
+describe('google_cloud/databases/cloudsql/create_instance_form', () => {
+ let wrapper;
+
+ const findByTestId = (id) => wrapper.findByTestId(id);
+ const findCancelButton = () => findByTestId('cancel-button');
+ const findCheckbox = () => wrapper.findComponent(GlFormCheckbox);
+ const findHeader = () => wrapper.find('header');
+ const findSubmitButton = () => findByTestId('submit-button');
+
+ const propsData = {
+ gcpProjects: [],
+ refs: [],
+ cancelPath: '#cancel-url',
+ formTitle: 'mock form title',
+ formDescription: 'mock form description',
+ databaseVersions: [],
+ tiers: [],
+ };
+
+ beforeEach(() => {
+ wrapper = shallowMountExtended(InstanceForm, { propsData, stubs: { GlFormCheckbox } });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('contains header', () => {
+ expect(findHeader().exists()).toBe(true);
+ });
+
+ it('contains GCP project form group', () => {
+ const formGroup = findByTestId('form_group_gcp_project');
+ expect(formGroup.exists()).toBe(true);
+ expect(formGroup.attributes('label')).toBe(InstanceForm.i18n.gcpProjectLabel);
+ expect(formGroup.attributes('description')).toBe(InstanceForm.i18n.gcpProjectDescription);
+ });
+
+ it('contains GCP project dropdown', () => {
+ const select = findByTestId('select_gcp_project');
+ expect(select.exists()).toBe(true);
+ });
+
+ it('contains Environments form group', () => {
+ const formGroup = findByTestId('form_group_environments');
+ expect(formGroup.exists()).toBe(true);
+ expect(formGroup.attributes('label')).toBe(InstanceForm.i18n.refsLabel);
+ expect(formGroup.attributes('description')).toBe(InstanceForm.i18n.refsDescription);
+ });
+
+ it('contains Environments dropdown', () => {
+ const select = findByTestId('select_environments');
+ expect(select.exists()).toBe(true);
+ });
+
+ it('contains Tier form group', () => {
+ const formGroup = findByTestId('form_group_tier');
+ expect(formGroup.exists()).toBe(true);
+ expect(formGroup.attributes('label')).toBe(InstanceForm.i18n.tierLabel);
+ expect(formGroup.attributes('description')).toBe(InstanceForm.i18n.tierDescription);
+ });
+
+ it('contains Tier dropdown', () => {
+ const select = findByTestId('select_tier');
+ expect(select.exists()).toBe(true);
+ });
+
+ it('contains Database Version form group', () => {
+ const formGroup = findByTestId('form_group_database_version');
+ expect(formGroup.exists()).toBe(true);
+ expect(formGroup.attributes('label')).toBe(InstanceForm.i18n.databaseVersionLabel);
+ });
+
+ it('contains Database Version dropdown', () => {
+ const select = findByTestId('select_database_version');
+ expect(select.exists()).toBe(true);
+ });
+
+ it('contains Submit button', () => {
+ expect(findSubmitButton().exists()).toBe(true);
+ expect(findSubmitButton().text()).toBe(InstanceForm.i18n.submitLabel);
+ });
+
+ it('contains Cancel button', () => {
+ expect(findCancelButton().exists()).toBe(true);
+ expect(findCancelButton().text()).toBe(InstanceForm.i18n.cancelLabel);
+ expect(findCancelButton().attributes('href')).toBe('#cancel-url');
+ });
+
+ it('contains Confirmation checkbox', () => {
+ const checkbox = findCheckbox();
+ expect(checkbox.text()).toBe(InstanceForm.i18n.checkboxLabel);
+ });
+
+ it('checkbox must be required', () => {
+ const checkbox = findCheckbox();
+ expect(checkbox.attributes('required')).toBe('true');
+ });
+});
diff --git a/spec/frontend/google_cloud/databases/cloudsql/instance_table_spec.js b/spec/frontend/google_cloud/databases/cloudsql/instance_table_spec.js
new file mode 100644
index 00000000000..a5736d0a524
--- /dev/null
+++ b/spec/frontend/google_cloud/databases/cloudsql/instance_table_spec.js
@@ -0,0 +1,65 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlEmptyState, GlTable } from '@gitlab/ui';
+import InstanceTable from '~/google_cloud/databases/cloudsql/instance_table.vue';
+
+describe('google_cloud/databases/cloudsql/instance_table', () => {
+ let wrapper;
+
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findTable = () => wrapper.findComponent(GlTable);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when there are no instances', () => {
+ beforeEach(() => {
+ const propsData = {
+ cloudsqlInstances: [],
+ emptyIllustrationUrl: '#empty-illustration-url',
+ };
+ wrapper = shallowMount(InstanceTable, { propsData });
+ });
+
+ it('should depict empty state', () => {
+ const emptyState = findEmptyState();
+ expect(emptyState.exists()).toBe(true);
+ expect(emptyState.attributes('title')).toBe(InstanceTable.i18n.noInstancesTitle);
+ expect(emptyState.attributes('description')).toBe(InstanceTable.i18n.noInstancesDescription);
+ });
+ });
+
+ describe('when there are three instances', () => {
+ beforeEach(() => {
+ const propsData = {
+ cloudsqlInstances: [
+ {
+ ref: '*',
+ gcp_project: 'test-gcp-project',
+ instance_name: 'postgres-14-instance',
+ version: 'POSTGRES_14',
+ },
+ {
+ ref: 'production',
+ gcp_project: 'prod-gcp-project',
+ instance_name: 'postgres-14-instance',
+ version: 'POSTGRES_14',
+ },
+ {
+ ref: 'staging',
+ gcp_project: 'test-gcp-project',
+ instance_name: 'postgres-14-instance',
+ version: 'POSTGRES_14',
+ },
+ ],
+ emptyIllustrationUrl: '#empty-illustration-url',
+ };
+ wrapper = shallowMount(InstanceTable, { propsData });
+ });
+
+ it('should contain a table', () => {
+ const table = findTable();
+ expect(table.exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/google_cloud/databases/panel_spec.js b/spec/frontend/google_cloud/databases/panel_spec.js
new file mode 100644
index 00000000000..490c0136651
--- /dev/null
+++ b/spec/frontend/google_cloud/databases/panel_spec.js
@@ -0,0 +1,36 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import Panel from '~/google_cloud/databases/panel.vue';
+import IncubationBanner from '~/google_cloud/components/incubation_banner.vue';
+import GoogleCloudMenu from '~/google_cloud/components/google_cloud_menu.vue';
+
+describe('google_cloud/databases/panel', () => {
+ let wrapper;
+
+ const props = {
+ configurationUrl: 'configuration-url',
+ deploymentsUrl: 'deployments-url',
+ databasesUrl: 'databases-url',
+ };
+
+ beforeEach(() => {
+ wrapper = shallowMountExtended(Panel, { propsData: props });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('contains incubation banner', () => {
+ const target = wrapper.findComponent(IncubationBanner);
+ expect(target.exists()).toBe(true);
+ });
+
+ it('contains google cloud menu with `databases` active', () => {
+ const target = wrapper.findComponent(GoogleCloudMenu);
+ expect(target.exists()).toBe(true);
+ expect(target.props('active')).toBe('databases');
+ expect(target.props('configurationUrl')).toBe(props.configurationUrl);
+ expect(target.props('deploymentsUrl')).toBe(props.deploymentsUrl);
+ expect(target.props('databasesUrl')).toBe(props.databasesUrl);
+ });
+});
diff --git a/spec/frontend/google_cloud/databases/service_table_spec.js b/spec/frontend/google_cloud/databases/service_table_spec.js
new file mode 100644
index 00000000000..4a622e544e1
--- /dev/null
+++ b/spec/frontend/google_cloud/databases/service_table_spec.js
@@ -0,0 +1,44 @@
+import { GlTable } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import ServiceTable from '~/google_cloud/databases/service_table.vue';
+
+describe('google_cloud/databases/service_table', () => {
+ let wrapper;
+
+ const findTable = () => wrapper.findComponent(GlTable);
+
+ beforeEach(() => {
+ const propsData = {
+ cloudsqlPostgresUrl: '#url-cloudsql-postgres',
+ cloudsqlMysqlUrl: '#url-cloudsql-mysql',
+ cloudsqlSqlserverUrl: '#url-cloudsql-sqlserver',
+ alloydbPostgresUrl: '#url-alloydb-postgres',
+ memorystoreRedisUrl: '#url-memorystore-redis',
+ firestoreUrl: '#url-firestore',
+ };
+ wrapper = mountExtended(ServiceTable, { propsData });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should contain a table', () => {
+ expect(findTable().exists()).toBe(true);
+ });
+
+ it.each`
+ name | testId | url
+ ${'cloudsql-postgres'} | ${'button-cloudsql-postgres'} | ${'#url-cloudsql-postgres'}
+ ${'cloudsql-mysql'} | ${'button-cloudsql-mysql'} | ${'#url-cloudsql-mysql'}
+ ${'cloudsql-sqlserver'} | ${'button-cloudsql-sqlserver'} | ${'#url-cloudsql-sqlserver'}
+ ${'alloydb-postgres'} | ${'button-alloydb-postgres'} | ${'#url-alloydb-postgres'}
+ ${'memorystore-redis'} | ${'button-memorystore-redis'} | ${'#url-memorystore-redis'}
+ ${'firestore'} | ${'button-firestore'} | ${'#url-firestore'}
+ `('renders $name button with correct url', ({ testId, url }) => {
+ const button = wrapper.findByTestId(testId);
+
+ expect(button.exists()).toBe(true);
+ expect(button.attributes('href')).toBe(url);
+ });
+});
diff --git a/spec/frontend/google_cloud/deployments/panel_spec.js b/spec/frontend/google_cloud/deployments/panel_spec.js
new file mode 100644
index 00000000000..729db1707a7
--- /dev/null
+++ b/spec/frontend/google_cloud/deployments/panel_spec.js
@@ -0,0 +1,46 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import Panel from '~/google_cloud/deployments/panel.vue';
+import IncubationBanner from '~/google_cloud/components/incubation_banner.vue';
+import GoogleCloudMenu from '~/google_cloud/components/google_cloud_menu.vue';
+import ServiceTable from '~/google_cloud/deployments/service_table.vue';
+
+describe('google_cloud/deployments/panel', () => {
+ let wrapper;
+
+ const props = {
+ configurationUrl: 'configuration-url',
+ deploymentsUrl: 'deployments-url',
+ databasesUrl: 'databases-url',
+ enableCloudRunUrl: 'cloud-run-url',
+ enableCloudStorageUrl: 'cloud-storage-url',
+ };
+
+ beforeEach(() => {
+ wrapper = shallowMountExtended(Panel, { propsData: props });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('contains incubation banner', () => {
+ const target = wrapper.findComponent(IncubationBanner);
+ expect(target.exists()).toBe(true);
+ });
+
+ it('contains google cloud menu with `deployments` active', () => {
+ const target = wrapper.findComponent(GoogleCloudMenu);
+ expect(target.exists()).toBe(true);
+ expect(target.props('active')).toBe('deployments');
+ expect(target.props('configurationUrl')).toBe(props.configurationUrl);
+ expect(target.props('deploymentsUrl')).toBe(props.deploymentsUrl);
+ expect(target.props('databasesUrl')).toBe(props.databasesUrl);
+ });
+
+ it('contains service-table', () => {
+ const target = wrapper.findComponent(ServiceTable);
+ expect(target.exists()).toBe(true);
+ expect(target.props('cloudRunUrl')).toBe(props.enableCloudRunUrl);
+ expect(target.props('cloudStorageUrl')).toBe(props.enableCloudStorageUrl);
+ });
+});
diff --git a/spec/frontend/google_cloud/components/deployments_service_table_spec.js b/spec/frontend/google_cloud/deployments/service_table_spec.js
index 882376547c4..8faad64e313 100644
--- a/spec/frontend/google_cloud/components/deployments_service_table_spec.js
+++ b/spec/frontend/google_cloud/deployments/service_table_spec.js
@@ -1,8 +1,8 @@
import { mount } from '@vue/test-utils';
import { GlButton, GlTable } from '@gitlab/ui';
-import DeploymentsServiceTable from '~/google_cloud/components/deployments_service_table.vue';
+import DeploymentsServiceTable from '~/google_cloud/deployments/service_table.vue';
-describe('google_cloud DeploymentsServiceTable component', () => {
+describe('google_cloud/deployments/service_table', () => {
let wrapper;
const findTable = () => wrapper.findComponent(GlTable);
diff --git a/spec/frontend/google_cloud/components/gcp_regions_form_spec.js b/spec/frontend/google_cloud/gcp_regions/form_spec.js
index a8b7593e7c8..1030e9c8a18 100644
--- a/spec/frontend/google_cloud/components/gcp_regions_form_spec.js
+++ b/spec/frontend/google_cloud/gcp_regions/form_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
import { GlButton, GlFormGroup, GlFormSelect } from '@gitlab/ui';
-import GcpRegionsForm from '~/google_cloud/components/gcp_regions_form.vue';
+import GcpRegionsForm from '~/google_cloud/gcp_regions/form.vue';
-describe('GcpRegionsForm component', () => {
+describe('google_cloud/gcp_regions/form', () => {
let wrapper;
const findHeader = () => wrapper.find('header');
diff --git a/spec/frontend/google_cloud/components/gcp_regions_list_spec.js b/spec/frontend/google_cloud/gcp_regions/list_spec.js
index ab0c17451e8..6d8c389e5a1 100644
--- a/spec/frontend/google_cloud/components/gcp_regions_list_spec.js
+++ b/spec/frontend/google_cloud/gcp_regions/list_spec.js
@@ -1,8 +1,8 @@
import { mount } from '@vue/test-utils';
import { GlButton, GlEmptyState, GlTable } from '@gitlab/ui';
-import GcpRegionsList from '~/google_cloud/components/gcp_regions_list.vue';
+import GcpRegionsList from '~/google_cloud/gcp_regions/list.vue';
-describe('GcpRegions component', () => {
+describe('google_cloud/gcp_regions/list', () => {
describe('when the project does not have any configured regions', () => {
let wrapper;
diff --git a/spec/frontend/google_cloud/components/service_accounts_form_spec.js b/spec/frontend/google_cloud/service_accounts/form_spec.js
index 38602d4e8cc..8be481774fa 100644
--- a/spec/frontend/google_cloud/components/service_accounts_form_spec.js
+++ b/spec/frontend/google_cloud/service_accounts/form_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
import { GlButton, GlFormGroup, GlFormSelect, GlFormCheckbox } from '@gitlab/ui';
-import ServiceAccountsForm from '~/google_cloud/components/service_accounts_form.vue';
+import ServiceAccountsForm from '~/google_cloud/service_accounts/form.vue';
-describe('ServiceAccountsForm component', () => {
+describe('google_cloud/service_accounts/form', () => {
let wrapper;
const findHeader = () => wrapper.find('header');
diff --git a/spec/frontend/google_cloud/components/service_accounts_list_spec.js b/spec/frontend/google_cloud/service_accounts/list_spec.js
index f7051c8a53d..7a76a893757 100644
--- a/spec/frontend/google_cloud/components/service_accounts_list_spec.js
+++ b/spec/frontend/google_cloud/service_accounts/list_spec.js
@@ -1,8 +1,8 @@
import { mount } from '@vue/test-utils';
import { GlAlert, GlButton, GlEmptyState, GlTable } from '@gitlab/ui';
-import ServiceAccountsList from '~/google_cloud/components/service_accounts_list.vue';
+import ServiceAccountsList from '~/google_cloud/service_accounts/list.vue';
-describe('ServiceAccounts component', () => {
+describe('google_cloud/service_accounts/list', () => {
describe('when the project does not have any service accounts', () => {
let wrapper;
diff --git a/spec/frontend/google_tag_manager/index_spec.js b/spec/frontend/google_tag_manager/index_spec.js
index 50811f43fc3..6a7eb1fd9f1 100644
--- a/spec/frontend/google_tag_manager/index_spec.js
+++ b/spec/frontend/google_tag_manager/index_spec.js
@@ -10,6 +10,7 @@ import {
trackSaasTrialGroup,
trackSaasTrialProject,
trackSaasTrialGetStarted,
+ trackTrialAcceptTerms,
trackCheckout,
trackTransaction,
trackAddToCartUsageTab,
@@ -255,6 +256,16 @@ describe('~/google_tag_manager/index', () => {
expect(logError).not.toHaveBeenCalled();
});
+ it('when trackTrialAcceptTerms is invoked', () => {
+ expect(spy).not.toHaveBeenCalled();
+
+ trackTrialAcceptTerms();
+
+ expect(spy).toHaveBeenCalledTimes(1);
+ expect(spy).toHaveBeenCalledWith({ event: 'saasTrialAcceptTerms' });
+ expect(logError).not.toHaveBeenCalled();
+ });
+
describe('when trackCheckout is invoked', () => {
it('with selectedPlan: 2c92a00d76f0d5060176f2fb0a5029ff', () => {
expect(spy).not.toHaveBeenCalled();
diff --git a/spec/frontend/groups/components/group_folder_spec.js b/spec/frontend/groups/components/group_folder_spec.js
index 98b7c2dd6c6..f223333360d 100644
--- a/spec/frontend/groups/components/group_folder_spec.js
+++ b/spec/frontend/groups/components/group_folder_spec.js
@@ -1,65 +1,50 @@
-import Vue, { nextTick } from 'vue';
-
-import groupFolderComponent from '~/groups/components/group_folder.vue';
-import groupItemComponent from '~/groups/components/group_item.vue';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import GroupFolder from '~/groups/components/group_folder.vue';
+import GroupItem from '~/groups/components/group_item.vue';
+import { MAX_CHILDREN_COUNT } from '~/groups/constants';
import { mockGroups, mockParentGroupItem } from '../mock_data';
-const createComponent = (groups = mockGroups, parentGroup = mockParentGroupItem) => {
- const Component = Vue.extend(groupFolderComponent);
-
- return new Component({
- propsData: {
- groups,
- parentGroup,
- },
- });
-};
+describe('GroupFolder component', () => {
+ let wrapper;
-describe('GroupFolderComponent', () => {
- let vm;
+ Vue.component('GroupItem', GroupItem);
- beforeEach(async () => {
- Vue.component('GroupItem', groupItemComponent);
+ const findLink = () => wrapper.find('a');
- vm = createComponent();
- vm.$mount();
-
- await nextTick();
- });
+ const createComponent = ({ groups = mockGroups, parentGroup = mockParentGroupItem } = {}) =>
+ shallowMount(GroupFolder, {
+ propsData: {
+ groups,
+ parentGroup,
+ },
+ });
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- describe('computed', () => {
- describe('hasMoreChildren', () => {
- it('should return false when childrenCount of group is less than MAX_CHILDREN_COUNT', () => {
- expect(vm.hasMoreChildren).toBeFalsy();
- });
- });
+ it('does not render more children stats link when children count of group is under limit', () => {
+ wrapper = createComponent();
- describe('moreChildrenStats', () => {
- it('should return message with count of excess children over MAX_CHILDREN_COUNT limit', () => {
- expect(vm.moreChildrenStats).toBe('3 more items');
- });
- });
+ expect(findLink().exists()).toBe(false);
});
- describe('template', () => {
- it('should render component template correctly', () => {
- expect(vm.$el.classList.contains('group-list-tree')).toBeTruthy();
- expect(vm.$el.querySelectorAll('li.group-row').length).toBe(7);
+ it('renders text of count of excess children when children count of group is over limit', () => {
+ const childrenCount = MAX_CHILDREN_COUNT + 1;
+ wrapper = createComponent({
+ parentGroup: {
+ ...mockParentGroupItem,
+ childrenCount,
+ },
});
- it('should render more children link when groups list has children over MAX_CHILDREN_COUNT limit', () => {
- const parentGroup = { ...mockParentGroupItem };
- parentGroup.childrenCount = 21;
+ expect(findLink().text()).toBe(`${childrenCount} more items`);
+ });
- const newVm = createComponent(mockGroups, parentGroup);
- newVm.$mount();
+ it('renders group items', () => {
+ wrapper = createComponent();
- expect(newVm.$el.querySelector('li.group-row a.has-more-items')).toBeDefined();
- newVm.$destroy();
- });
+ expect(wrapper.findAllComponents(GroupItem)).toHaveLength(7);
});
});
diff --git a/spec/frontend/groups/components/group_item_spec.js b/spec/frontend/groups/components/group_item_spec.js
index 8ea7e54aef4..0bc80df6535 100644
--- a/spec/frontend/groups/components/group_item_spec.js
+++ b/spec/frontend/groups/components/group_item_spec.js
@@ -1,4 +1,4 @@
-import { mount } from '@vue/test-utils';
+import { GlPopover } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import GroupFolder from '~/groups/components/group_folder.vue';
import GroupItem from '~/groups/components/group_item.vue';
@@ -6,14 +6,25 @@ import ItemActions from '~/groups/components/item_actions.vue';
import eventHub from '~/groups/event_hub';
import { getGroupItemMicrodata } from '~/groups/store/utils';
import * as urlUtilities from '~/lib/utils/url_utility';
+import {
+ ITEM_TYPE,
+ VISIBILITY_INTERNAL,
+ VISIBILITY_PRIVATE,
+ VISIBILITY_PUBLIC,
+} from '~/groups/constants';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import { mockParentGroupItem, mockChildren } from '../mock_data';
const createComponent = (
propsData = { group: mockParentGroupItem, parentGroup: mockChildren[0] },
+ provide = {
+ currentGroupVisibility: VISIBILITY_PRIVATE,
+ },
) => {
- return mount(GroupItem, {
+ return mountExtended(GroupItem, {
propsData,
components: { GroupFolder },
+ provide,
});
};
@@ -276,4 +287,90 @@ describe('GroupItemComponent', () => {
});
});
});
+
+ describe('visibility warning popover', () => {
+ const findPopover = () => wrapper.findComponent(GlPopover);
+
+ const itDoesNotRenderVisibilityWarningPopover = () => {
+ it('does not render visibility warning popover', () => {
+ expect(findPopover().exists()).toBe(false);
+ });
+ };
+
+ describe('when showing groups', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ itDoesNotRenderVisibilityWarningPopover();
+ });
+
+ describe('when `action` prop is not `shared`', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ group: mockParentGroupItem,
+ parentGroup: mockChildren[0],
+ action: 'subgroups_and_projects',
+ });
+ });
+
+ itDoesNotRenderVisibilityWarningPopover();
+ });
+
+ describe('when showing projects', () => {
+ describe.each`
+ itemVisibility | currentGroupVisibility | isPopoverShown
+ ${VISIBILITY_PRIVATE} | ${VISIBILITY_PUBLIC} | ${false}
+ ${VISIBILITY_INTERNAL} | ${VISIBILITY_PUBLIC} | ${false}
+ ${VISIBILITY_PUBLIC} | ${VISIBILITY_PUBLIC} | ${false}
+ ${VISIBILITY_PRIVATE} | ${VISIBILITY_PRIVATE} | ${false}
+ ${VISIBILITY_INTERNAL} | ${VISIBILITY_PRIVATE} | ${true}
+ ${VISIBILITY_PUBLIC} | ${VISIBILITY_PRIVATE} | ${true}
+ `(
+ 'when item visibility is $itemVisibility and parent group visibility is $currentGroupVisibility',
+ ({ itemVisibility, currentGroupVisibility, isPopoverShown }) => {
+ beforeEach(() => {
+ wrapper = createComponent(
+ {
+ group: {
+ ...mockParentGroupItem,
+ visibility: itemVisibility,
+ type: ITEM_TYPE.PROJECT,
+ },
+ parentGroup: mockChildren[0],
+ action: 'shared',
+ },
+ {
+ currentGroupVisibility,
+ },
+ );
+ });
+
+ if (isPopoverShown) {
+ it('renders visibility warning popover', () => {
+ expect(findPopover().exists()).toBe(true);
+ });
+ } else {
+ itDoesNotRenderVisibilityWarningPopover();
+ }
+ },
+ );
+ });
+
+ it('sets up popover `target` prop correctly', () => {
+ wrapper = createComponent({
+ group: {
+ ...mockParentGroupItem,
+ visibility: VISIBILITY_PUBLIC,
+ type: ITEM_TYPE.PROJECT,
+ },
+ parentGroup: mockChildren[0],
+ action: 'shared',
+ });
+
+ expect(findPopover().props('target')()).toEqual(
+ wrapper.findByRole('button', { name: GroupItem.i18n.popoverTitle }).element,
+ );
+ });
+ });
});
diff --git a/spec/frontend/groups/components/group_name_and_path_spec.js b/spec/frontend/groups/components/group_name_and_path_spec.js
index eaa0801ab50..9c9bdead6fa 100644
--- a/spec/frontend/groups/components/group_name_and_path_spec.js
+++ b/spec/frontend/groups/components/group_name_and_path_spec.js
@@ -1,3 +1,4 @@
+import { nextTick } from 'vue';
import { merge } from 'lodash';
import { GlAlert } from '@gitlab/ui';
import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
@@ -50,17 +51,17 @@ describe('GroupNameAndPath', () => {
const findAlert = () => extendedWrapper(wrapper.findComponent(GlAlert));
const apiMockAvailablePath = () => {
- getGroupPathAvailability.mockResolvedValue({
+ getGroupPathAvailability.mockResolvedValueOnce({
data: { exists: false, suggests: [] },
});
};
const apiMockUnavailablePath = (suggests = [mockGroupUrlSuggested]) => {
- getGroupPathAvailability.mockResolvedValue({
+ getGroupPathAvailability.mockResolvedValueOnce({
data: { exists: true, suggests },
});
};
const apiMockLoading = () => {
- getGroupPathAvailability.mockImplementation(() => new Promise(() => {}));
+ getGroupPathAvailability.mockImplementationOnce(() => new Promise(() => {}));
};
const expectLoadingMessageExists = () => {
@@ -169,7 +170,7 @@ describe('GroupNameAndPath', () => {
describe('when API call fails', () => {
it('calls `createAlert`', async () => {
- getGroupPathAvailability.mockRejectedValue({});
+ getGroupPathAvailability.mockRejectedValueOnce({});
createComponent();
@@ -184,14 +185,20 @@ describe('GroupNameAndPath', () => {
describe('when multiple API calls are in-flight', () => {
it('aborts the first API call and resolves second API call', async () => {
- apiMockLoading();
+ getGroupPathAvailability.mockRejectedValueOnce({ __CANCEL__: true });
apiMockUnavailablePath();
+
const abortSpy = jest.spyOn(AbortController.prototype, 'abort');
createComponent();
await findGroupNameField().setValue('Foo');
await findGroupNameField().setValue(mockGroupName);
+
+ // Wait for re-render to ensure loading message is still there
+ await nextTick();
+ expectLoadingMessageExists();
+
await waitForPromises();
expect(createAlert).not.toHaveBeenCalled();
diff --git a/spec/frontend/groups/components/groups_spec.js b/spec/frontend/groups/components/groups_spec.js
index 590b4fb3d57..48a2319cf96 100644
--- a/spec/frontend/groups/components/groups_spec.js
+++ b/spec/frontend/groups/components/groups_spec.js
@@ -1,45 +1,55 @@
-import Vue, { nextTick } from 'vue';
+import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import groupFolderComponent from '~/groups/components/group_folder.vue';
-import groupItemComponent from '~/groups/components/group_item.vue';
-import groupsComponent from '~/groups/components/groups.vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import GroupFolderComponent from '~/groups/components/group_folder.vue';
+import GroupItemComponent from '~/groups/components/group_item.vue';
+import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
+import GroupsComponent from '~/groups/components/groups.vue';
import eventHub from '~/groups/event_hub';
+import { VISIBILITY_PRIVATE } from '~/groups/constants';
import { mockGroups, mockPageInfo } from '../mock_data';
-const createComponent = (searchEmpty = false) => {
- const Component = Vue.extend(groupsComponent);
+describe('GroupsComponent', () => {
+ let wrapper;
- return mountComponent(Component, {
+ const defaultPropsData = {
groups: mockGroups,
pageInfo: mockPageInfo,
searchEmptyMessage: 'No matching results',
- searchEmpty,
- });
-};
+ searchEmpty: false,
+ };
-describe('GroupsComponent', () => {
- let vm;
-
- beforeEach(async () => {
- Vue.component('GroupFolder', groupFolderComponent);
- Vue.component('GroupItem', groupItemComponent);
+ const createComponent = ({ propsData } = {}) => {
+ wrapper = mountExtended(GroupsComponent, {
+ propsData: {
+ ...defaultPropsData,
+ ...propsData,
+ },
+ provide: {
+ currentGroupVisibility: VISIBILITY_PRIVATE,
+ },
+ });
+ };
- vm = createComponent();
+ const findPaginationLinks = () => wrapper.findComponent(PaginationLinks);
- await nextTick();
+ beforeEach(async () => {
+ Vue.component('GroupFolder', GroupFolderComponent);
+ Vue.component('GroupItem', GroupItemComponent);
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
describe('methods', () => {
describe('change', () => {
it('should emit `fetchPage` event when page is changed via pagination', () => {
+ createComponent();
+
jest.spyOn(eventHub, '$emit').mockImplementation();
- vm.change(2);
+ findPaginationLinks().props('change')(2);
expect(eventHub.$emit).toHaveBeenCalledWith('fetchPage', {
page: 2,
@@ -52,18 +62,18 @@ describe('GroupsComponent', () => {
});
describe('template', () => {
- it('should render component template correctly', async () => {
- await nextTick();
- expect(vm.$el.querySelector('.groups-list-tree-container')).toBeDefined();
- expect(vm.$el.querySelector('.group-list-tree')).toBeDefined();
- expect(vm.$el.querySelector('.gl-pagination')).toBeDefined();
- expect(vm.$el.querySelectorAll('.has-no-search-results').length).toBe(0);
+ it('should render component template correctly', () => {
+ createComponent();
+
+ expect(wrapper.findComponent(GroupFolderComponent).exists()).toBe(true);
+ expect(findPaginationLinks().exists()).toBe(true);
+ expect(wrapper.findByText(defaultPropsData.searchEmptyMessage).exists()).toBe(false);
});
- it('should render empty search message when `searchEmpty` is `true`', async () => {
- vm.searchEmpty = true;
- await nextTick();
- expect(vm.$el.querySelector('.has-no-search-results')).toBeDefined();
+ it('should render empty search message when `searchEmpty` is `true`', () => {
+ createComponent({ propsData: { searchEmpty: true } });
+
+ expect(wrapper.findByText(defaultPropsData.searchEmptyMessage).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/groups/mock_data.js b/spec/frontend/groups/mock_data.js
index 603cb27deec..9a325776374 100644
--- a/spec/frontend/groups/mock_data.js
+++ b/spec/frontend/groups/mock_data.js
@@ -5,26 +5,6 @@ export const ITEM_TYPE = {
GROUP: 'group',
};
-export const GROUP_VISIBILITY_TYPE = {
- public: 'Public - The group and any public projects can be viewed without any authentication.',
- internal:
- 'Internal - The group and any internal projects can be viewed by any logged in user except external users.',
- private: 'Private - The group and its projects can only be viewed by members.',
-};
-
-export const PROJECT_VISIBILITY_TYPE = {
- public: 'Public - The project can be accessed without any authentication.',
- internal: 'Internal - The project can be accessed by any logged in user except external users.',
- private:
- 'Private - Project access must be granted explicitly to each user. If this project is part of a group, access will be granted to members of the group.',
-};
-
-export const VISIBILITY_TYPE_ICON = {
- public: 'earth',
- internal: 'shield',
- private: 'lock',
-};
-
export const mockParentGroupItem = {
id: 55,
name: 'hardware',
@@ -49,6 +29,7 @@ export const mockParentGroupItem = {
isChildrenLoading: false,
isBeingRemoved: false,
updatedAt: '2017-04-09T18:40:39.101Z',
+ lastActivityAt: '2017-04-09T18:40:39.101Z',
};
export const mockRawChildren = [
diff --git a/spec/frontend/header_search/components/app_spec.js b/spec/frontend/header_search/components/app_spec.js
index f0de5b083ae..d89218f5542 100644
--- a/spec/frontend/header_search/components/app_spec.js
+++ b/spec/frontend/header_search/components/app_spec.js
@@ -1,22 +1,32 @@
-import { GlSearchBoxByType } from '@gitlab/ui';
+import { GlSearchBoxByType, GlToken, GlIcon } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { s__, sprintf } from '~/locale';
import HeaderSearchApp from '~/header_search/components/app.vue';
import HeaderSearchAutocompleteItems from '~/header_search/components/header_search_autocomplete_items.vue';
import HeaderSearchDefaultItems from '~/header_search/components/header_search_default_items.vue';
import HeaderSearchScopedItems from '~/header_search/components/header_search_scoped_items.vue';
-import { SEARCH_INPUT_DESCRIPTION, SEARCH_RESULTS_DESCRIPTION } from '~/header_search/constants';
+import {
+ SEARCH_INPUT_DESCRIPTION,
+ SEARCH_RESULTS_DESCRIPTION,
+ SEARCH_BOX_INDEX,
+ ICON_PROJECT,
+ ICON_GROUP,
+ ICON_SUBGROUP,
+ SCOPE_TOKEN_MAX_LENGTH,
+} from '~/header_search/constants';
import DropdownKeyboardNavigation from '~/vue_shared/components/dropdown_keyboard_navigation.vue';
import { ENTER_KEY } from '~/lib/utils/keys';
import { visitUrl } from '~/lib/utils/url_utility';
+import { truncate } from '~/lib/utils/text_utility';
import {
MOCK_SEARCH,
MOCK_SEARCH_QUERY,
MOCK_USERNAME,
MOCK_DEFAULT_SEARCH_OPTIONS,
MOCK_SCOPED_SEARCH_OPTIONS,
- MOCK_SORTED_AUTOCOMPLETE_OPTIONS,
+ MOCK_SEARCH_CONTEXT_FULL,
} from '../mock_data';
Vue.use(Vuex);
@@ -52,11 +62,27 @@ describe('HeaderSearchApp', () => {
});
};
+ const formatScopeName = (scopeName) => {
+ if (!scopeName) {
+ return false;
+ }
+ const searchResultsScope = s__('GlobalSearch|in %{scope}');
+ return truncate(
+ sprintf(searchResultsScope, {
+ scope: scopeName,
+ }),
+ SCOPE_TOKEN_MAX_LENGTH,
+ );
+ };
+
afterEach(() => {
wrapper.destroy();
});
+ const findHeaderSearchForm = () => wrapper.findByTestId('header-search-form');
const findHeaderSearchInput = () => wrapper.findComponent(GlSearchBoxByType);
+ const findScopeToken = () => wrapper.findComponent(GlToken);
+ const findHeaderSearchInputKBD = () => wrapper.find('.keyboard-shortcut-helper');
const findHeaderSearchDropdown = () => wrapper.findByTestId('header-search-dropdown-menu');
const findHeaderSearchDefaultItems = () => wrapper.findComponent(HeaderSearchDefaultItems);
const findHeaderSearchScopedItems = () => wrapper.findComponent(HeaderSearchScopedItems);
@@ -76,6 +102,14 @@ describe('HeaderSearchApp', () => {
expect(findHeaderSearchInput().exists()).toBe(true);
});
+ it('Header Search Input KBD hint', () => {
+ expect(findHeaderSearchInputKBD().exists()).toBe(true);
+ expect(findHeaderSearchInputKBD().text()).toContain('/');
+ expect(findHeaderSearchInputKBD().attributes('title')).toContain(
+ 'Use the shortcut key <kbd>/</kbd> to start a search',
+ );
+ });
+
it('Search Input Description', () => {
expect(findSearchInputDescription().exists()).toBe(true);
});
@@ -106,53 +140,38 @@ describe('HeaderSearchApp', () => {
});
describe.each`
- search | showDefault | showScoped | showAutocomplete | showDropdownNavigation
- ${null} | ${true} | ${false} | ${false} | ${true}
- ${''} | ${true} | ${false} | ${false} | ${true}
- ${'1'} | ${false} | ${false} | ${false} | ${false}
- ${')'} | ${false} | ${false} | ${false} | ${false}
- ${'t'} | ${false} | ${false} | ${true} | ${true}
- ${'te'} | ${false} | ${true} | ${true} | ${true}
- ${'tes'} | ${false} | ${true} | ${true} | ${true}
- ${MOCK_SEARCH} | ${false} | ${true} | ${true} | ${true}
- `(
- 'Header Search Dropdown Items',
- ({ search, showDefault, showScoped, showAutocomplete, showDropdownNavigation }) => {
- describe(`when search is ${search}`, () => {
- beforeEach(() => {
- window.gon.current_username = MOCK_USERNAME;
- createComponent(
- { search },
- {
- autocompleteGroupedSearchOptions: () =>
- search.match(/^[A-Za-z]+$/g) ? MOCK_SORTED_AUTOCOMPLETE_OPTIONS : [],
- },
- );
- findHeaderSearchInput().vm.$emit('click');
- });
+ search | showDefault | showScoped | showAutocomplete
+ ${null} | ${true} | ${false} | ${false}
+ ${''} | ${true} | ${false} | ${false}
+ ${'t'} | ${false} | ${false} | ${true}
+ ${'te'} | ${false} | ${false} | ${true}
+ ${'tes'} | ${false} | ${true} | ${true}
+ ${MOCK_SEARCH} | ${false} | ${true} | ${true}
+ `('Header Search Dropdown Items', ({ search, showDefault, showScoped, showAutocomplete }) => {
+ describe(`when search is ${search}`, () => {
+ beforeEach(() => {
+ window.gon.current_username = MOCK_USERNAME;
+ createComponent({ search }, {});
+ findHeaderSearchInput().vm.$emit('click');
+ });
- it(`should${showDefault ? '' : ' not'} render the Default Dropdown Items`, () => {
- expect(findHeaderSearchDefaultItems().exists()).toBe(showDefault);
- });
+ it(`should${showDefault ? '' : ' not'} render the Default Dropdown Items`, () => {
+ expect(findHeaderSearchDefaultItems().exists()).toBe(showDefault);
+ });
- it(`should${showScoped ? '' : ' not'} render the Scoped Dropdown Items`, () => {
- expect(findHeaderSearchScopedItems().exists()).toBe(showScoped);
- });
+ it(`should${showScoped ? '' : ' not'} render the Scoped Dropdown Items`, () => {
+ expect(findHeaderSearchScopedItems().exists()).toBe(showScoped);
+ });
- it(`should${
- showAutocomplete ? '' : ' not'
- } render the Autocomplete Dropdown Items`, () => {
- expect(findHeaderSearchAutocompleteItems().exists()).toBe(showAutocomplete);
- });
+ it(`should${showAutocomplete ? '' : ' not'} render the Autocomplete Dropdown Items`, () => {
+ expect(findHeaderSearchAutocompleteItems().exists()).toBe(showAutocomplete);
+ });
- it(`should${
- showDropdownNavigation ? '' : ' not'
- } render the Dropdown Navigation Component`, () => {
- expect(findDropdownKeyboardNavigation().exists()).toBe(showDropdownNavigation);
- });
+ it(`should render the Dropdown Navigation Component`, () => {
+ expect(findDropdownKeyboardNavigation().exists()).toBe(true);
});
- },
- );
+ });
+ });
describe.each`
username | showDropdown | expectedDesc
@@ -185,12 +204,18 @@ describe('HeaderSearchApp', () => {
`(
'Search Results Description',
({ username, showDropdown, search, loading, searchOptions, expectedDesc }) => {
- describe(`search is ${search}, loading is ${loading}, and showSearchDropdown is ${
- Boolean(username) && showDropdown
- }`, () => {
+ describe(`search is "${search}", loading is ${loading}, and showSearchDropdown is ${showDropdown}`, () => {
beforeEach(() => {
window.gon.current_username = username;
- createComponent({ search, loading }, { searchOptions: () => searchOptions });
+ createComponent(
+ {
+ search,
+ loading,
+ },
+ {
+ searchOptions: () => searchOptions,
+ },
+ );
findHeaderSearchInput().vm.$emit(showDropdown ? 'click' : '');
});
@@ -200,6 +225,121 @@ describe('HeaderSearchApp', () => {
});
},
);
+
+ describe('input box', () => {
+ describe.each`
+ search | searchOptions | hasToken
+ ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[0]]} | ${true}
+ ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[1]]} | ${true}
+ ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[2]]} | ${true}
+ ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[3]]} | ${true}
+ ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[4]]} | ${true}
+ ${'te'} | ${[MOCK_SCOPED_SEARCH_OPTIONS[5]]} | ${false}
+ ${'x'} | ${[]} | ${false}
+ `('token', ({ search, searchOptions, hasToken }) => {
+ beforeEach(() => {
+ window.gon.current_username = MOCK_USERNAME;
+ createComponent(
+ { search },
+ {
+ searchOptions: () => searchOptions,
+ },
+ );
+ });
+
+ it(`${hasToken ? 'is' : 'is NOT'} rendered when data set has type "${
+ searchOptions[0]?.html_id
+ }"`, () => {
+ expect(findScopeToken().exists()).toBe(hasToken);
+ });
+
+ it(`text ${hasToken ? 'is correctly' : 'is NOT'} rendered when text is "${
+ searchOptions[0]?.scope || searchOptions[0]?.description
+ }"`, () => {
+ expect(findScopeToken().exists() && findScopeToken().text()).toBe(
+ formatScopeName(searchOptions[0]?.scope || searchOptions[0]?.description),
+ );
+ });
+ });
+ });
+
+ describe('form wrapper', () => {
+ describe.each`
+ searchContext | search | searchOptions
+ ${MOCK_SEARCH_CONTEXT_FULL} | ${null} | ${[]}
+ ${MOCK_SEARCH_CONTEXT_FULL} | ${MOCK_SEARCH} | ${[]}
+ ${MOCK_SEARCH_CONTEXT_FULL} | ${MOCK_SEARCH} | ${MOCK_SCOPED_SEARCH_OPTIONS}
+ ${null} | ${MOCK_SEARCH} | ${MOCK_SCOPED_SEARCH_OPTIONS}
+ ${null} | ${null} | ${MOCK_SCOPED_SEARCH_OPTIONS}
+ ${null} | ${null} | ${[]}
+ `('', ({ searchContext, search, searchOptions }) => {
+ beforeEach(() => {
+ window.gon.current_username = MOCK_USERNAME;
+
+ createComponent({ search, searchContext }, { searchOptions: () => searchOptions });
+
+ findHeaderSearchInput().vm.$emit('click');
+ });
+
+ const hasIcon = Boolean(searchContext?.group);
+ const isSearching = Boolean(search);
+ const isActive = Boolean(searchOptions.length > 0);
+
+ it(`${hasIcon ? 'with' : 'without'} search context classes contain "${
+ hasIcon ? 'has-icon' : 'has-no-icon'
+ }"`, () => {
+ const expectedClass = hasIcon ? 'has-icon' : 'has-no-icon';
+ expect(findHeaderSearchForm().classes()).toContain(expectedClass);
+ });
+
+ it(`${isSearching ? 'with' : 'without'} search string classes contain "${
+ isSearching ? 'is-searching' : 'is-not-searching'
+ }"`, () => {
+ const expectedClass = isSearching ? 'is-searching' : 'is-not-searching';
+ expect(findHeaderSearchForm().classes()).toContain(expectedClass);
+ });
+
+ it(`${isActive ? 'with' : 'without'} search results classes contain "${
+ isActive ? 'is-active' : 'is-not-active'
+ }"`, () => {
+ const expectedClass = isActive ? 'is-active' : 'is-not-active';
+ expect(findHeaderSearchForm().classes()).toContain(expectedClass);
+ });
+ });
+ });
+
+ describe.each`
+ search | searchOptions | hasIcon | iconName
+ ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[0]]} | ${true} | ${ICON_PROJECT}
+ ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[2]]} | ${true} | ${ICON_GROUP}
+ ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[3]]} | ${true} | ${ICON_SUBGROUP}
+ ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[4]]} | ${false} | ${false}
+ `('token', ({ search, searchOptions, hasIcon, iconName }) => {
+ beforeEach(() => {
+ window.gon.current_username = MOCK_USERNAME;
+ createComponent(
+ { search },
+ {
+ searchOptions: () => searchOptions,
+ },
+ );
+ });
+
+ it(`icon for data set type "${searchOptions[0]?.html_id}" ${
+ hasIcon ? 'is' : 'is NOT'
+ } rendered`, () => {
+ expect(findScopeToken().findComponent(GlIcon).exists()).toBe(hasIcon);
+ });
+
+ it(`renders ${iconName ? `"${iconName}"` : 'NO'} icon for data set type "${
+ searchOptions[0]?.html_id
+ }"`, () => {
+ expect(
+ findScopeToken().findComponent(GlIcon).exists() &&
+ findScopeToken().findComponent(GlIcon).attributes('name'),
+ ).toBe(iconName);
+ });
+ });
});
describe('events', () => {
@@ -285,18 +425,20 @@ describe('HeaderSearchApp', () => {
});
describe('computed', () => {
- describe('currentFocusedOption', () => {
- const MOCK_INDEX = 1;
-
+ describe.each`
+ MOCK_INDEX | search
+ ${1} | ${null}
+ ${SEARCH_BOX_INDEX} | ${'test'}
+ ${2} | ${'test1'}
+ `('currentFocusedOption', ({ MOCK_INDEX, search }) => {
beforeEach(() => {
- createComponent();
+ createComponent({ search });
window.gon.current_username = MOCK_USERNAME;
findHeaderSearchInput().vm.$emit('click');
});
- it(`when currentFocusIndex changes to ${MOCK_INDEX} updates the data to searchOptions[${MOCK_INDEX}]`, async () => {
+ it(`when currentFocusIndex changes to ${MOCK_INDEX} updates the data to searchOptions[${MOCK_INDEX}]`, () => {
findDropdownKeyboardNavigation().vm.$emit('change', MOCK_INDEX);
- await nextTick();
expect(wrapper.vm.currentFocusedOption).toBe(MOCK_DEFAULT_SEARCH_OPTIONS[MOCK_INDEX]);
});
});
@@ -308,15 +450,25 @@ describe('HeaderSearchApp', () => {
createComponent();
});
- it('onKey-enter submits a search', async () => {
+ it('onKey-enter submits a search', () => {
findHeaderSearchInput().vm.$emit('keydown', new KeyboardEvent({ key: ENTER_KEY }));
- await nextTick();
-
expect(visitUrl).toHaveBeenCalledWith(MOCK_SEARCH_QUERY);
});
});
+ describe('with less than min characters and no dropdown results', () => {
+ beforeEach(() => {
+ createComponent({ search: 'x' });
+ });
+
+ it('onKey-enter will NOT submit a search', () => {
+ findHeaderSearchInput().vm.$emit('keydown', new KeyboardEvent({ key: ENTER_KEY }));
+
+ expect(visitUrl).not.toHaveBeenCalledWith(MOCK_SEARCH_QUERY);
+ });
+ });
+
describe('with currentFocusedOption', () => {
const MOCK_INDEX = 1;
@@ -326,9 +478,9 @@ describe('HeaderSearchApp', () => {
findHeaderSearchInput().vm.$emit('click');
});
- it('onKey-enter clicks the selected dropdown item rather than submitting a search', async () => {
+ it('onKey-enter clicks the selected dropdown item rather than submitting a search', () => {
findDropdownKeyboardNavigation().vm.$emit('change', MOCK_INDEX);
- await nextTick();
+
findHeaderSearchInput().vm.$emit('keydown', new KeyboardEvent({ key: ENTER_KEY }));
expect(visitUrl).toHaveBeenCalledWith(MOCK_DEFAULT_SEARCH_OPTIONS[MOCK_INDEX].url);
});
diff --git a/spec/frontend/header_search/components/header_search_scoped_items_spec.js b/spec/frontend/header_search/components/header_search_scoped_items_spec.js
index 8788fb23458..2db9f71d702 100644
--- a/spec/frontend/header_search/components/header_search_scoped_items_spec.js
+++ b/spec/frontend/header_search/components/header_search_scoped_items_spec.js
@@ -1,9 +1,11 @@
-import { GlDropdownItem, GlDropdownDivider } from '@gitlab/ui';
+import { GlDropdownItem, GlToken, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
import { trimText } from 'helpers/text_helper';
import HeaderSearchScopedItems from '~/header_search/components/header_search_scoped_items.vue';
+import { truncate } from '~/lib/utils/text_utility';
+import { MSG_IN_ALL_GITLAB, SCOPE_TOKEN_MAX_LENGTH } from '~/header_search/constants';
import {
MOCK_SEARCH,
MOCK_SCOPED_SEARCH_OPTIONS,
@@ -41,9 +43,12 @@ describe('HeaderSearchScopedItems', () => {
});
const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
- const findGlDropdownDivider = () => wrapper.findComponent(GlDropdownDivider);
const findFirstDropdownItem = () => findDropdownItems().at(0);
const findDropdownItemTitles = () => findDropdownItems().wrappers.map((w) => trimText(w.text()));
+ const findScopeTokens = () => wrapper.findAllComponents(GlToken);
+ const findScopeTokensText = () => findScopeTokens().wrappers.map((w) => trimText(w.text()));
+ const findScopeTokensIcons = () =>
+ findScopeTokens().wrappers.map((w) => w.findAllComponents(GlIcon));
const findDropdownItemAriaLabels = () =>
findDropdownItems().wrappers.map((w) => trimText(w.attributes('aria-label')));
const findDropdownItemLinks = () => findDropdownItems().wrappers.map((w) => w.attributes('href'));
@@ -59,15 +64,31 @@ describe('HeaderSearchScopedItems', () => {
});
it('renders titles correctly', () => {
+ findDropdownItemTitles().forEach((title) => expect(title).toContain(MOCK_SEARCH));
+ });
+
+ it('renders scope names correctly', () => {
const expectedTitles = MOCK_SCOPED_SEARCH_OPTIONS.map((o) =>
- trimText(`"${MOCK_SEARCH}" ${o.description} ${o.scope || ''}`),
+ truncate(trimText(`in ${o.description || o.scope}`), SCOPE_TOKEN_MAX_LENGTH),
);
- expect(findDropdownItemTitles()).toStrictEqual(expectedTitles);
+
+ expect(findScopeTokensText()).toStrictEqual(expectedTitles);
+ });
+
+ it('renders scope icons correctly', () => {
+ findScopeTokensIcons().forEach((icon, i) => {
+ const w = icon.wrappers[0];
+ expect(w?.attributes('name')).toBe(MOCK_SCOPED_SEARCH_OPTIONS[i].icon);
+ });
+ });
+
+ it(`renders no icon for the ${MSG_IN_ALL_GITLAB} scope`, () => {
+ expect(findScopeTokens().at(-1).findComponent(GlIcon).exists()).toBe(false);
});
it('renders aria-labels correctly', () => {
const expectedLabels = MOCK_SCOPED_SEARCH_OPTIONS.map((o) =>
- trimText(`${MOCK_SEARCH} ${o.description} ${o.scope || ''}`),
+ trimText(`${MOCK_SEARCH} ${o.description || o.icon} ${o.scope || ''}`),
);
expect(findDropdownItemAriaLabels()).toStrictEqual(expectedLabels);
});
@@ -98,21 +119,5 @@ describe('HeaderSearchScopedItems', () => {
});
});
});
-
- describe.each`
- autosuggestResults | showDivider
- ${[]} | ${false}
- ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS} | ${true}
- `('scoped search items', ({ autosuggestResults, showDivider }) => {
- describe(`when when we have ${autosuggestResults.length} auto-sugest results`, () => {
- beforeEach(() => {
- createComponent({}, { autocompleteGroupedSearchOptions: () => autosuggestResults }, {});
- });
-
- it(`divider should${showDivider ? '' : ' not'} be shown`, () => {
- expect(findGlDropdownDivider().exists()).toBe(showDivider);
- });
- });
- });
});
});
diff --git a/spec/frontend/header_search/init_spec.js b/spec/frontend/header_search/init_spec.js
new file mode 100644
index 00000000000..9515ca8c812
--- /dev/null
+++ b/spec/frontend/header_search/init_spec.js
@@ -0,0 +1,74 @@
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+
+import initHeaderSearch, { eventHandler, cleanEventListeners } from '~/header_search/init';
+
+describe('Header Search EventListener', () => {
+ beforeEach(() => {
+ jest.resetModules();
+ jest.restoreAllMocks();
+ setHTMLFixture(`
+ <div class="js-header-content">
+ <div class="header-search" id="js-header-search" data-autocomplete-path="/search/autocomplete" data-issues-path="/dashboard/issues" data-mr-path="/dashboard/merge_requests" data-search-context="{}" data-search-path="/search">
+ <input autocomplete="off" class="form-control gl-form-input gl-search-box-by-type-input" data-qa-selector="search_box" id="search" name="search" placeholder="Search GitLab" type="text">
+ </div>
+ </div>`);
+ });
+
+ afterEach(() => {
+ resetHTMLFixture();
+ jest.clearAllMocks();
+ });
+
+ it('attaches event listener', () => {
+ const searchInputBox = document?.querySelector('#search');
+ const addEventListenerSpy = jest.spyOn(searchInputBox, 'addEventListener');
+ initHeaderSearch();
+
+ expect(addEventListenerSpy).toBeCalledTimes(2);
+ });
+
+ it('removes event listener', async () => {
+ const searchInputBox = document?.querySelector('#search');
+ const removeEventListenerSpy = jest.spyOn(searchInputBox, 'removeEventListener');
+ jest.mock('~/header_search', () => ({ initHeaderSearchApp: jest.fn() }));
+ await eventHandler.apply(
+ {
+ newHeaderSearchFeatureFlag: true,
+ searchInputBox: document.querySelector('#search'),
+ },
+ [cleanEventListeners],
+ );
+
+ expect(removeEventListenerSpy).toBeCalledTimes(2);
+ });
+
+ it('attaches new vue dropdown when feature flag is enabled', async () => {
+ const mockVueApp = jest.fn();
+ jest.mock('~/header_search', () => ({ initHeaderSearchApp: mockVueApp }));
+ await eventHandler.apply(
+ {
+ newHeaderSearchFeatureFlag: true,
+ searchInputBox: document.querySelector('#search'),
+ },
+ () => {},
+ );
+
+ expect(mockVueApp).toBeCalled();
+ });
+
+ it('attaches old vue dropdown when feature flag is disabled', async () => {
+ const mockLegacyApp = jest.fn(() => ({
+ onSearchInputFocus: jest.fn(),
+ }));
+ jest.mock('~/search_autocomplete', () => mockLegacyApp);
+ await eventHandler.apply(
+ {
+ newHeaderSearchFeatureFlag: false,
+ searchInputBox: document.querySelector('#search'),
+ },
+ () => {},
+ );
+
+ expect(mockLegacyApp).toBeCalled();
+ });
+});
diff --git a/spec/frontend/header_search/mock_data.js b/spec/frontend/header_search/mock_data.js
index b6f0fdcc29d..8ccd7fb17e3 100644
--- a/spec/frontend/header_search/mock_data.js
+++ b/spec/frontend/header_search/mock_data.js
@@ -4,9 +4,12 @@ import {
MSG_MR_ASSIGNED_TO_ME,
MSG_MR_IM_REVIEWER,
MSG_MR_IVE_CREATED,
- MSG_IN_PROJECT,
- MSG_IN_GROUP,
MSG_IN_ALL_GITLAB,
+ PROJECTS_CATEGORY,
+ ICON_PROJECT,
+ GROUPS_CATEGORY,
+ ICON_GROUP,
+ ICON_SUBGROUP,
} from '~/header_search/constants';
export const MOCK_USERNAME = 'anyone';
@@ -27,12 +30,24 @@ export const MOCK_PROJECT = {
path: '/mock-project',
};
+export const MOCK_PROJECT_LONG = {
+ id: 124,
+ name: 'Mock Project Name That Is Ridiculously Long And It Goes Forever',
+ path: '/mock-project-name-that-is-ridiculously-long-and-it-goes-forever',
+};
+
export const MOCK_GROUP = {
id: 321,
name: 'MockGroup',
path: '/mock-group',
};
+export const MOCK_SUBGROUP = {
+ id: 322,
+ name: 'MockSubGroup',
+ path: `${MOCK_GROUP}/mock-subgroup`,
+};
+
export const MOCK_SEARCH_QUERY = 'http://gitlab.com/search?search=test';
export const MOCK_SEARCH = 'test';
@@ -44,6 +59,20 @@ export const MOCK_SEARCH_CONTEXT = {
group_metadata: {},
};
+export const MOCK_SEARCH_CONTEXT_FULL = {
+ group: {
+ id: 31,
+ name: 'testGroup',
+ full_name: 'testGroup',
+ },
+ group_metadata: {
+ group_path: 'testGroup',
+ name: 'testGroup',
+ issues_path: '/groups/testGroup/-/issues',
+ mr_path: '/groups/testGroup/-/merge_requests',
+ },
+};
+
export const MOCK_DEFAULT_SEARCH_OPTIONS = [
{
html_id: 'default-issues-assigned',
@@ -76,13 +105,51 @@ export const MOCK_SCOPED_SEARCH_OPTIONS = [
{
html_id: 'scoped-in-project',
scope: MOCK_PROJECT.name,
- description: MSG_IN_PROJECT,
+ scopeCategory: PROJECTS_CATEGORY,
+ icon: ICON_PROJECT,
+ url: MOCK_PROJECT.path,
+ },
+ {
+ html_id: 'scoped-in-project-long',
+ scope: MOCK_PROJECT_LONG.name,
+ scopeCategory: PROJECTS_CATEGORY,
+ icon: ICON_PROJECT,
+ url: MOCK_PROJECT_LONG.path,
+ },
+ {
+ html_id: 'scoped-in-group',
+ scope: MOCK_GROUP.name,
+ scopeCategory: GROUPS_CATEGORY,
+ icon: ICON_GROUP,
+ url: MOCK_GROUP.path,
+ },
+ {
+ html_id: 'scoped-in-subgroup',
+ scope: MOCK_SUBGROUP.name,
+ scopeCategory: GROUPS_CATEGORY,
+ icon: ICON_SUBGROUP,
+ url: MOCK_SUBGROUP.path,
+ },
+ {
+ html_id: 'scoped-in-all',
+ description: MSG_IN_ALL_GITLAB,
+ url: MOCK_ALL_PATH,
+ },
+];
+
+export const MOCK_SCOPED_SEARCH_OPTIONS_DEF = [
+ {
+ html_id: 'scoped-in-project',
+ scope: MOCK_PROJECT.name,
+ scopeCategory: PROJECTS_CATEGORY,
+ icon: ICON_PROJECT,
url: MOCK_PROJECT.path,
},
{
html_id: 'scoped-in-group',
scope: MOCK_GROUP.name,
- description: MSG_IN_GROUP,
+ scopeCategory: GROUPS_CATEGORY,
+ icon: ICON_GROUP,
url: MOCK_GROUP.path,
},
{
diff --git a/spec/frontend/header_search/store/getters_spec.js b/spec/frontend/header_search/store/getters_spec.js
index d3510de1439..c76be3c0360 100644
--- a/spec/frontend/header_search/store/getters_spec.js
+++ b/spec/frontend/header_search/store/getters_spec.js
@@ -9,6 +9,7 @@ import {
MOCK_SEARCH_CONTEXT,
MOCK_DEFAULT_SEARCH_OPTIONS,
MOCK_SCOPED_SEARCH_OPTIONS,
+ MOCK_SCOPED_SEARCH_OPTIONS_DEF,
MOCK_PROJECT,
MOCK_GROUP,
MOCK_ALL_PATH,
@@ -284,7 +285,7 @@ describe('Header Search Store Getters', () => {
it('returns the correct array', () => {
expect(getters.scopedSearchOptions(state, mockGetters)).toStrictEqual(
- MOCK_SCOPED_SEARCH_OPTIONS,
+ MOCK_SCOPED_SEARCH_OPTIONS_DEF,
);
});
});
@@ -308,6 +309,11 @@ describe('Header Search Store Getters', () => {
${MOCK_SEARCH} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${[]} | ${MOCK_SCOPED_SEARCH_OPTIONS}
${MOCK_SEARCH} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${[]} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS} | ${MOCK_SORTED_AUTOCOMPLETE_OPTIONS}
${MOCK_SEARCH} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS.concat(MOCK_SORTED_AUTOCOMPLETE_OPTIONS)}
+ ${1} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${[]} | ${[]} | ${[]}
+ ${'('} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${[]} | ${[]} | ${[]}
+ ${'t'} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS} | ${MOCK_SORTED_AUTOCOMPLETE_OPTIONS}
+ ${'te'} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS} | ${MOCK_SORTED_AUTOCOMPLETE_OPTIONS}
+ ${'tes'} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS.concat(MOCK_SORTED_AUTOCOMPLETE_OPTIONS)}
`(
'searchOptions',
({
diff --git a/spec/frontend/header_spec.js b/spec/frontend/header_spec.js
index 19849fba63c..4e2fb70a2cb 100644
--- a/spec/frontend/header_spec.js
+++ b/spec/frontend/header_spec.js
@@ -1,4 +1,3 @@
-import $ from 'jquery';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import initTodoToggle, { initNavUserDropdownTracking } from '~/header';
import { loadHTMLFixture, setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
@@ -9,11 +8,17 @@ describe('Header', () => {
const fixtureTemplate = 'issues/open-issue.html';
function isTodosCountHidden() {
- return $(todosPendingCount).hasClass('hidden');
+ return document.querySelector(todosPendingCount).classList.contains('hidden');
}
function triggerToggle(newCount) {
- $(document).trigger('todo:toggle', newCount);
+ const event = new CustomEvent('todo:toggle', {
+ detail: {
+ count: newCount,
+ },
+ });
+
+ document.dispatchEvent(event);
}
beforeEach(() => {
@@ -28,7 +33,7 @@ describe('Header', () => {
it('should update todos-count after receiving the todo:toggle event', () => {
triggerToggle(5);
- expect($(todosPendingCount).text()).toEqual('5');
+ expect(document.querySelector(todosPendingCount).textContent).toEqual('5');
});
it('should hide todos-count when it is 0', () => {
@@ -53,7 +58,7 @@ describe('Header', () => {
});
it('should show 99+ for todos-count', () => {
- expect($(todosPendingCount).text()).toEqual('99+');
+ expect(document.querySelector(todosPendingCount).textContent).toEqual('99+');
});
});
});
@@ -67,7 +72,11 @@ describe('Header', () => {
<a class="js-buy-pipeline-minutes-link" data-track-action="click_buy_ci_minutes" data-track-label="free" data-track-property="user_dropdown">Buy Pipeline minutes</a>
</li>`);
- trackingSpy = mockTracking('_category_', $('.js-nav-user-dropdown').element, jest.spyOn);
+ trackingSpy = mockTracking(
+ '_category_',
+ document.querySelector('.js-nav-user-dropdown').element,
+ jest.spyOn,
+ );
document.body.dataset.page = 'some:page';
initNavUserDropdownTracking();
@@ -79,7 +88,8 @@ describe('Header', () => {
});
it('sends a tracking event when the dropdown is opened and contains Buy Pipeline minutes link', () => {
- $('.js-nav-user-dropdown').trigger('shown.bs.dropdown');
+ const event = new CustomEvent('shown.bs.dropdown');
+ document.querySelector('.js-nav-user-dropdown').dispatchEvent(event);
expect(trackingSpy).toHaveBeenCalledWith('some:page', 'show_buy_ci_minutes', {
label: 'free',
diff --git a/spec/frontend/ide/components/commit_sidebar/empty_state_spec.js b/spec/frontend/ide/components/commit_sidebar/empty_state_spec.js
index 4f81c0aa5d3..7c48c0e6f95 100644
--- a/spec/frontend/ide/components/commit_sidebar/empty_state_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/empty_state_spec.js
@@ -1,29 +1,21 @@
-import Vue from 'vue';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
-import emptyState from '~/ide/components/commit_sidebar/empty_state.vue';
+import { shallowMount } from '@vue/test-utils';
+import EmptyState from '~/ide/components/commit_sidebar/empty_state.vue';
import { createStore } from '~/ide/stores';
-describe('IDE commit panel empty state', () => {
- let vm;
- let store;
+describe('IDE commit panel EmptyState component', () => {
+ let wrapper;
beforeEach(() => {
- store = createStore();
-
- const Component = Vue.extend(emptyState);
-
- Vue.set(store.state, 'noChangesStateSvgPath', 'no-changes');
-
- vm = createComponentWithStore(Component, store);
-
- vm.$mount();
+ const store = createStore();
+ store.state.noChangesStateSvgPath = 'no-changes';
+ wrapper = shallowMount(EmptyState, { store });
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders no changes text when last commit message is empty', () => {
- expect(vm.$el.textContent).toContain('No changes');
+ expect(wrapper.find('h4').text()).toBe('No changes');
});
});
diff --git a/spec/frontend/ide/components/commit_sidebar/list_spec.js b/spec/frontend/ide/components/commit_sidebar/list_spec.js
index 1d42512c9ee..81c81fc0a9f 100644
--- a/spec/frontend/ide/components/commit_sidebar/list_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/list_spec.js
@@ -1,51 +1,47 @@
-import Vue, { nextTick } from 'vue';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
-import commitSidebarList from '~/ide/components/commit_sidebar/list.vue';
-import { createStore } from '~/ide/stores';
+import { shallowMount } from '@vue/test-utils';
+import CommitSidebarList from '~/ide/components/commit_sidebar/list.vue';
+import ListItem from '~/ide/components/commit_sidebar/list_item.vue';
import { file } from '../../helpers';
describe('Multi-file editor commit sidebar list', () => {
- let store;
- let vm;
-
- beforeEach(() => {
- store = createStore();
-
- const Component = Vue.extend(commitSidebarList);
-
- vm = createComponentWithStore(Component, store, {
- title: 'Staged',
- fileList: [],
- action: 'stageAllChanges',
- actionBtnText: 'stage all',
- actionBtnIcon: 'history',
- activeFileKey: 'staged-testing',
- keyPrefix: 'staged',
+ let wrapper;
+
+ const mountComponent = ({ fileList }) =>
+ shallowMount(CommitSidebarList, {
+ propsData: {
+ title: 'Staged',
+ fileList,
+ action: 'stageAllChanges',
+ actionBtnText: 'stage all',
+ actionBtnIcon: 'history',
+ activeFileKey: 'staged-testing',
+ keyPrefix: 'staged',
+ },
});
- vm.$mount();
- });
-
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
describe('with a list of files', () => {
beforeEach(async () => {
const f = file('file name');
f.changed = true;
- vm.fileList.push(f);
- await nextTick();
+ wrapper = mountComponent({ fileList: [f] });
});
it('renders list', () => {
- expect(vm.$el.querySelectorAll('.multi-file-commit-list > li').length).toBe(1);
+ expect(wrapper.findAllComponents(ListItem)).toHaveLength(1);
});
});
- describe('empty files array', () => {
- it('renders no changes text when empty', () => {
- expect(vm.$el.textContent).toContain('No changes');
+ describe('with empty files array', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ fileList: [] });
+ });
+
+ it('renders no changes text', () => {
+ expect(wrapper.text()).toContain('No changes');
});
});
});
diff --git a/spec/frontend/ide/components/commit_sidebar/success_message_spec.js b/spec/frontend/ide/components/commit_sidebar/success_message_spec.js
index 52e35bdbb73..63d51953915 100644
--- a/spec/frontend/ide/components/commit_sidebar/success_message_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/success_message_spec.js
@@ -1,32 +1,22 @@
-import Vue, { nextTick } from 'vue';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
-import successMessage from '~/ide/components/commit_sidebar/success_message.vue';
+import { shallowMount } from '@vue/test-utils';
+import SuccessMessage from '~/ide/components/commit_sidebar/success_message.vue';
import { createStore } from '~/ide/stores';
describe('IDE commit panel successful commit state', () => {
- let vm;
- let store;
+ let wrapper;
beforeEach(() => {
- store = createStore();
-
- const Component = Vue.extend(successMessage);
-
- vm = createComponentWithStore(Component, store, {
- committedStateSvgPath: 'committed-state',
- });
-
- vm.$mount();
+ const store = createStore();
+ store.state.committedStateSvgPath = 'committed-state';
+ store.state.lastCommitMsg = 'testing commit message';
+ wrapper = shallowMount(SuccessMessage, { store });
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- it('renders last commit message when it exists', async () => {
- vm.$store.state.lastCommitMsg = 'testing commit message';
-
- await nextTick();
- expect(vm.$el.textContent).toContain('testing commit message');
+ it('renders last commit message when it exists', () => {
+ expect(wrapper.text()).toContain('testing commit message');
});
});
diff --git a/spec/frontend/ide/components/ide_spec.js b/spec/frontend/ide/components/ide_spec.js
index 37b42001a80..9172c69b10e 100644
--- a/spec/frontend/ide/components/ide_spec.js
+++ b/spec/frontend/ide/components/ide_spec.js
@@ -2,6 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import CannotPushCodeAlert from '~/ide/components/cannot_push_code_alert.vue';
import ErrorMessage from '~/ide/components/error_message.vue';
import Ide from '~/ide/components/ide.vue';
@@ -40,6 +41,8 @@ describe('WebIDE', () => {
const findAlert = () => wrapper.findComponent(CannotPushCodeAlert);
beforeEach(() => {
+ stubPerformanceWebAPI();
+
store = createStore();
});
diff --git a/spec/frontend/ide/components/ide_tree_list_spec.js b/spec/frontend/ide/components/ide_tree_list_spec.js
index a85c52f5e86..0f61aa80e53 100644
--- a/spec/frontend/ide/components/ide_tree_list_spec.js
+++ b/spec/frontend/ide/components/ide_tree_list_spec.js
@@ -1,82 +1,72 @@
-import Vue, { nextTick } from 'vue';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { GlSkeletonLoader } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
import IdeTreeList from '~/ide/components/ide_tree_list.vue';
import { createStore } from '~/ide/stores';
+import FileTree from '~/vue_shared/components/file_tree.vue';
import { file } from '../helpers';
import { projectData } from '../mock_data';
-describe('IDE tree list', () => {
- const Component = Vue.extend(IdeTreeList);
- const normalBranchTree = [file('fileName')];
- const emptyBranchTree = [];
- let vm;
- let store;
+describe('IdeTreeList component', () => {
+ let wrapper;
- const bootstrapWithTree = (tree = normalBranchTree) => {
+ const mountComponent = ({ tree, loading = false } = {}) => {
+ const store = createStore();
store.state.currentProjectId = 'abcproject';
store.state.currentBranchId = 'main';
store.state.projects.abcproject = { ...projectData };
- Vue.set(store.state.trees, 'abcproject/main', {
- tree,
- loading: false,
- });
+ Vue.set(store.state.trees, 'abcproject/main', { tree, loading });
- vm = createComponentWithStore(Component, store, {
- viewerType: 'edit',
+ wrapper = shallowMount(IdeTreeList, {
+ propsData: {
+ viewerType: 'edit',
+ },
+ store,
});
};
- beforeEach(() => {
- store = createStore();
- });
-
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
describe('normal branch', () => {
- beforeEach(() => {
- bootstrapWithTree();
-
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
-
- vm.$mount();
- });
+ const tree = [file('fileName')];
it('emits tree-ready event', () => {
- expect(vm.$emit).toHaveBeenCalledTimes(1);
- expect(vm.$emit).toHaveBeenCalledWith('tree-ready');
+ mountComponent({ tree });
+
+ expect(wrapper.emitted('tree-ready')).toEqual([[]]);
});
- it('renders loading indicator', async () => {
- store.state.trees['abcproject/main'].loading = true;
+ it('renders loading indicator', () => {
+ mountComponent({ tree, loading: true });
- await nextTick();
- expect(vm.$el.querySelector('.multi-file-loading-container')).not.toBeNull();
- expect(vm.$el.querySelectorAll('.multi-file-loading-container').length).toBe(3);
+ expect(wrapper.findAllComponents(GlSkeletonLoader)).toHaveLength(3);
});
it('renders list of files', () => {
- expect(vm.$el.textContent).toContain('fileName');
+ mountComponent({ tree });
+
+ expect(wrapper.findAllComponents(FileTree)).toHaveLength(1);
+ expect(wrapper.findComponent(FileTree).props('file')).toEqual(tree[0]);
});
});
describe('empty-branch state', () => {
beforeEach(() => {
- bootstrapWithTree(emptyBranchTree);
-
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
+ mountComponent({ tree: [] });
+ });
- vm.$mount();
+ it('emits tree-ready event', () => {
+ expect(wrapper.emitted('tree-ready')).toEqual([[]]);
});
- it('still emits tree-ready event', () => {
- expect(vm.$emit).toHaveBeenCalledWith('tree-ready');
+ it('does not render files', () => {
+ expect(wrapper.findAllComponents(FileTree)).toHaveLength(0);
});
- it('does not load files if the branch is empty', () => {
- expect(vm.$el.textContent).not.toContain('fileName');
- expect(vm.$el.textContent).toContain('No files');
+ it('renders empty state text', () => {
+ expect(wrapper.text()).toBe('No files');
});
});
});
diff --git a/spec/frontend/ide/components/nav_dropdown_button_spec.js b/spec/frontend/ide/components/nav_dropdown_button_spec.js
index 1c14685df68..8eebcdd9e08 100644
--- a/spec/frontend/ide/components/nav_dropdown_button_spec.js
+++ b/spec/frontend/ide/components/nav_dropdown_button_spec.js
@@ -1,81 +1,74 @@
-import Vue, { nextTick } from 'vue';
import { trimText } from 'helpers/text_helper';
-import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import NavDropdownButton from '~/ide/components/nav_dropdown_button.vue';
import { createStore } from '~/ide/stores';
+import { __ } from '~/locale';
-describe('NavDropdown', () => {
+describe('NavDropdownButton component', () => {
const TEST_BRANCH_ID = 'lorem-ipsum-dolar';
const TEST_MR_ID = '12345';
- let store;
- let vm;
-
- beforeEach(() => {
- store = createStore();
- });
+ let wrapper;
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- const createComponent = (props = {}) => {
- vm = mountComponentWithStore(Vue.extend(NavDropdownButton), { props, store });
- vm.$mount();
+ const createComponent = ({ props = {}, state = {} } = {}) => {
+ const store = createStore();
+ store.replaceState(state);
+ wrapper = mountExtended(NavDropdownButton, { propsData: props, store });
};
- const findIcon = (name) => vm.$el.querySelector(`[data-testid="${name}-icon"]`);
- const findMRIcon = () => findIcon('merge-request');
- const findBranchIcon = () => findIcon('branch');
+ const findMRIcon = () => wrapper.findByLabelText(__('Merge request'));
+ const findBranchIcon = () => wrapper.findByLabelText(__('Current Branch'));
describe('normal', () => {
- beforeEach(() => {
+ it('renders empty placeholders if state is falsy', () => {
createComponent();
- });
- it('renders empty placeholders, if state is falsey', () => {
- expect(trimText(vm.$el.textContent)).toEqual('- -');
+ expect(trimText(wrapper.text())).toBe('- -');
});
- it('renders branch name, if state has currentBranchId', async () => {
- vm.$store.state.currentBranchId = TEST_BRANCH_ID;
+ it('renders branch name, if state has currentBranchId', () => {
+ createComponent({ state: { currentBranchId: TEST_BRANCH_ID } });
- await nextTick();
- expect(trimText(vm.$el.textContent)).toEqual(`${TEST_BRANCH_ID} -`);
+ expect(trimText(wrapper.text())).toBe(`${TEST_BRANCH_ID} -`);
});
- it('renders mr id, if state has currentMergeRequestId', async () => {
- vm.$store.state.currentMergeRequestId = TEST_MR_ID;
+ it('renders mr id, if state has currentMergeRequestId', () => {
+ createComponent({ state: { currentMergeRequestId: TEST_MR_ID } });
- await nextTick();
- expect(trimText(vm.$el.textContent)).toEqual(`- !${TEST_MR_ID}`);
+ expect(trimText(wrapper.text())).toBe(`- !${TEST_MR_ID}`);
});
- it('renders branch and mr, if state has both', async () => {
- vm.$store.state.currentBranchId = TEST_BRANCH_ID;
- vm.$store.state.currentMergeRequestId = TEST_MR_ID;
+ it('renders branch and mr, if state has both', () => {
+ createComponent({
+ state: { currentBranchId: TEST_BRANCH_ID, currentMergeRequestId: TEST_MR_ID },
+ });
- await nextTick();
- expect(trimText(vm.$el.textContent)).toEqual(`${TEST_BRANCH_ID} !${TEST_MR_ID}`);
+ expect(trimText(wrapper.text())).toBe(`${TEST_BRANCH_ID} !${TEST_MR_ID}`);
});
it('shows icons', () => {
- expect(findBranchIcon()).toBeTruthy();
- expect(findMRIcon()).toBeTruthy();
+ createComponent();
+
+ expect(findBranchIcon().exists()).toBe(true);
+ expect(findMRIcon().exists()).toBe(true);
});
});
- describe('with showMergeRequests false', () => {
+ describe('when showMergeRequests=false', () => {
beforeEach(() => {
- createComponent({ showMergeRequests: false });
+ createComponent({ props: { showMergeRequests: false } });
});
it('shows single empty placeholder, if state is falsey', () => {
- expect(trimText(vm.$el.textContent)).toEqual('-');
+ expect(trimText(wrapper.text())).toBe('-');
});
it('shows only branch icon', () => {
- expect(findBranchIcon()).toBeTruthy();
- expect(findMRIcon()).toBe(null);
+ expect(findBranchIcon().exists()).toBe(true);
+ expect(findMRIcon().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/ide/components/new_dropdown/modal_spec.js b/spec/frontend/ide/components/new_dropdown/modal_spec.js
index e8635444801..68cc08d2ebc 100644
--- a/spec/frontend/ide/components/new_dropdown/modal_spec.js
+++ b/spec/frontend/ide/components/new_dropdown/modal_spec.js
@@ -1,209 +1,419 @@
-import Vue, { nextTick } from 'vue';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { GlButton, GlModal } from '@gitlab/ui';
+import { nextTick } from 'vue';
import createFlash from '~/flash';
-import modal from '~/ide/components/new_dropdown/modal.vue';
+import Modal from '~/ide/components/new_dropdown/modal.vue';
import { createStore } from '~/ide/stores';
+import { stubComponent } from 'helpers/stub_component';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { createEntriesFromPaths } from '../../helpers';
jest.mock('~/flash');
+const NEW_NAME = 'babar';
+
describe('new file modal component', () => {
- const Component = Vue.extend(modal);
- let vm;
+ const showModal = jest.fn();
+ const toggleModal = jest.fn();
+
+ let store;
+ let wrapper;
+
+ const findForm = () => wrapper.findByTestId('file-name-form');
+ const findGlModal = () => wrapper.findComponent(GlModal);
+ const findInput = () => wrapper.findByTestId('file-name-field');
+ const findTemplateButtons = () => wrapper.findAllComponents(GlButton);
+ const findTemplateButtonsModel = () =>
+ findTemplateButtons().wrappers.map((x) => ({
+ text: x.text(),
+ variant: x.props('variant'),
+ category: x.props('category'),
+ }));
+
+ const open = (type, path) => {
+ // TODO: This component can not be passed props
+ // We have to interact with the open() method?
+ wrapper.vm.open(type, path);
+ };
+ const triggerSubmitForm = () => {
+ findForm().trigger('submit');
+ };
+ const triggerSubmitModal = () => {
+ findGlModal().vm.$emit('primary');
+ };
+ const triggerCancel = () => {
+ findGlModal().vm.$emit('cancel');
+ };
+
+ const mountComponent = () => {
+ const GlModalStub = stubComponent(GlModal);
+ jest.spyOn(GlModalStub.methods, 'show').mockImplementation(showModal);
+ jest.spyOn(GlModalStub.methods, 'toggle').mockImplementation(toggleModal);
+
+ wrapper = shallowMountExtended(Modal, {
+ store,
+ stubs: {
+ GlModal: GlModalStub,
+ },
+ // We need to attach to document for "focus" to work
+ attachTo: document.body,
+ });
+ };
+
+ beforeEach(() => {
+ store = createStore();
+
+ Object.assign(
+ store.state.entries,
+ createEntriesFromPaths([
+ 'README.md',
+ 'src',
+ 'src/deleted.js',
+ 'src/parent_dir',
+ 'src/parent_dir/foo.js',
+ ]),
+ );
+ Object.assign(store.state.entries['src/deleted.js'], { deleted: true });
+
+ jest.spyOn(store, 'dispatch').mockImplementation();
+ });
afterEach(() => {
- vm.$destroy();
+ store = null;
+ wrapper.destroy();
+ document.body.innerHTML = '';
});
- describe.each`
- entryType | modalTitle | btnTitle | showsFileTemplates
- ${'tree'} | ${'Create new directory'} | ${'Create directory'} | ${false}
- ${'blob'} | ${'Create new file'} | ${'Create file'} | ${true}
- `('$entryType', ({ entryType, modalTitle, btnTitle, showsFileTemplates }) => {
+ describe('default', () => {
beforeEach(async () => {
- const store = createStore();
-
- vm = createComponentWithStore(Component, store).$mount();
- vm.open(entryType);
- vm.name = 'testing';
+ mountComponent();
+ // Not necessarily needed, but used to ensure that nothing extra is happening after the tick
await nextTick();
});
- afterEach(() => {
- vm.close();
+ it('renders modal', () => {
+ expect(findGlModal().props()).toMatchObject({
+ actionCancel: {
+ attributes: [{ variant: 'default' }],
+ text: 'Cancel',
+ },
+ actionPrimary: {
+ attributes: [{ variant: 'confirm' }],
+ text: 'Create file',
+ },
+ actionSecondary: null,
+ size: 'lg',
+ modalId: 'ide-new-entry',
+ title: 'Create new file',
+ });
});
- it(`sets modal title as ${entryType}`, () => {
- expect(document.querySelector('.modal-title').textContent.trim()).toBe(modalTitle);
+ it('renders name label', () => {
+ expect(wrapper.find('label').text()).toBe('Name');
});
- it(`sets button label as ${entryType}`, () => {
- expect(document.querySelector('.btn-confirm').textContent.trim()).toBe(btnTitle);
+ it('renders template buttons', () => {
+ const actual = findTemplateButtonsModel();
+
+ expect(actual.length).toBeGreaterThan(0);
+ expect(actual).toEqual(
+ store.getters['fileTemplates/templateTypes'].map((template) => ({
+ category: 'secondary',
+ text: template.name,
+ variant: 'dashed',
+ })),
+ );
});
- it(`sets form label as ${entryType}`, () => {
- expect(document.querySelector('.label-bold').textContent.trim()).toBe('Name');
+ // These negative ".not.toHaveBeenCalled" assertions complement the positive "toHaveBeenCalled"
+ // assertions that show up later in this spec. Without these, we're not guaranteed the "act"
+ // actually caused the change in behavior.
+ it('does not dispatch actions by default', () => {
+ expect(store.dispatch).not.toHaveBeenCalled();
});
- it(`shows file templates: ${showsFileTemplates}`, () => {
- const templateFilesEl = document.querySelector('.file-templates');
- expect(Boolean(templateFilesEl)).toBe(showsFileTemplates);
+ it('does not trigger modal by default', () => {
+ expect(showModal).not.toHaveBeenCalled();
+ expect(toggleModal).not.toHaveBeenCalled();
});
- });
- describe('rename entry', () => {
- beforeEach(() => {
- const store = createStore();
- store.state.entries = {
- 'test-path': {
- name: 'test',
- type: 'blob',
- path: 'test-path',
- },
- };
-
- vm = createComponentWithStore(Component, store).$mount();
+ it('does not focus input by default', () => {
+ expect(document.activeElement).toBe(document.body);
});
+ });
- it.each`
- entryType | modalTitle | btnTitle
- ${'tree'} | ${'Rename folder'} | ${'Rename folder'}
- ${'blob'} | ${'Rename file'} | ${'Rename file'}
- `(
- 'renders title and button for renaming $entryType',
- async ({ entryType, modalTitle, btnTitle }) => {
- vm.$store.state.entries['test-path'].type = entryType;
- vm.open('rename', 'test-path');
+ describe.each`
+ entryType | path | modalTitle | btnTitle | showsFileTemplates | inputValue | inputPlaceholder
+ ${'tree'} | ${''} | ${'Create new directory'} | ${'Create directory'} | ${false} | ${''} | ${'dir/'}
+ ${'blob'} | ${''} | ${'Create new file'} | ${'Create file'} | ${true} | ${''} | ${'dir/file_name'}
+ ${'blob'} | ${'foo/bar'} | ${'Create new file'} | ${'Create file'} | ${true} | ${'foo/bar/'} | ${'dir/file_name'}
+ `(
+ 'when opened as $entryType with path "$path"',
+ ({
+ entryType,
+ path,
+ modalTitle,
+ btnTitle,
+ showsFileTemplates,
+ inputValue,
+ inputPlaceholder,
+ }) => {
+ beforeEach(async () => {
+ mountComponent();
+
+ open(entryType, path);
await nextTick();
- expect(document.querySelector('.modal-title').textContent.trim()).toBe(modalTitle);
- expect(document.querySelector('.btn-confirm').textContent.trim()).toBe(btnTitle);
- },
- );
+ });
- describe('entryName', () => {
- it('returns entries name', () => {
- vm.open('rename', 'test-path');
+ it('sets modal props', () => {
+ expect(findGlModal().props()).toMatchObject({
+ title: modalTitle,
+ actionPrimary: {
+ attributes: [{ variant: 'confirm' }],
+ text: btnTitle,
+ },
+ });
+ });
- expect(vm.entryName).toBe('test-path');
+ it('sets input attributes', () => {
+ expect(findInput().element.value).toBe(inputValue);
+ expect(findInput().attributes('placeholder')).toBe(inputPlaceholder);
});
- it('does not reset entryName to its old value if empty', () => {
- vm.entryName = 'hello';
- vm.entryName = '';
+ it(`shows file templates: ${showsFileTemplates}`, () => {
+ const actual = findTemplateButtonsModel().length > 0;
- expect(vm.entryName).toBe('');
+ expect(actual).toBe(showsFileTemplates);
+ });
+
+ it('shows modal', () => {
+ expect(showModal).toHaveBeenCalled();
});
- });
- describe('open', () => {
- it('sets entryName to path provided if modalType is rename', () => {
- vm.open('rename', 'test-path');
+ it('focuses the input', () => {
+ expect(document.activeElement).toBe(findInput().element);
+ });
+
+ it('resets when canceled', async () => {
+ triggerCancel();
+
+ await nextTick();
- expect(vm.entryName).toBe('test-path');
+ // Resets input value
+ expect(findInput().element.value).toBe('');
+ // Resets to blob mode
+ expect(findGlModal().props('title')).toBe('Create new file');
});
+ },
+ );
+
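+ // Stray whitespace in the entered name (e.g. 'foo /bar.js') is expected to be stripped before dispatching.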
+ describe.each`
+ modalType | name | expectedName
+ ${'blob'} | ${'foo/bar.js'} | ${'foo/bar.js'}
+ ${'blob'} | ${'foo /bar.js'} | ${'foo/bar.js'}
+ ${'tree'} | ${'foo/dir'} | ${'foo/dir'}
+ ${'tree'} | ${'foo /dir'} | ${'foo/dir'}
+ `('when submitting as $modalType with "$name"', ({ modalType, name, expectedName }) => {
+ describe('when using the modal primary button', () => {
+ beforeEach(async () => {
+ mountComponent();
+
+ open(modalType, '');
+ await nextTick();
- it("appends '/' to the path if modalType isn't rename", () => {
- vm.open('blob', 'test-path');
+ findInput().setValue(name);
+ triggerSubmitModal();
+ });
- expect(vm.entryName).toBe('test-path/');
+ it('triggers createTempEntry action', () => {
+ expect(store.dispatch).toHaveBeenCalledWith('createTempEntry', {
+ name: expectedName,
+ type: modalType,
+ });
});
+ });
+
+ describe('when triggering form submit (pressing enter)', () => {
+ beforeEach(async () => {
+ mountComponent();
+
+ open(modalType, '');
+ await nextTick();
- it('leaves entryName blank if no path is provided', () => {
- vm.open('blob');
+ findInput().setValue(name);
+ triggerSubmitForm();
+ });
- expect(vm.entryName).toBe('');
+ it('triggers createTempEntry action', () => {
+ expect(store.dispatch).toHaveBeenCalledWith('createTempEntry', {
+ name: expectedName,
+ type: modalType,
+ });
});
});
});
- describe('createFromTemplate', () => {
- let store;
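+ // Selecting a file template should create a file named after the template inside the directory typed into the input.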
+ describe('when creating from template type', () => {
+ beforeEach(async () => {
+ mountComponent();
- beforeEach(() => {
- store = createStore();
- store.state.entries = {
- 'test-path/test': {
- name: 'test',
- deleted: false,
- },
- };
+ open('blob', 'some_dir');
- vm = createComponentWithStore(Component, store).$mount();
- vm.open('blob');
+ await nextTick();
- jest.spyOn(vm, 'createTempEntry').mockImplementation();
+ // Set the input, then trigger the second template button
+ findInput().setValue('some_dir/foo.js');
+ findTemplateButtons().at(1).vm.$emit('click');
});
- it.each`
- entryName | newFilePath
- ${''} | ${'.gitignore'}
- ${'README.md'} | ${'.gitignore'}
- ${'test-path/test/'} | ${'test-path/test/.gitignore'}
- ${'test-path/test'} | ${'test-path/.gitignore'}
- ${'test-path/test/abc.md'} | ${'test-path/test/.gitignore'}
- `(
- 'creates a new file with the given template name in appropriate directory for path: $path',
- ({ entryName, newFilePath }) => {
- vm.entryName = entryName;
+ it('triggers createTempEntry action', () => {
+ const { name: expectedName } = store.getters['fileTemplates/templateTypes'][1];
- vm.createFromTemplate({ name: '.gitignore' });
+ expect(store.dispatch).toHaveBeenCalledWith('createTempEntry', {
+ name: `some_dir/${expectedName}`,
+ type: 'blob',
+ });
+ });
- expect(vm.createTempEntry).toHaveBeenCalledWith({
- name: newFilePath,
- type: 'blob',
- });
- },
- );
+ it('toggles modal', () => {
+ expect(toggleModal).toHaveBeenCalled();
+ });
});
- describe('submitForm', () => {
- let store;
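+ // Renaming pre-fills the input with the full path and pre-selects only the basename for quick editing.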
+ describe.each`
+ origPath | title | inputValue | inputSelectionStart
+ ${'src/parent_dir'} | ${'Rename folder'} | ${'src/parent_dir'} | ${'src/'.length}
+ ${'README.md'} | ${'Rename file'} | ${'README.md'} | ${0}
+ `('when renaming for $origPath', ({ origPath, title, inputValue, inputSelectionStart }) => {
+ beforeEach(async () => {
+ mountComponent();
+
+ open('rename', origPath);
+
+ await nextTick();
+ });
- beforeEach(() => {
- store = createStore();
- store.state.entries = {
- 'test-path/test': {
- name: 'test',
- deleted: false,
+ it('sets modal props for renaming', () => {
+ expect(findGlModal().props()).toMatchObject({
+ title,
+ actionPrimary: {
+ attributes: [{ variant: 'confirm' }],
+ text: title,
},
- };
+ });
+ });
+
+ it('sets input value', () => {
+ expect(findInput().element.value).toBe(inputValue);
+ });
- vm = createComponentWithStore(Component, store).$mount();
+ it(`does not show file templates`, () => {
+ expect(findTemplateButtonsModel()).toHaveLength(0);
});
- it('throws an error when target entry exists', () => {
- vm.open('rename', 'test-path/test');
+ it('shows modal when renaming', () => {
+ expect(showModal).toHaveBeenCalled();
+ });
- expect(createFlash).not.toHaveBeenCalled();
+ it('focuses the input when renaming', () => {
+ expect(document.activeElement).toBe(findInput().element);
+ });
+
+ it('selects name part of the input', () => {
+ expect(findInput().element.selectionStart).toBe(inputSelectionStart);
+ expect(findInput().element.selectionEnd).toBe(origPath.length);
+ });
+
+ describe('when rename is submitted successfully', () => {
+ describe('when using the modal primary button', () => {
+ beforeEach(() => {
+ findInput().setValue(NEW_NAME);
+ triggerSubmitModal();
+ });
+
+ it('dispatches renameEntry action', () => {
+ expect(store.dispatch).toHaveBeenCalledWith('renameEntry', {
+ path: origPath,
+ parentPath: '',
+ name: NEW_NAME,
+ });
+ });
+
+ it('does not trigger flash', () => {
+ expect(createFlash).not.toHaveBeenCalled();
+ });
+ });
- vm.submitForm();
+ describe('when triggering form submit (pressing enter)', () => {
+ beforeEach(() => {
+ findInput().setValue(NEW_NAME);
+ triggerSubmitForm();
+ });
+ it('dispatches renameEntry action', () => {
+ expect(store.dispatch).toHaveBeenCalledWith('renameEntry', {
+ path: origPath,
+ parentPath: '',
+ name: NEW_NAME,
+ });
+ });
+
+ it('does not trigger flash', () => {
+ expect(createFlash).not.toHaveBeenCalled();
+ });
+ });
+ });
+ });
+
+ describe('when renaming and file already exists', () => {
+ beforeEach(async () => {
+ mountComponent();
+
+ open('rename', 'src/parent_dir');
+
+ await nextTick();
+
+ // Set to something that already exists!
+ findInput().setValue('src');
+ triggerSubmitModal();
+ });
+
+ it('creates flash', () => {
expect(createFlash).toHaveBeenCalledWith({
- message: 'The name "test-path/test" is already taken in this directory.',
+ message: 'The name "src" is already taken in this directory.',
fadeTransition: false,
addBodyClass: true,
});
});
- it('does not throw error when target entry does not exist', () => {
- jest.spyOn(vm, 'renameEntry').mockImplementation();
+ it('does not dispatch any action', () => {
+ expect(store.dispatch).not.toHaveBeenCalled();
+ });
+ });
- vm.open('rename', 'test-path/test');
- vm.entryName = 'test-path/test2';
- vm.submitForm();
+ describe('when renaming and file has been deleted', () => {
+ beforeEach(async () => {
+ mountComponent();
- expect(createFlash).not.toHaveBeenCalled();
- });
+ open('rename', 'src/parent_dir/foo.js');
- it('removes leading/trailing found in the new name', () => {
- vm.open('rename', 'test-path/test');
+ await nextTick();
- vm.entryName = 'test-path /test';
+ findInput().setValue('src/deleted.js');
+ triggerSubmitModal();
+ });
- vm.submitForm();
+ it('does not create flash', () => {
+ expect(createFlash).not.toHaveBeenCalled();
+ });
- expect(vm.entryName).toBe('test-path/test');
+ it('dispatches renameEntry action', () => {
+ expect(store.dispatch).toHaveBeenCalledWith('renameEntry', {
+ path: 'src/parent_dir/foo.js',
+ name: 'deleted.js',
+ parentPath: 'src',
+ });
});
});
});
diff --git a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js
index b44651481e9..7a0bcda1b7a 100644
--- a/spec/frontend/ide/components/repo_editor_spec.js
+++ b/spec/frontend/ide/components/repo_editor_spec.js
@@ -1,3 +1,4 @@
+import { GlTab } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { editor as monacoEditor, Range } from 'monaco-editor';
import Vue, { nextTick } from 'vue';
@@ -5,6 +6,7 @@ import Vuex from 'vuex';
import { shallowMount } from '@vue/test-utils';
import '~/behaviors/markdown/render_gfm';
import waitForPromises from 'helpers/wait_for_promises';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import { exampleConfigs, exampleFiles } from 'jest/ide/lib/editorconfig/mock_data';
import { EDITOR_CODE_INSTANCE_FN, EDITOR_DIFF_INSTANCE_FN } from '~/editor/constants';
import { EditorMarkdownExtension } from '~/editor/extensions/source_editor_markdown_ext';
@@ -125,10 +127,12 @@ describe('RepoEditor', () => {
};
const findEditor = () => wrapper.find('[data-testid="editor-container"]');
- const findTabs = () => wrapper.findAll('.ide-mode-tabs .nav-links li');
+ const findTabs = () => wrapper.findAllComponents(GlTab);
const findPreviewTab = () => wrapper.find('[data-testid="preview-tab"]');
beforeEach(() => {
+ stubPerformanceWebAPI();
+
createInstanceSpy = jest.spyOn(SourceEditor.prototype, EDITOR_CODE_INSTANCE_FN);
createDiffInstanceSpy = jest.spyOn(SourceEditor.prototype, EDITOR_DIFF_INSTANCE_FN);
createModelSpy = jest.spyOn(monacoEditor, 'createModel');
@@ -201,12 +205,12 @@ describe('RepoEditor', () => {
const tabs = findTabs();
expect(tabs).toHaveLength(2);
- expect(tabs.at(0).text()).toBe('Edit');
- expect(tabs.at(1).text()).toBe('Preview Markdown');
+ expect(tabs.at(0).element.dataset.testid).toBe('edit-tab');
+ expect(tabs.at(1).element.dataset.testid).toBe('preview-tab');
});
it('renders markdown for tempFile', async () => {
- findPreviewTab().trigger('click');
+ findPreviewTab().vm.$emit('click');
await waitForPromises();
expect(wrapper.find(ContentViewer).html()).toContain(dummyFile.text.content);
});
diff --git a/spec/frontend/ide/ide_router_spec.js b/spec/frontend/ide/ide_router_spec.js
index cd10812f8ea..adbdba1b11e 100644
--- a/spec/frontend/ide/ide_router_spec.js
+++ b/spec/frontend/ide/ide_router_spec.js
@@ -1,4 +1,5 @@
import waitForPromises from 'helpers/wait_for_promises';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import { createRouter } from '~/ide/ide_router';
import { createStore } from '~/ide/stores';
@@ -12,6 +13,8 @@ describe('IDE router', () => {
let router;
beforeEach(() => {
+ stubPerformanceWebAPI();
+
window.history.replaceState({}, '', '/');
store = createStore();
router = createRouter(store, DEFAULT_BRANCH);
diff --git a/spec/frontend/ide/stores/actions/file_spec.js b/spec/frontend/ide/stores/actions/file_spec.js
index 45d1beea3f8..6c1dee1e5ca 100644
--- a/spec/frontend/ide/stores/actions/file_spec.js
+++ b/spec/frontend/ide/stores/actions/file_spec.js
@@ -7,6 +7,7 @@ import { createStore } from '~/ide/stores';
import * as actions from '~/ide/stores/actions/file';
import * as types from '~/ide/stores/mutation_types';
import axios from '~/lib/utils/axios_utils';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import { file, createTriggerRenameAction, createTriggerUpdatePayload } from '../../helpers';
const ORIGINAL_CONTENT = 'original content';
@@ -19,6 +20,8 @@ describe('IDE store file actions', () => {
let router;
beforeEach(() => {
+ stubPerformanceWebAPI();
+
mock = new MockAdapter(axios);
originalGon = window.gon;
window.gon = {
diff --git a/spec/frontend/ide/stores/actions/merge_request_spec.js b/spec/frontend/ide/stores/actions/merge_request_spec.js
index 5592e2664c4..abc3ba5b0a2 100644
--- a/spec/frontend/ide/stores/actions/merge_request_spec.js
+++ b/spec/frontend/ide/stores/actions/merge_request_spec.js
@@ -1,5 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import { range } from 'lodash';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import { TEST_HOST } from 'helpers/test_constants';
import testAction from 'helpers/vuex_action_helper';
import createFlash from '~/flash';
@@ -35,6 +36,8 @@ describe('IDE store merge request actions', () => {
let mock;
beforeEach(() => {
+ stubPerformanceWebAPI();
+
store = createStore();
mock = new MockAdapter(axios);
diff --git a/spec/frontend/ide/stores/actions/tree_spec.js b/spec/frontend/ide/stores/actions/tree_spec.js
index fc44cbb21ae..d43393875eb 100644
--- a/spec/frontend/ide/stores/actions/tree_spec.js
+++ b/spec/frontend/ide/stores/actions/tree_spec.js
@@ -1,4 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import { TEST_HOST } from 'helpers/test_constants';
import testAction from 'helpers/vuex_action_helper';
import { createRouter } from '~/ide/ide_router';
@@ -24,6 +25,8 @@ describe('Multi-file store tree actions', () => {
};
beforeEach(() => {
+ stubPerformanceWebAPI();
+
store = createStore();
router = createRouter(store);
jest.spyOn(router, 'push').mockImplementation();
diff --git a/spec/frontend/ide/stores/actions_spec.js b/spec/frontend/ide/stores/actions_spec.js
index 3889c4f11c3..f6d54491d77 100644
--- a/spec/frontend/ide/stores/actions_spec.js
+++ b/spec/frontend/ide/stores/actions_spec.js
@@ -1,4 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import testAction from 'helpers/vuex_action_helper';
import eventHub from '~/ide/eventhub';
import { createRouter } from '~/ide/ide_router';
@@ -34,6 +35,8 @@ describe('Multi-file store actions', () => {
let router;
beforeEach(() => {
+ stubPerformanceWebAPI();
+
store = createStore();
router = createRouter(store);
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
index 0279ad454d2..cdc508a0033 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
@@ -50,13 +50,13 @@ describe('import table', () => {
const findPaginationDropdownText = () => findPaginationDropdown().find('button').text();
const findSelectionCount = () => wrapper.find('[data-test-id="selection-count"]');
- const triggerSelectAllCheckbox = () =>
- wrapper.find('thead input[type=checkbox]').trigger('click');
+ const triggerSelectAllCheckbox = (checked = true) =>
+ wrapper.find('thead input[type=checkbox]').setChecked(checked);
const selectRow = (idx) =>
- wrapper.findAll('tbody td input[type=checkbox]').at(idx).trigger('click');
+ wrapper.findAll('tbody td input[type=checkbox]').at(idx).setChecked(true);
- const createComponent = ({ bulkImportSourceGroups, importGroups }) => {
+ const createComponent = ({ bulkImportSourceGroups, importGroups, defaultTargetNamespace }) => {
apolloProvider = createMockApollo([], {
Query: {
availableNamespaces: () => availableNamespacesFixture,
@@ -73,6 +73,7 @@ describe('import table', () => {
jobsPath: '/fake_job_path',
sourceUrl: SOURCE_URL,
historyPath: '/fake_history_path',
+ defaultTargetNamespace,
},
apolloProvider,
});
@@ -165,6 +166,27 @@ describe('import table', () => {
expect(targetNamespaceDropdownButton.text()).toBe('No parent');
});
+ it('respects default namespace if provided', async () => {
+ const targetNamespace = availableNamespacesFixture[1];
+
+ createComponent({
+ bulkImportSourceGroups: () => ({
+ nodes: FAKE_GROUPS,
+ pageInfo: FAKE_PAGE_INFO,
+ versionValidation: FAKE_VERSION_VALIDATION,
+ }),
+ defaultTargetNamespace: targetNamespace.id,
+ });
+
+ await waitForPromises();
+
+ const firstRow = wrapper.find('tbody tr');
+ const targetNamespaceDropdownButton = findTargetNamespaceDropdown(firstRow).find(
+ '[aria-haspopup]',
+ );
+ expect(targetNamespaceDropdownButton.text()).toBe(targetNamespace.fullPath);
+ });
+
it('does not render status string when result list is empty', async () => {
createComponent({
bulkImportSourceGroups: jest.fn().mockResolvedValue({
@@ -388,7 +410,7 @@ describe('import table', () => {
expect(findSelectionCount().text()).toMatchInterpolatedText('0 selected');
await triggerSelectAllCheckbox();
expect(findSelectionCount().text()).toMatchInterpolatedText('2 selected');
- await triggerSelectAllCheckbox();
+ await triggerSelectAllCheckbox(false);
expect(findSelectionCount().text()).toMatchInterpolatedText('0 selected');
});
diff --git a/spec/frontend/integrations/edit/components/active_checkbox_spec.js b/spec/frontend/integrations/edit/components/active_checkbox_spec.js
index 633389578a0..1f7a5f0dbc9 100644
--- a/spec/frontend/integrations/edit/components/active_checkbox_spec.js
+++ b/spec/frontend/integrations/edit/components/active_checkbox_spec.js
@@ -1,7 +1,6 @@
import { GlFormCheckbox } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
import ActiveCheckbox from '~/integrations/edit/components/active_checkbox.vue';
import { createStore } from '~/integrations/edit/store';
@@ -74,22 +73,13 @@ describe('ActiveCheckbox', () => {
expect(findGlFormCheckbox().vm.$attrs.checked).toBe(true);
});
- describe('on checkbox click', () => {
- it('switches the form value', async () => {
- findInputInCheckbox().trigger('click');
-
- await nextTick();
- expect(findGlFormCheckbox().vm.$attrs.checked).toBe(false);
- });
- });
-
it('emits `toggle-integration-active` event with `true` on mount', () => {
expect(wrapper.emitted('toggle-integration-active')[0]).toEqual([true]);
});
describe('on checkbox `change` event', () => {
- it('emits `toggle-integration-active` event', () => {
- findGlFormCheckbox().vm.$emit('change', false);
+ it('emits `toggle-integration-active` event', async () => {
+ await findInputInCheckbox().setChecked(false);
expect(wrapper.emitted('toggle-integration-active')[1]).toEqual([false]);
});
diff --git a/spec/frontend/integrations/edit/components/integration_form_spec.js b/spec/frontend/integrations/edit/components/integration_form_spec.js
index a2bdece821f..21e57a2e33c 100644
--- a/spec/frontend/integrations/edit/components/integration_form_spec.js
+++ b/spec/frontend/integrations/edit/components/integration_form_spec.js
@@ -596,37 +596,42 @@ describe('IntegrationForm', () => {
});
describe.each`
- scenario | replyStatus | errorMessage | expectToast | expectSentry
- ${'when "test settings" request fails'} | ${httpStatus.INTERNAL_SERVER_ERROR} | ${undefined} | ${I18N_DEFAULT_ERROR_MESSAGE} | ${true}
- ${'when "test settings" returns an error'} | ${httpStatus.OK} | ${'an error'} | ${'an error'} | ${false}
- ${'when "test settings" succeeds'} | ${httpStatus.OK} | ${undefined} | ${I18N_SUCCESSFUL_CONNECTION_MESSAGE} | ${false}
- `('$scenario', ({ replyStatus, errorMessage, expectToast, expectSentry }) => {
- beforeEach(async () => {
- mockAxios.onPut(mockTestPath).replyOnce(replyStatus, {
- error: Boolean(errorMessage),
- message: errorMessage,
+ scenario | replyStatus | errorMessage | serviceResponse | expectToast | expectSentry
+ ${'when "test settings" request fails'} | ${httpStatus.INTERNAL_SERVER_ERROR} | ${undefined} | ${undefined} | ${I18N_DEFAULT_ERROR_MESSAGE} | ${true}
+ ${'when "test settings" returns an error'} | ${httpStatus.OK} | ${'an error'} | ${undefined} | ${'an error'} | ${false}
+ ${'when "test settings" returns an error with details'} | ${httpStatus.OK} | ${'an error.'} | ${'extra info'} | ${'an error. extra info'} | ${false}
+ ${'when "test settings" succeeds'} | ${httpStatus.OK} | ${undefined} | ${undefined} | ${I18N_SUCCESSFUL_CONNECTION_MESSAGE} | ${false}
+ `(
+ '$scenario',
+ ({ replyStatus, errorMessage, serviceResponse, expectToast, expectSentry }) => {
+ beforeEach(async () => {
+ mockAxios.onPut(mockTestPath).replyOnce(replyStatus, {
+ error: Boolean(errorMessage),
+ message: errorMessage,
+ service_response: serviceResponse,
+ });
+
+ await findTestButton().vm.$emit('click', new Event('click'));
+ await waitForPromises();
});
- await findTestButton().vm.$emit('click', new Event('click'));
- await waitForPromises();
- });
-
- it(`calls toast with '${expectToast}'`, () => {
- expect(mockToastShow).toHaveBeenCalledWith(expectToast);
- });
+ it(`calls toast with '${expectToast}'`, () => {
+ expect(mockToastShow).toHaveBeenCalledWith(expectToast);
+ });
- it('sets `loading` prop of test button to `false`', () => {
- expect(findTestButton().props('loading')).toBe(false);
- });
+ it('sets `loading` prop of test button to `false`', () => {
+ expect(findTestButton().props('loading')).toBe(false);
+ });
- it('sets save button `disabled` prop to `false`', () => {
- expect(findProjectSaveButton().props('disabled')).toBe(false);
- });
+ it('sets save button `disabled` prop to `false`', () => {
+ expect(findProjectSaveButton().props('disabled')).toBe(false);
+ });
- it(`${expectSentry ? 'does' : 'does not'} capture exception in Sentry`, () => {
- expect(Sentry.captureException).toHaveBeenCalledTimes(expectSentry ? 1 : 0);
- });
- });
+ it(`${expectSentry ? 'does' : 'does not'} capture exception in Sentry`, () => {
+ expect(Sentry.captureException).toHaveBeenCalledTimes(expectSentry ? 1 : 0);
+ });
+ },
+ );
});
});
diff --git a/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js b/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js
index 49fbebb9396..6011b3e6edc 100644
--- a/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js
@@ -115,9 +115,8 @@ describe('JiraTriggerFields', () => {
const checkbox = findIssueTransitionEnabled();
expect(checkbox.element.checked).toBe(false);
- checkbox.trigger('click');
+ await checkbox.setChecked(true);
- await nextTick();
const [radio1, radio2] = findIssueTransitionModeRadios().wrappers;
expect(radio1.element.checked).toBe(true);
expect(radio2.element.checked).toBe(false);
diff --git a/spec/frontend/invite_members/components/import_a_project_modal_spec.js b/spec/frontend/invite_members/components/import_project_members_modal_spec.js
index 6db881d5c75..b4d42d90d99 100644
--- a/spec/frontend/invite_members/components/import_a_project_modal_spec.js
+++ b/spec/frontend/invite_members/components/import_project_members_modal_spec.js
@@ -5,7 +5,7 @@ import { stubComponent } from 'helpers/stub_component';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import * as ProjectsApi from '~/api/projects_api';
-import ImportAProjectModal from '~/invite_members/components/import_a_project_modal.vue';
+import ImportProjectMembersModal from '~/invite_members/components/import_project_members_modal.vue';
import ProjectSelect from '~/invite_members/components/project_select.vue';
import axios from '~/lib/utils/axios_utils';
@@ -20,7 +20,7 @@ const $toast = {
};
const createComponent = () => {
- wrapper = shallowMountExtended(ImportAProjectModal, {
+ wrapper = shallowMountExtended(ImportProjectMembersModal, {
propsData: {
projectId,
projectName,
@@ -51,12 +51,11 @@ afterEach(() => {
mock.restore();
});
-describe('ImportAProjectModal', () => {
+describe('ImportProjectMembersModal', () => {
+ const findGlModal = () => wrapper.findComponent(GlModal);
const findIntroText = () => wrapper.find({ ref: 'modalIntro' }).text();
- const findCancelButton = () => wrapper.findByTestId('cancel-button');
- const findImportButton = () => wrapper.findByTestId('import-button');
- const clickImportButton = () => findImportButton().vm.$emit('click');
- const clickCancelButton = () => findCancelButton().vm.$emit('click');
+ const clickImportButton = () => findGlModal().vm.$emit('primary', { preventDefault: jest.fn() });
+ const closeModal = () => findGlModal().vm.$emit('hidden', { preventDefault: jest.fn() });
const findFormGroup = () => wrapper.findByTestId('form-group');
const formGroupInvalidFeedback = () => findFormGroup().props('invalidFeedback');
const formGroupErrorState = () => findFormGroup().props('state');
@@ -68,37 +67,40 @@ describe('ImportAProjectModal', () => {
});
it('renders the modal with the correct title', () => {
- expect(wrapper.findComponent(GlModal).props('title')).toBe(
- 'Import members from another project',
- );
+ expect(findGlModal().props('title')).toBe('Import members from another project');
});
it('renders the Cancel button text correctly', () => {
- expect(findCancelButton().text()).toBe('Cancel');
+ expect(findGlModal().props('actionCancel')).toMatchObject({
+ text: 'Cancel',
+ });
});
it('renders the Import button text correctly', () => {
- expect(findImportButton().text()).toBe('Import project members');
+ expect(findGlModal().props('actionPrimary')).toMatchObject({
+ text: 'Import project members',
+ attributes: {
+ variant: 'confirm',
+ disabled: true,
+ loading: false,
+ },
+ });
});
it('renders the modal intro text correctly', () => {
expect(findIntroText()).toBe("You're importing members to the test name project.");
});
- it('renders the Import button modal without isLoading', () => {
- expect(findImportButton().props('loading')).toBe(false);
- });
-
it('sets isLoading to true when the Invite button is clicked', async () => {
clickImportButton();
await nextTick();
- expect(findImportButton().props('loading')).toBe(true);
+ expect(findGlModal().props('actionPrimary').attributes.loading).toBe(true);
});
});
- describe('submitting the import form', () => {
+ describe('submitting the import', () => {
describe('when the import is successful', () => {
beforeEach(() => {
createComponent();
@@ -125,7 +127,7 @@ describe('ImportAProjectModal', () => {
});
it('sets isLoading to false after success', () => {
- expect(findImportButton().props('loading')).toBe(false);
+ expect(findGlModal().props('actionPrimary').attributes.loading).toBe(false);
});
});
@@ -149,14 +151,14 @@ describe('ImportAProjectModal', () => {
});
it('sets isLoading to false after error', () => {
- expect(findImportButton().props('loading')).toBe(false);
+ expect(findGlModal().props('actionPrimary').attributes.loading).toBe(false);
});
it('clears the error when the modal is closed with an error', async () => {
expect(formGroupInvalidFeedback()).toBe('Unable to import project members');
expect(formGroupErrorState()).toBe(false);
- clickCancelButton();
+ closeModal();
await nextTick();
diff --git a/spec/frontend/invite_members/components/import_project_members_trigger_spec.js b/spec/frontend/invite_members/components/import_project_members_trigger_spec.js
new file mode 100644
index 00000000000..b6375fcfa22
--- /dev/null
+++ b/spec/frontend/invite_members/components/import_project_members_trigger_spec.js
@@ -0,0 +1,49 @@
+import { GlButton } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import ImportProjectMembersTrigger from '~/invite_members/components/import_project_members_trigger.vue';
+import eventHub from '~/invite_members/event_hub';
+
+const displayText = 'Import Project Members';
+
+const createComponent = (props = {}) => {
+ return mount(ImportProjectMembersTrigger, {
+ propsData: {
+ displayText,
+ ...props,
+ },
+ });
+};
+
+describe('ImportProjectMembersTrigger', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findButton = () => wrapper.findComponent(GlButton);
+
+ describe('displayText', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ it('includes the correct displayText for the link', () => {
+ expect(findButton().text()).toBe(displayText);
+ });
+ });
+
+ describe('when button is clicked', () => {
+ beforeEach(() => {
+ eventHub.$emit = jest.fn();
+
+ wrapper = createComponent();
+
+ findButton().trigger('click');
+ });
+
+ it('emits event that triggers opening the modal', () => {
+ expect(eventHub.$emit).toHaveBeenLastCalledWith('openProjectMembersModal');
+ });
+ });
+});
diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js
index 13985ce7d74..045a454e63a 100644
--- a/spec/frontend/invite_members/components/invite_members_modal_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js
@@ -35,6 +35,7 @@ import {
user2,
user3,
user4,
+ user5,
GlEmoji,
} from '../mock_data/member_modal';
@@ -93,6 +94,11 @@ describe('InviteMembersModal', () => {
const findModal = () => wrapper.findComponent(GlModal);
const findBase = () => wrapper.findComponent(InviteModalBase);
const findIntroText = () => wrapper.findByTestId('modal-base-intro-text').text();
+ const findMemberErrorAlert = () => wrapper.findByTestId('alert-member-error');
+ const findMemberErrorMessage = (element) =>
+ `${Object.keys(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[element]}: ${
+ Object.values(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[element]
+ }`;
const emitEventFromModal = (eventName) => () =>
findModal().vm.$emit(eventName, { preventDefault: jest.fn() });
const clickInviteButton = emitEventFromModal('primary');
@@ -123,6 +129,10 @@ describe('InviteMembersModal', () => {
findBase().vm.$emit('access-level', val);
await nextTick();
};
+ const removeMembersToken = async (val) => {
+ findMembersSelect().vm.$emit('token-remove', val);
+ await nextTick();
+ };
describe('rendering the tasks to be done', () => {
const setupComponent = async (props = {}, urlParameter = ['invite_members_for_task']) => {
@@ -431,17 +441,20 @@ describe('InviteMembersModal', () => {
});
it('clears the error when the list of members to invite is cleared', async () => {
- expect(membersFormGroupInvalidFeedback()).toBe(
+ expect(findMemberErrorAlert().exists()).toBe(true);
+ expect(findMemberErrorAlert().text()).toContain(
Object.values(invitationsApiResponse.EMAIL_TAKEN.message)[0],
);
- expect(findMembersSelect().props('validationState')).toBe(false);
+ expect(membersFormGroupInvalidFeedback()).toBe('');
+ expect(findMembersSelect().props('exceptionState')).not.toBe(false);
findMembersSelect().vm.$emit('clear');
await nextTick();
+ expect(findMemberErrorAlert().exists()).toBe(false);
expect(membersFormGroupInvalidFeedback()).toBe('');
- expect(findMembersSelect().props('validationState')).not.toBe(false);
+ expect(findMembersSelect().props('exceptionState')).not.toBe(false);
});
it('clears the error when the cancel button is clicked', async () => {
@@ -450,7 +463,7 @@ describe('InviteMembersModal', () => {
await nextTick();
expect(membersFormGroupInvalidFeedback()).toBe('');
- expect(findMembersSelect().props('validationState')).not.toBe(false);
+ expect(findMembersSelect().props('exceptionState')).not.toBe(false);
});
it('clears the error when the modal is hidden', async () => {
@@ -458,33 +471,12 @@ describe('InviteMembersModal', () => {
await nextTick();
+ expect(findMemberErrorAlert().exists()).toBe(false);
expect(membersFormGroupInvalidFeedback()).toBe('');
- expect(findMembersSelect().props('validationState')).not.toBe(false);
+ expect(findMembersSelect().props('exceptionState')).not.toBe(false);
});
});
- it('clears the invalid state and message once the list of members to invite is cleared', async () => {
- mockInvitationsApi(httpStatus.CREATED, invitationsApiResponse.EMAIL_TAKEN);
-
- clickInviteButton();
-
- await waitForPromises();
-
- expect(membersFormGroupInvalidFeedback()).toBe(
- Object.values(invitationsApiResponse.EMAIL_TAKEN.message)[0],
- );
- expect(findMembersSelect().props('validationState')).toBe(false);
- expect(findModal().props('actionPrimary').attributes.loading).toBe(false);
-
- findMembersSelect().vm.$emit('clear');
-
- await waitForPromises();
-
- expect(membersFormGroupInvalidFeedback()).toBe('');
- expect(findMembersSelect().props('validationState')).toBe(null);
- expect(findModal().props('actionPrimary').attributes.loading).toBe(false);
- });
-
it('displays the generic error for http server error', async () => {
mockInvitationsApi(
httpStatus.INTERNAL_SERVER_ERROR,
@@ -496,6 +488,7 @@ describe('InviteMembersModal', () => {
await waitForPromises();
expect(membersFormGroupInvalidFeedback()).toBe('Something went wrong');
+ expect(findMembersSelect().props('exceptionState')).toBe(false);
});
it('displays the restricted user api message for response with bad request', async () => {
@@ -505,20 +498,31 @@ describe('InviteMembersModal', () => {
await waitForPromises();
- expect(membersFormGroupInvalidFeedback()).toBe(expectedEmailRestrictedError);
+ expect(findMemberErrorAlert().exists()).toBe(true);
+ expect(findMemberErrorAlert().text()).toContain(expectedEmailRestrictedError);
+ expect(membersFormGroupInvalidFeedback()).toBe('');
+ expect(findMembersSelect().props('exceptionState')).not.toBe(false);
});
- it('displays the first part of the error when multiple existing users are restricted by email', async () => {
+ it('displays all errors when there are multiple existing users that are restricted by email', async () => {
mockInvitationsApi(httpStatus.CREATED, invitationsApiResponse.MULTIPLE_RESTRICTED);
clickInviteButton();
await waitForPromises();
- expect(membersFormGroupInvalidFeedback()).toBe(
- "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.",
+ expect(findMemberErrorAlert().exists()).toBe(true);
+ expect(findMemberErrorAlert().text()).toContain(
+ Object.values(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[0],
+ );
+ expect(findMemberErrorAlert().text()).toContain(
+ Object.values(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[1],
);
- expect(findMembersSelect().props('validationState')).toBe(false);
+ expect(findMemberErrorAlert().text()).toContain(
+ Object.values(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[2],
+ );
+ expect(membersFormGroupInvalidFeedback()).toBe('');
+ expect(findMembersSelect().props('exceptionState')).not.toBe(false);
});
});
});
@@ -573,10 +577,30 @@ describe('InviteMembersModal', () => {
await waitForPromises();
expect(membersFormGroupInvalidFeedback()).toBe(expectedSyntaxError);
- expect(findMembersSelect().props('validationState')).toBe(false);
+ expect(findMembersSelect().props('exceptionState')).toBe(false);
expect(findModal().props('actionPrimary').attributes.loading).toBe(false);
});
+ it('clears the error when the modal is hidden', async () => {
+ mockInvitationsApi(httpStatus.BAD_REQUEST, invitationsApiResponse.EMAIL_INVALID);
+
+ clickInviteButton();
+
+ await waitForPromises();
+
+ expect(membersFormGroupInvalidFeedback()).toBe(expectedSyntaxError);
+ expect(findMembersSelect().props('exceptionState')).toBe(false);
+ expect(findModal().props('actionPrimary').attributes.loading).toBe(false);
+
+ findModal().vm.$emit('hidden');
+
+ await nextTick();
+
+ expect(findMemberErrorAlert().exists()).toBe(false);
+ expect(membersFormGroupInvalidFeedback()).toBe('');
+ expect(findMembersSelect().props('exceptionState')).not.toBe(false);
+ });
+
it('displays the restricted email error when restricted email is invited', async () => {
mockInvitationsApi(httpStatus.CREATED, invitationsApiResponse.EMAIL_RESTRICTED);
@@ -584,20 +608,32 @@ describe('InviteMembersModal', () => {
await waitForPromises();
- expect(membersFormGroupInvalidFeedback()).toContain(expectedEmailRestrictedError);
- expect(findMembersSelect().props('validationState')).toBe(false);
+ expect(findMemberErrorAlert().exists()).toBe(true);
+ expect(findMemberErrorAlert().text()).toContain(expectedEmailRestrictedError);
+ expect(membersFormGroupInvalidFeedback()).toBe('');
+ expect(findMembersSelect().props('exceptionState')).not.toBe(false);
expect(findModal().props('actionPrimary').attributes.loading).toBe(false);
});
- it('displays the first error message when multiple emails return a restricted error message', async () => {
+ it('displays all errors when there are multiple emails that return a restricted error message', async () => {
mockInvitationsApi(httpStatus.CREATED, invitationsApiResponse.MULTIPLE_RESTRICTED);
clickInviteButton();
await waitForPromises();
- expect(membersFormGroupInvalidFeedback()).toContain(expectedEmailRestrictedError);
- expect(findMembersSelect().props('validationState')).toBe(false);
+ expect(findMemberErrorAlert().exists()).toBe(true);
+ expect(findMemberErrorAlert().text()).toContain(
+ Object.values(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[0],
+ );
+ expect(findMemberErrorAlert().text()).toContain(
+ Object.values(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[1],
+ );
+ expect(findMemberErrorAlert().text()).toContain(
+ Object.values(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[2],
+ );
+ expect(membersFormGroupInvalidFeedback()).toBe('');
+ expect(findMembersSelect().props('exceptionState')).not.toBe(false);
});
it('displays the invalid syntax error for bad request', async () => {
@@ -608,7 +644,7 @@ describe('InviteMembersModal', () => {
await waitForPromises();
expect(membersFormGroupInvalidFeedback()).toBe(expectedSyntaxError);
- expect(findMembersSelect().props('validationState')).toBe(false);
+ expect(findMembersSelect().props('exceptionState')).toBe(false);
});
});
@@ -617,14 +653,51 @@ describe('InviteMembersModal', () => {
createInviteMembersToGroupWrapper();
await triggerMembersTokenSelect([user3, user4]);
- mockInvitationsApi(httpStatus.CREATED, invitationsApiResponse.ERROR_EMAIL_INVALID);
+ mockInvitationsApi(httpStatus.BAD_REQUEST, invitationsApiResponse.ERROR_EMAIL_INVALID);
clickInviteButton();
await waitForPromises();
expect(membersFormGroupInvalidFeedback()).toBe(expectedSyntaxError);
- expect(findMembersSelect().props('validationState')).toBe(false);
+ expect(findMembersSelect().props('exceptionState')).toBe(false);
+ });
+
+ it('displays errors for multiple restricted members and allows clearing them', async () => {
+ createInviteMembersToGroupWrapper();
+
+ await triggerMembersTokenSelect([user3, user4, user5]);
+ mockInvitationsApi(httpStatus.CREATED, invitationsApiResponse.MULTIPLE_RESTRICTED);
+
+ clickInviteButton();
+
+ await waitForPromises();
+
+ expect(findMemberErrorAlert().exists()).toBe(true);
+ expect(findMemberErrorAlert().props('title')).toContain(
+ "The following 3 members couldn't be invited",
+ );
+ expect(findMemberErrorAlert().text()).toContain(findMemberErrorMessage(0));
+ expect(findMemberErrorAlert().text()).toContain(findMemberErrorMessage(1));
+ expect(findMemberErrorAlert().text()).toContain(findMemberErrorMessage(2));
+
+ await removeMembersToken(user3);
+
+ expect(findMemberErrorAlert().props('title')).toContain(
+ "The following 2 members couldn't be invited",
+ );
+ expect(findMemberErrorAlert().text()).not.toContain(findMemberErrorMessage(0));
+
+ await removeMembersToken(user4);
+
+ expect(findMemberErrorAlert().props('title')).toContain(
+ "The following member couldn't be invited",
+ );
+ expect(findMemberErrorAlert().text()).not.toContain(findMemberErrorMessage(1));
+
+ await removeMembersToken(user5);
+
+ expect(findMemberErrorAlert().exists()).toBe(false);
});
});
});
@@ -675,24 +748,6 @@ describe('InviteMembersModal', () => {
});
});
});
-
- describe('when any invite failed for any reason', () => {
- beforeEach(async () => {
- createInviteMembersToGroupWrapper();
-
- await triggerMembersTokenSelect([user1, user3]);
-
- mockInvitationsApi(httpStatus.BAD_REQUEST, invitationsApiResponse.EMAIL_INVALID);
-
- clickInviteButton();
- });
-
- it('displays the first error message', async () => {
- await waitForPromises();
-
- expect(membersFormGroupInvalidFeedback()).toBe(expectedSyntaxError);
- });
- });
});
describe('tracking', () => {
diff --git a/spec/frontend/invite_members/components/invite_modal_base_spec.js b/spec/frontend/invite_members/components/invite_modal_base_spec.js
index cc19e90a5fa..b55eeb72471 100644
--- a/spec/frontend/invite_members/components/invite_modal_base_spec.js
+++ b/spec/frontend/invite_members/components/invite_modal_base_spec.js
@@ -254,7 +254,7 @@ describe('InviteModalBase', () => {
expect(wrapper.findComponent(GlModal).props('actionPrimary').attributes.loading).toBe(true);
});
- it('with invalidFeedbackMessage, set members form group validation state', () => {
+ it('with invalidFeedbackMessage, set members form group exception state', () => {
createComponent({
invalidFeedbackMessage: 'invalid message!',
});
diff --git a/spec/frontend/invite_members/components/members_token_select_spec.js b/spec/frontend/invite_members/components/members_token_select_spec.js
index bf5564e4d63..6375d0f7e2e 100644
--- a/spec/frontend/invite_members/components/members_token_select_spec.js
+++ b/spec/frontend/invite_members/components/members_token_select_spec.js
@@ -16,6 +16,7 @@ const createComponent = (props) => {
return shallowMount(MembersTokenSelect, {
propsData: {
ariaLabelledby: label,
+ invalidMembers: {},
placeholder,
...props,
},
@@ -124,12 +125,14 @@ describe('MembersTokenSelect', () => {
findTokenSelector().vm.$emit('token-remove', [user1]);
expect(wrapper.emitted('clear')).toEqual([[]]);
+ expect(wrapper.emitted('token-remove')).toBeUndefined();
});
- it('does not emit `clear` event when there are still tokens selected', () => {
+ it('emits `token-remove` event with the token when there are still tokens selected', () => {
findTokenSelector().vm.$emit('input', [user1, user2]);
findTokenSelector().vm.$emit('token-remove', [user1]);
+ expect(wrapper.emitted('token-remove')).toEqual([[[user1]]]);
expect(wrapper.emitted('clear')).toBeUndefined();
});
});
diff --git a/spec/frontend/invite_members/mock_data/member_modal.js b/spec/frontend/invite_members/mock_data/member_modal.js
index 474234cfacb..7d675b6206c 100644
--- a/spec/frontend/invite_members/mock_data/member_modal.js
+++ b/spec/frontend/invite_members/mock_data/member_modal.js
@@ -26,13 +26,17 @@ export const user2 = { id: 2, name: 'Name Two', username: 'one_2', avatar_url: '
export const user3 = {
id: 'user-defined-token',
name: 'email@example.com',
- username: 'one_2',
avatar_url: '',
};
export const user4 = {
- id: 'user-defined-token',
+ id: 'user-defined-token2',
name: 'email4@example.com',
- username: 'one_4',
+ avatar_url: '',
+};
+export const user5 = {
+ id: '3',
+ username: 'root',
+ name: 'root',
avatar_url: '',
};
diff --git a/spec/frontend/invite_members/utils/member_utils_spec.js b/spec/frontend/invite_members/utils/member_utils_spec.js
new file mode 100644
index 00000000000..eb76c9845d4
--- /dev/null
+++ b/spec/frontend/invite_members/utils/member_utils_spec.js
@@ -0,0 +1,12 @@
+import { memberName } from '~/invite_members/utils/member_utils';
+
+describe('memberName', () => {
+ it.each([
+ [{ username: '_username_', name: '_name_' }, '_username_'],
+ [{ username: '_username_' }, '_username_'],
+ [{ name: '_name_' }, '_name_'],
+ [{}, undefined],
+ ])(`returns username, falling back to name, from supplied member token: %j`, (member, result) => {
+ expect(memberName(member)).toBe(result);
+ });
+});
diff --git a/spec/frontend/invite_members/utils/response_message_parser_spec.js b/spec/frontend/invite_members/utils/response_message_parser_spec.js
index 8b2064df374..92f38c54c99 100644
--- a/spec/frontend/invite_members/utils/response_message_parser_spec.js
+++ b/spec/frontend/invite_members/utils/response_message_parser_spec.js
@@ -1,5 +1,5 @@
import {
- responseMessageFromSuccess,
+ responseFromSuccess,
responseMessageFromError,
} from '~/invite_members/utils/response_message_parser';
import { invitationsApiResponse } from '../mock_data/api_responses';
@@ -11,12 +11,12 @@ describe('Response message parser', () => {
const exampleKeyedMsg = { 'email@example.com': expectedMessage };
it.each([
- [{ data: { message: expectedMessage } }],
- [{ data: { error: expectedMessage } }],
- [{ data: { message: [expectedMessage] } }],
- [{ data: { message: exampleKeyedMsg } }],
- ])(`returns "${expectedMessage}" from success response: %j`, (successResponse) => {
- expect(responseMessageFromSuccess(successResponse)).toBe(expectedMessage);
+ [{ data: { message: expectedMessage } }, { error: true, message: expectedMessage }],
+ [{ data: { error: expectedMessage } }, { error: true, message: expectedMessage }],
+ [{ data: { message: [expectedMessage] } }, { error: true, message: expectedMessage }],
+ [{ data: { message: exampleKeyedMsg } }, { error: true, message: { ...exampleKeyedMsg } }],
+ ])(`returns "${expectedMessage}" from success response: %j`, (successResponse, result) => {
+ expect(responseFromSuccess(successResponse)).toStrictEqual(result);
});
});
@@ -30,15 +30,18 @@ describe('Response message parser', () => {
});
});
- describe('displaying only the first error when a response has messages for multiple users', () => {
- const expected =
- "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.";
-
+ describe('displaying all errors when a response has messages for multiple users', () => {
it.each([
- [{ data: invitationsApiResponse.MULTIPLE_RESTRICTED }],
- [{ data: invitationsApiResponse.EMAIL_RESTRICTED }],
- ])(`returns "${expectedMessage}" from success response: %j`, (restrictedResponse) => {
- expect(responseMessageFromSuccess(restrictedResponse)).toBe(expected);
+ [
+ { data: invitationsApiResponse.MULTIPLE_RESTRICTED },
+ { error: true, message: { ...invitationsApiResponse.MULTIPLE_RESTRICTED.message } },
+ ],
+ [
+ { data: invitationsApiResponse.EMAIL_RESTRICTED },
+ { error: true, message: { ...invitationsApiResponse.EMAIL_RESTRICTED.message } },
+ ],
+ ])(`returns "${expectedMessage}" from success response: %j`, (restrictedResponse, result) => {
+ expect(responseFromSuccess(restrictedResponse)).toStrictEqual(result);
});
});
});
diff --git a/spec/frontend/issuable/issuable_form_spec.js b/spec/frontend/issuable/issuable_form_spec.js
index a1583076b41..d844f3394d5 100644
--- a/spec/frontend/issuable/issuable_form_spec.js
+++ b/spec/frontend/issuable/issuable_form_spec.js
@@ -47,6 +47,25 @@ describe('IssuableForm', () => {
});
});
+ describe('resetAutosave', () => {
+ it('resets autosave on elements with the .js-reset-autosave class', () => {
+ setHTMLFixture(`
+ <form>
+ <input name="[title]" />
+ <textarea name="[description]"></textarea>
+ <a class="js-reset-autosave">Cancel</a>
+ </form>
+ `);
+ const $form = $('form');
+ const resetAutosave = jest.spyOn(IssuableForm.prototype, 'resetAutosave');
+ createIssuable($form);
+
+ $form.find('.js-reset-autosave').trigger('click');
+
+ expect(resetAutosave).toHaveBeenCalled();
+ });
+ });
+
describe('removeWip', () => {
it.each`
prefix
diff --git a/spec/frontend/issues/list/components/issues_list_app_spec.js b/spec/frontend/issues/list/components/issues_list_app_spec.js
index 3f2c3c3ec5f..3d3dbfa6853 100644
--- a/spec/frontend/issues/list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues/list/components/issues_list_app_spec.js
@@ -29,6 +29,7 @@ import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_ro
import { IssuableListTabs, IssuableStates } from '~/vue_shared/issuable/list/constants';
import IssuesListApp from '~/issues/list/components/issues_list_app.vue';
import NewIssueDropdown from '~/issues/list/components/new_issue_dropdown.vue';
+
import {
CREATED_DESC,
RELATIVE_POSITION,
@@ -98,6 +99,7 @@ describe('CE IssuesListApp component', () => {
};
let defaultQueryResponse = getIssuesQueryResponse;
+ let router;
if (IS_EE) {
defaultQueryResponse = cloneDeep(getIssuesQueryResponse);
defaultQueryResponse.data.project.issues.nodes[0].blockingCount = 1;
@@ -133,9 +135,11 @@ describe('CE IssuesListApp component', () => {
[setSortPreferenceMutation, sortPreferenceMutationResponse],
];
+ router = new VueRouter({ mode: 'history' });
+
return mountFn(IssuesListApp, {
apolloProvider: createMockApollo(requestHandlers),
- router: new VueRouter({ mode: 'history' }),
+ router,
provide: {
...defaultProvide,
...provide,
@@ -736,7 +740,7 @@ describe('CE IssuesListApp component', () => {
describe('when "click-tab" event is emitted by IssuableList', () => {
beforeEach(() => {
wrapper = mountComponent();
- jest.spyOn(wrapper.vm.$router, 'push');
+ router.push = jest.fn();
findIssuableList().vm.$emit('click-tab', IssuableStates.Closed);
});
@@ -746,16 +750,26 @@ describe('CE IssuesListApp component', () => {
});
it('updates url to the new tab', () => {
- expect(wrapper.vm.$router.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
query: expect.objectContaining({ state: IssuableStates.Closed }),
});
});
});
describe.each`
- event | params
- ${'next-page'} | ${{ page_after: 'endCursor', page_before: undefined, first_page_size: 20, last_page_size: undefined }}
- ${'previous-page'} | ${{ page_after: undefined, page_before: 'startCursor', first_page_size: undefined, last_page_size: 20 }}
+ event | params
+ ${'next-page'} | ${{
+ page_after: 'endCursor',
+ page_before: undefined,
+ first_page_size: 20,
+ last_page_size: undefined,
+}}
+ ${'previous-page'} | ${{
+ page_after: undefined,
+ page_before: 'startCursor',
+ first_page_size: undefined,
+ last_page_size: 20,
+}}
`('when "$event" event is emitted by IssuableList', ({ event, params }) => {
beforeEach(() => {
wrapper = mountComponent({
@@ -766,7 +780,7 @@ describe('CE IssuesListApp component', () => {
},
},
});
- jest.spyOn(wrapper.vm.$router, 'push');
+ router.push = jest.fn();
findIssuableList().vm.$emit(event);
});
@@ -776,7 +790,7 @@ describe('CE IssuesListApp component', () => {
});
it(`updates url`, () => {
- expect(wrapper.vm.$router.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
query: expect.objectContaining(params),
});
});
@@ -888,13 +902,13 @@ describe('CE IssuesListApp component', () => {
'updates to the new sort when payload is `%s`',
async (sortKey) => {
wrapper = mountComponent();
- jest.spyOn(wrapper.vm.$router, 'push');
+ router.push = jest.fn();
findIssuableList().vm.$emit('sort', sortKey);
jest.runOnlyPendingTimers();
await nextTick();
- expect(wrapper.vm.$router.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
query: expect.objectContaining({ sort: urlSortParams[sortKey] }),
});
},
@@ -907,13 +921,13 @@ describe('CE IssuesListApp component', () => {
wrapper = mountComponent({
provide: { initialSort, isIssueRepositioningDisabled: true },
});
- jest.spyOn(wrapper.vm.$router, 'push');
+ router.push = jest.fn();
findIssuableList().vm.$emit('sort', RELATIVE_POSITION_ASC);
});
it('does not update the sort to manual', () => {
- expect(wrapper.vm.$router.push).not.toHaveBeenCalled();
+ expect(router.push).not.toHaveBeenCalled();
});
it('shows an alert to tell the user that manual reordering is disabled', () => {
@@ -978,12 +992,12 @@ describe('CE IssuesListApp component', () => {
describe('when "filter" event is emitted by IssuableList', () => {
it('updates IssuableList with url params', async () => {
wrapper = mountComponent();
- jest.spyOn(wrapper.vm.$router, 'push');
+ router.push = jest.fn();
findIssuableList().vm.$emit('filter', filteredTokens);
await nextTick();
- expect(wrapper.vm.$router.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
query: expect.objectContaining(urlParams),
});
});
@@ -993,13 +1007,13 @@ describe('CE IssuesListApp component', () => {
wrapper = mountComponent({
provide: { isAnonymousSearchDisabled: true, isSignedIn: false },
});
- jest.spyOn(wrapper.vm.$router, 'push');
+ router.push = jest.fn();
findIssuableList().vm.$emit('filter', filteredTokens);
});
it('does not update url params', () => {
- expect(wrapper.vm.$router.push).not.toHaveBeenCalled();
+ expect(router.push).not.toHaveBeenCalled();
});
it('shows an alert to tell the user they must be signed in to search', () => {
@@ -1030,4 +1044,19 @@ describe('CE IssuesListApp component', () => {
expect(mockQuery).toHaveBeenCalledWith(expect.objectContaining({ hideUsers }));
});
});
+
+ describe('when "page-size-change" event is emitted by IssuableList', () => {
+ it('updates url params with new page size', async () => {
+ wrapper = mountComponent();
+ router.push = jest.fn();
+
+ findIssuableList().vm.$emit('page-size-change', 50);
+ await nextTick();
+
+ expect(router.push).toHaveBeenCalledTimes(1);
+ expect(router.push).toHaveBeenCalledWith({
+ query: expect.objectContaining({ first_page_size: 50 }),
+ });
+ });
+ });
});
diff --git a/spec/frontend/issues/list/mock_data.js b/spec/frontend/issues/list/mock_data.js
index 42f2d08082e..4347c580a4d 100644
--- a/spec/frontend/issues/list/mock_data.js
+++ b/spec/frontend/issues/list/mock_data.js
@@ -32,6 +32,7 @@ export const getIssuesQueryResponse = {
state: 'opened',
title: 'Issue title',
updatedAt: '2021-05-22T04:08:01Z',
+ closedAt: null,
upvotes: 3,
userDiscussionsCount: 4,
webPath: 'project/-/issues/789',
diff --git a/spec/frontend/issues/list/utils_spec.js b/spec/frontend/issues/list/utils_spec.js
index e8ffba9bc80..3c6332d5728 100644
--- a/spec/frontend/issues/list/utils_spec.js
+++ b/spec/frontend/issues/list/utils_spec.js
@@ -10,12 +10,7 @@ import {
urlParams,
urlParamsWithSpecialValues,
} from 'jest/issues/list/mock_data';
-import {
- PAGE_SIZE,
- PAGE_SIZE_MANUAL,
- RELATIVE_POSITION_ASC,
- urlSortParams,
-} from '~/issues/list/constants';
+import { PAGE_SIZE, urlSortParams } from '~/issues/list/constants';
import {
convertToApiParams,
convertToSearchQuery,
@@ -29,52 +24,30 @@ import {
import { FILTERED_SEARCH_TERM } from '~/vue_shared/components/filtered_search_bar/constants';
describe('getInitialPageParams', () => {
- it.each(Object.keys(urlSortParams))(
- 'returns the correct page params for sort key %s',
- (sortKey) => {
- const firstPageSize = sortKey === RELATIVE_POSITION_ASC ? PAGE_SIZE_MANUAL : PAGE_SIZE;
+ it('returns page params with a default page size when no arguments are given', () => {
+ expect(getInitialPageParams()).toEqual({ firstPageSize: PAGE_SIZE });
+ });
- expect(getInitialPageParams(sortKey)).toEqual({ firstPageSize });
- },
- );
+ it('returns page params with the given page size', () => {
+ const pageSize = 100;
+ expect(getInitialPageParams(pageSize)).toEqual({ firstPageSize: pageSize });
+ });
- it.each(Object.keys(urlSortParams))(
- 'returns the correct page params for sort key %s with afterCursor',
- (sortKey) => {
- const firstPageSize = sortKey === RELATIVE_POSITION_ASC ? PAGE_SIZE_MANUAL : PAGE_SIZE;
- const lastPageSize = undefined;
- const afterCursor = 'randomCursorString';
- const beforeCursor = undefined;
- const pageParams = getInitialPageParams(
- sortKey,
- firstPageSize,
- lastPageSize,
- afterCursor,
- beforeCursor,
- );
-
- expect(pageParams).toEqual({ firstPageSize, afterCursor });
- },
- );
+ it('does not return firstPageSize when lastPageSize is provided', () => {
+ const firstPageSize = 100;
+ const lastPageSize = 50;
+ const afterCursor = undefined;
+ const beforeCursor = 'randomCursorString';
+ const pageParams = getInitialPageParams(
+ 100,
+ firstPageSize,
+ lastPageSize,
+ afterCursor,
+ beforeCursor,
+ );
- it.each(Object.keys(urlSortParams))(
- 'returns the correct page params for sort key %s with beforeCursor',
- (sortKey) => {
- const firstPageSize = undefined;
- const lastPageSize = PAGE_SIZE;
- const afterCursor = undefined;
- const beforeCursor = 'anotherRandomCursorString';
- const pageParams = getInitialPageParams(
- sortKey,
- firstPageSize,
- lastPageSize,
- afterCursor,
- beforeCursor,
- );
-
- expect(pageParams).toEqual({ lastPageSize, beforeCursor });
- },
- );
+ expect(pageParams).toEqual({ lastPageSize, beforeCursor });
+ });
});
describe('getSortKey', () => {
@@ -97,10 +70,10 @@ describe('isSortKey', () => {
describe('getSortOptions', () => {
describe.each`
hasIssueWeightsFeature | hasBlockedIssuesFeature | length | containsWeight | containsBlocking
- ${false} | ${false} | ${9} | ${false} | ${false}
- ${true} | ${false} | ${10} | ${true} | ${false}
- ${false} | ${true} | ${10} | ${false} | ${true}
- ${true} | ${true} | ${11} | ${true} | ${true}
+ ${false} | ${false} | ${10} | ${false} | ${false}
+ ${true} | ${false} | ${11} | ${true} | ${false}
+ ${false} | ${true} | ${11} | ${false} | ${true}
+ ${true} | ${true} | ${12} | ${true} | ${true}
`(
'when hasIssueWeightsFeature=$hasIssueWeightsFeature and hasBlockedIssuesFeature=$hasBlockedIssuesFeature',
({
diff --git a/spec/frontend/issues/new/components/__snapshots__/type_popover_spec.js.snap b/spec/frontend/issues/new/components/__snapshots__/type_popover_spec.js.snap
index 881dcda126f..1a199ed2ee9 100644
--- a/spec/frontend/issues/new/components/__snapshots__/type_popover_spec.js.snap
+++ b/spec/frontend/issues/new/components/__snapshots__/type_popover_spec.js.snap
@@ -2,10 +2,11 @@
exports[`Issue type info popover renders 1`] = `
<span
+ class="gl-ml-2"
id="popovercontainer"
>
<gl-icon-stub
- class="gl-ml-5 gl-text-gray-500"
+ class="gl-text-blue-600"
id="issue-type-info"
name="question-o"
size="16"
diff --git a/spec/frontend/issues/show/components/description_spec.js b/spec/frontend/issues/show/components/description_spec.js
index 2cc27309e59..8ee57f97754 100644
--- a/spec/frontend/issues/show/components/description_spec.js
+++ b/spec/frontend/issues/show/components/description_spec.js
@@ -15,10 +15,15 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import Description from '~/issues/show/components/description.vue';
import { updateHistory } from '~/lib/utils/url_utility';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
+import workItemTypesQuery from '~/work_items/graphql/project_work_item_types.query.graphql';
+import createWorkItemFromTaskMutation from '~/work_items/graphql/create_work_item_from_task.mutation.graphql';
import TaskList from '~/task_list';
import WorkItemDetailModal from '~/work_items/components/work_item_detail_modal.vue';
import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
-import CreateWorkItem from '~/work_items/pages/create_work_item.vue';
+import {
+ projectWorkItemTypesQueryResponse,
+ createWorkItemFromTaskMutationResponse,
+} from 'jest/work_items/mock_data';
import {
descriptionProps as initialProps,
descriptionHtmlWithCheckboxes,
@@ -46,6 +51,10 @@ const workItemQueryResponse = {
};
const queryHandler = jest.fn().mockResolvedValue(workItemQueryResponse);
+const workItemTypesQueryHandler = jest.fn().mockResolvedValue(projectWorkItemTypesQueryResponse);
+const createWorkItemFromTaskSuccessHandler = jest
+ .fn()
+ .mockResolvedValue(createWorkItemFromTaskMutationResponse);
describe('Description component', () => {
let wrapper;
@@ -60,18 +69,24 @@ describe('Description component', () => {
const findTooltips = () => wrapper.findAllComponents(GlTooltip);
const findModal = () => wrapper.findComponent(GlModal);
- const findCreateWorkItem = () => wrapper.findComponent(CreateWorkItem);
const findWorkItemDetailModal = () => wrapper.findComponent(WorkItemDetailModal);
- function createComponent({ props = {}, provide = {} } = {}) {
+ function createComponent({ props = {}, provide } = {}) {
wrapper = shallowMountExtended(Description, {
propsData: {
issueId: 1,
...initialProps,
...props,
},
- provide,
- apolloProvider: createMockApollo([[workItemQuery, queryHandler]]),
+ provide: {
+ fullPath: 'gitlab-org/gitlab-test',
+ ...provide,
+ },
+ apolloProvider: createMockApollo([
+ [workItemQuery, queryHandler],
+ [workItemTypesQuery, workItemTypesQueryHandler],
+ [createWorkItemFromTaskMutation, createWorkItemFromTaskSuccessHandler],
+ ]),
mocks: {
$toast,
},
@@ -299,24 +314,16 @@ describe('Description component', () => {
});
it('does not show a modal by default', () => {
- expect(findModal().props('visible')).toBe(false);
+ expect(findModal().exists()).toBe(false);
});
- it('opens a modal when a button is clicked and displays correct title', async () => {
- await findConvertToTaskButton().trigger('click');
- expect(findCreateWorkItem().props('initialTitle').trim()).toBe('todo 1');
- });
+ it('emits `updateDescription` after creating new work item', async () => {
+ const newDescription = `<p>New description</p>`;
- it('closes the modal on `closeCreateTaskModal` event', async () => {
await findConvertToTaskButton().trigger('click');
- findCreateWorkItem().vm.$emit('closeModal');
- expect(hideModal).toHaveBeenCalled();
- });
- it('emits `updateDescription` on `onCreate` event', () => {
- const newDescription = `<p>New description</p>`;
- findCreateWorkItem().vm.$emit('onCreate', newDescription);
- expect(hideModal).toHaveBeenCalled();
+ await waitForPromises();
+
expect(wrapper.emitted('updateDescription')).toEqual([[newDescription]]);
});
@@ -325,7 +332,7 @@ describe('Description component', () => {
findWorkItemDetailModal().vm.$emit('workItemDeleted', newDesc);
expect(wrapper.emitted('updateDescription')).toEqual([[newDesc]]);
- expect($toast.show).toHaveBeenCalledWith('Work item deleted');
+ expect($toast.show).toHaveBeenCalledWith('Task deleted');
});
});
diff --git a/spec/frontend/issues/show/components/edited_spec.js b/spec/frontend/issues/show/components/edited_spec.js
index 8a8fe23230a..8a240c38b5f 100644
--- a/spec/frontend/issues/show/components/edited_spec.js
+++ b/spec/frontend/issues/show/components/edited_spec.js
@@ -1,49 +1,50 @@
-import Vue from 'vue';
-import edited from '~/issues/show/components/edited.vue';
-
-function formatText(text) {
- return text.trim().replace(/\s\s+/g, ' ');
-}
-
-describe('edited', () => {
- const EditedComponent = Vue.extend(edited);
-
- it('should render an edited at+by string', () => {
- const editedComponent = new EditedComponent({
- propsData: {
- updatedAt: '2017-05-15T12:31:04.428Z',
- updatedByName: 'Some User',
- updatedByPath: '/some_user',
- },
- }).$mount();
-
- expect(formatText(editedComponent.$el.innerText)).toMatch(/Edited[\s\S]+?by Some User/);
- expect(editedComponent.$el.querySelector('.author-link').href).toMatch(/\/some_user$/);
- expect(editedComponent.$el.querySelector('time')).toBeTruthy();
+import { shallowMount } from '@vue/test-utils';
+import Edited from '~/issues/show/components/edited.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+
+describe('Edited component', () => {
+ let wrapper;
+
+ const findAuthorLink = () => wrapper.find('a');
+ const findTimeAgoTooltip = () => wrapper.findComponent(TimeAgoTooltip);
+ const formatText = (text) => text.trim().replace(/\s\s+/g, ' ');
+
+ const mountComponent = (propsData) => shallowMount(Edited, { propsData });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders an edited at+by string', () => {
+ wrapper = mountComponent({
+ updatedAt: '2017-05-15T12:31:04.428Z',
+ updatedByName: 'Some User',
+ updatedByPath: '/some_user',
+ });
+
+ expect(formatText(wrapper.text())).toBe('Edited by Some User');
+ expect(findAuthorLink().attributes('href')).toBe('/some_user');
+ expect(findTimeAgoTooltip().exists()).toBe(true);
});
it('if no updatedAt is provided, no time element will be rendered', () => {
- const editedComponent = new EditedComponent({
- propsData: {
- updatedByName: 'Some User',
- updatedByPath: '/some_user',
- },
- }).$mount();
-
- expect(formatText(editedComponent.$el.innerText)).toMatch(/Edited by Some User/);
- expect(editedComponent.$el.querySelector('.author-link').href).toMatch(/\/some_user$/);
- expect(editedComponent.$el.querySelector('time')).toBeFalsy();
+ wrapper = mountComponent({
+ updatedByName: 'Some User',
+ updatedByPath: '/some_user',
+ });
+
+ expect(formatText(wrapper.text())).toBe('Edited by Some User');
+ expect(findAuthorLink().attributes('href')).toBe('/some_user');
+ expect(findTimeAgoTooltip().exists()).toBe(false);
});
it('if no updatedByName and updatedByPath is provided, no user element will be rendered', () => {
- const editedComponent = new EditedComponent({
- propsData: {
- updatedAt: '2017-05-15T12:31:04.428Z',
- },
- }).$mount();
-
- expect(formatText(editedComponent.$el.innerText)).not.toMatch(/by Some User/);
- expect(editedComponent.$el.querySelector('.author-link')).toBeFalsy();
- expect(editedComponent.$el.querySelector('time')).toBeTruthy();
+ wrapper = mountComponent({
+ updatedAt: '2017-05-15T12:31:04.428Z',
+ });
+
+ expect(formatText(wrapper.text())).toBe('Edited');
+ expect(findAuthorLink().exists()).toBe(false);
+ expect(findTimeAgoTooltip().exists()).toBe(true);
});
});
diff --git a/spec/frontend/issues/show/components/incidents/highlight_bar_spec.js b/spec/frontend/issues/show/components/incidents/highlight_bar_spec.js
index a4910d63bb5..155ae703e48 100644
--- a/spec/frontend/issues/show/components/incidents/highlight_bar_spec.js
+++ b/spec/frontend/issues/show/components/incidents/highlight_bar_spec.js
@@ -74,7 +74,7 @@ describe('Highlight Bar', () => {
});
it('renders a number of alert events', () => {
- expect(wrapper.text()).toContain(alert.eventCount);
+ expect(wrapper.text()).toContain(alert.eventCount.toString());
});
});
diff --git a/spec/frontend/issues/show/components/incidents/mock_data.js b/spec/frontend/issues/show/components/incidents/mock_data.js
index b5346a6089a..afc6099caf4 100644
--- a/spec/frontend/issues/show/components/incidents/mock_data.js
+++ b/spec/frontend/issues/show/components/incidents/mock_data.js
@@ -70,3 +70,36 @@ export const timelineEventsQueryEmptyResponse = {
},
},
};
+
+export const timelineEventsCreateEventResponse = {
+ timelineEvent: {
+ ...mockEvents[0],
+ },
+ errors: [],
+};
+
+export const timelineEventsCreateEventError = {
+ data: {
+ timelineEventCreate: {
+ timelineEvent: {
+ ...mockEvents[0],
+ },
+ errors: ['Create error'],
+ },
+ },
+};
+
+const timelineEventDeleteData = (errors = []) => {
+ return {
+ data: {
+ timelineEventDestroy: {
+ timelineEvent: { ...mockEvents[0] },
+ errors,
+ },
+ },
+ };
+};
+
+export const timelineEventsDeleteEventResponse = timelineEventDeleteData();
+
+export const timelineEventsDeleteEventError = timelineEventDeleteData(['Item does not exist']);
diff --git a/spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js b/spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js
new file mode 100644
index 00000000000..620cdfc53b0
--- /dev/null
+++ b/spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js
@@ -0,0 +1,181 @@
+import VueApollo from 'vue-apollo';
+import Vue, { nextTick } from 'vue';
+import { GlDatepicker } from '@gitlab/ui';
+import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import IncidentTimelineEventForm from '~/issues/show/components/incidents/timeline_events_form.vue';
+import createTimelineEventMutation from '~/issues/show/components/incidents/graphql/queries/create_timeline_event.mutation.graphql';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { createAlert } from '~/flash';
+import { useFakeDate } from 'helpers/fake_date';
+import { timelineEventsCreateEventResponse, timelineEventsCreateEventError } from './mock_data';
+
+Vue.use(VueApollo);
+
+jest.mock('~/flash');
+
+const addEventResponse = jest.fn().mockResolvedValue(timelineEventsCreateEventResponse);
+
+function createMockApolloProvider(response = addEventResponse) {
+ const requestHandlers = [[createTimelineEventMutation, response]];
+ return createMockApollo(requestHandlers);
+}
+
+describe('Timeline events form', () => {
+ // July 8 2020
+ useFakeDate(2020, 6, 8);
+ let wrapper;
+
+ const mountComponent = ({ mockApollo, mountMethod = shallowMountExtended, stubs }) => {
+ wrapper = mountMethod(IncidentTimelineEventForm, {
+ propsData: {
+ hasTimelineEvents: true,
+ },
+ provide: {
+ fullPath: 'group/project',
+ issuableId: '1',
+ },
+ apolloProvider: mockApollo,
+ stubs,
+ });
+ };
+
+ afterEach(() => {
+ addEventResponse.mockReset();
+ createAlert.mockReset();
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ const findSubmitButton = () => wrapper.findByText('Save');
+ const findSubmitAndAddButton = () => wrapper.findByText('Save and add another event');
+ const findCancelButton = () => wrapper.findByText('Cancel');
+ const findDatePicker = () => wrapper.findComponent(GlDatepicker);
+ const findDatePickerInput = () => wrapper.findByTestId('input-datepicker');
+ const findHourInput = () => wrapper.findByTestId('input-hours');
+ const findMinuteInput = () => wrapper.findByTestId('input-minutes');
+ const setDatetime = () => {
+ findDatePicker().vm.$emit('input', new Date('2021-08-12'));
+ findHourInput().vm.$emit('input', 5);
+ findMinuteInput().vm.$emit('input', 45);
+ };
+
+ const submitForm = async () => {
+ findSubmitButton().trigger('click');
+ await waitForPromises();
+ };
+ const submitFormAndAddAnother = async () => {
+ findSubmitAndAddButton().trigger('click');
+ await waitForPromises();
+ };
+ const cancelForm = async () => {
+ findCancelButton().trigger('click');
+ await waitForPromises();
+ };
+
+ describe('form button behaviour', () => {
+ const closeFormEvent = { 'hide-incident-timeline-event-form': [[]] };
+ beforeEach(() => {
+ mountComponent({ mockApollo: createMockApolloProvider(), mountMethod: mountExtended });
+ });
+
+ it('should close the form on submit', async () => {
+ await submitForm();
+ expect(wrapper.emitted()).toEqual(closeFormEvent);
+ });
+
+ it('should not close the form on "submit and add another"', async () => {
+ await submitFormAndAddAnother();
+ expect(wrapper.emitted()).toEqual({});
+ });
+
+ it('should close the form on cancel', async () => {
+ await cancelForm();
+ expect(wrapper.emitted()).toEqual(closeFormEvent);
+ });
+
+ it('should clear the form', async () => {
+ setDatetime();
+ await nextTick();
+
+ expect(findDatePickerInput().element.value).toBe('2021-08-12');
+ expect(findHourInput().element.value).toBe('5');
+ expect(findMinuteInput().element.value).toBe('45');
+
+ wrapper.vm.clear();
+ await nextTick();
+
+ expect(findDatePickerInput().element.value).toBe('2020-07-08');
+ expect(findHourInput().element.value).toBe('0');
+ expect(findMinuteInput().element.value).toBe('0');
+ });
+ });
+
+ describe('addTimelineEventQuery', () => {
+ const expectedData = {
+ input: {
+ incidentId: 'gid://gitlab/Issue/1',
+ note: '',
+ occurredAt: '2020-07-08T00:00:00.000Z',
+ },
+ };
+
+ let mockApollo;
+
+ beforeEach(() => {
+ mockApollo = createMockApolloProvider();
+ mountComponent({ mockApollo, mountMethod: mountExtended });
+ });
+
+ it('should call the mutation with the right variables', async () => {
+ await submitForm();
+
+ expect(addEventResponse).toHaveBeenCalledWith(expectedData);
+ });
+
+ it('should call the mutation with user selected variables', async () => {
+ const expectedUserSelectedData = {
+ input: {
+ ...expectedData.input,
+ occurredAt: '2021-08-12T05:45:00.000Z',
+ },
+ };
+
+ setDatetime();
+
+ await nextTick();
+ await submitForm();
+
+ expect(addEventResponse).toHaveBeenCalledWith(expectedUserSelectedData);
+ });
+ });
+
+ describe('error handling', () => {
+ it('should show an error when submission returns an error', async () => {
+ const expectedAlertArgs = {
+ message: 'Error creating incident timeline event: Create error',
+ };
+ addEventResponse.mockResolvedValueOnce(timelineEventsCreateEventError);
+ mountComponent({ mockApollo: createMockApolloProvider(), mountMethod: mountExtended });
+
+ await submitForm();
+
+ expect(createAlert).toHaveBeenCalledWith(expectedAlertArgs);
+ });
+
+ it('should show an error when submission fails', async () => {
+ const expectedAlertArgs = {
+ captureError: true,
+ error: new Error(),
+ message: 'Something went wrong while creating the incident timeline event.',
+ };
+ addEventResponse.mockRejectedValueOnce();
+ mountComponent({ mockApollo: createMockApolloProvider(), mountMethod: mountExtended });
+
+ await submitForm();
+
+ expect(createAlert).toHaveBeenCalledWith(expectedAlertArgs);
+ });
+ });
+});
diff --git a/spec/frontend/issues/show/components/incidents/timeline_events_list_item_spec.js b/spec/frontend/issues/show/components/incidents/timeline_events_list_item_spec.js
index 7e51219ffa7..e686f2eb4ec 100644
--- a/spec/frontend/issues/show/components/incidents/timeline_events_list_item_spec.js
+++ b/spec/frontend/issues/show/components/incidents/timeline_events_list_item_spec.js
@@ -1,6 +1,6 @@
import timezoneMock from 'timezone-mock';
-import merge from 'lodash/merge';
-import { GlIcon } from '@gitlab/ui';
+import { GlIcon, GlDropdown } from '@gitlab/ui';
+import { nextTick } from 'vue';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import IncidentTimelineEventListItem from '~/issues/show/components/incidents/timeline_events_list_item.vue';
import { mockEvents } from './mock_data';
@@ -8,25 +8,28 @@ import { mockEvents } from './mock_data';
describe('IncidentTimelineEventList', () => {
let wrapper;
- const mountComponent = (propsData) => {
+ const mountComponent = ({ propsData, provide } = {}) => {
const { action, noteHtml, occurredAt } = mockEvents[0];
- wrapper = mountExtended(
- IncidentTimelineEventListItem,
- merge({
- propsData: {
- action,
- noteHtml,
- occurredAt,
- isLastItem: false,
- ...propsData,
- },
- }),
- );
+ wrapper = mountExtended(IncidentTimelineEventListItem, {
+ propsData: {
+ action,
+ noteHtml,
+ occurredAt,
+ isLastItem: false,
+ ...propsData,
+ },
+ provide: {
+ canUpdate: false,
+ ...provide,
+ },
+ });
};
const findCommentIcon = () => wrapper.findComponent(GlIcon);
const findTextContainer = () => wrapper.findByTestId('event-text-container');
const findEventTime = () => wrapper.findByTestId('event-time');
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDeleteButton = () => wrapper.findByText('Delete');
describe('template', () => {
it('shows comment icon', () => {
@@ -55,7 +58,7 @@ describe('IncidentTimelineEventList', () => {
});
it('does not show a bottom border when the last item', () => {
- mountComponent({ isLastItem: true });
+ mountComponent({ propsData: { isLastItem: true } });
expect(wrapper.classes()).not.toContain('gl-border-1');
});
@@ -83,5 +86,31 @@ describe('IncidentTimelineEventList', () => {
});
});
});
+
+ describe('action dropdown', () => {
+ it('does not show the action dropdown by default', () => {
+ mountComponent();
+
+ expect(findDropdown().exists()).toBe(false);
+ expect(findDeleteButton().exists()).toBe(false);
+ });
+
+ it('shows dropdown and delete item when user has update permission', () => {
+ mountComponent({ provide: { canUpdate: true } });
+
+ expect(findDropdown().exists()).toBe(true);
+ expect(findDeleteButton().exists()).toBe(true);
+ });
+
+ it('triggers a delete when the delete button is clicked', async () => {
+ mountComponent({ provide: { canUpdate: true } });
+
+ findDeleteButton().trigger('click');
+
+ await nextTick();
+
+ expect(wrapper.emitted().delete).toBeTruthy();
+ });
+ });
});
});
diff --git a/spec/frontend/issues/show/components/incidents/timeline_events_list_spec.js b/spec/frontend/issues/show/components/incidents/timeline_events_list_spec.js
index 6610ea0b832..ae07237cf7d 100644
--- a/spec/frontend/issues/show/components/incidents/timeline_events_list_spec.js
+++ b/spec/frontend/issues/show/components/incidents/timeline_events_list_spec.js
@@ -1,41 +1,81 @@
import timezoneMock from 'timezone-mock';
-import merge from 'lodash/merge';
-import { shallowMountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
+import VueApollo from 'vue-apollo';
+import Vue from 'vue';
+import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import IncidentTimelineEventList from '~/issues/show/components/incidents/timeline_events_list.vue';
-import { mockEvents } from './mock_data';
+import IncidentTimelineEventListItem from '~/issues/show/components/incidents/timeline_events_list_item.vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import deleteTimelineEventMutation from '~/issues/show/components/incidents/graphql/queries/delete_timeline_event.mutation.graphql';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { createAlert } from '~/flash';
+import {
+ mockEvents,
+ timelineEventsDeleteEventResponse,
+ timelineEventsDeleteEventError,
+} from './mock_data';
+
+Vue.use(VueApollo);
+
+jest.mock('~/flash');
+jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal');
+
+const deleteEventResponse = jest.fn();
+
+function createMockApolloProvider() {
+ deleteEventResponse.mockResolvedValue(timelineEventsDeleteEventResponse);
+ const requestHandlers = [[deleteTimelineEventMutation, deleteEventResponse]];
+ return createMockApollo(requestHandlers);
+}
+
+const mockConfirmAction = ({ confirmed }) => {
+ confirmAction.mockResolvedValueOnce(confirmed);
+};
describe('IncidentTimelineEventList', () => {
let wrapper;
- const mountComponent = () => {
- wrapper = shallowMountExtended(
- IncidentTimelineEventList,
- merge({
- provide: {
- fullPath: 'group/project',
- issuableId: '1',
- },
- propsData: {
- timelineEvents: mockEvents,
- },
- }),
- );
+ const mountComponent = (mockApollo) => {
+ const apollo = mockApollo ? { apolloProvider: mockApollo } : {};
+
+ wrapper = shallowMountExtended(IncidentTimelineEventList, {
+ provide: {
+ fullPath: 'group/project',
+ issuableId: '1',
+ },
+ propsData: {
+ timelineEvents: mockEvents,
+ },
+ ...apollo,
+ });
};
- const findGroups = () => wrapper.findAllByTestId('timeline-group');
- const findItems = (base = wrapper) => base.findAllByTestId('timeline-event');
- const findFirstGroup = () => extendedWrapper(findGroups().at(0));
- const findSecondGroup = () => extendedWrapper(findGroups().at(1));
+ const findTimelineEventGroups = () => wrapper.findAllByTestId('timeline-group');
+ const findItems = (base = wrapper) => base.findAll(IncidentTimelineEventListItem);
+ const findFirstTimelineEventGroup = () => findTimelineEventGroups().at(0);
+ const findSecondTimelineEventGroup = () => findTimelineEventGroups().at(1);
const findDates = () => wrapper.findAllByTestId('event-date');
+ const clickFirstDeleteButton = async () => {
+ findItems()
+ .at(0)
+ .vm.$emit('delete', { ...mockEvents[0] });
+ await waitForPromises();
+ };
+
+ afterEach(() => {
+ confirmAction.mockReset();
+ deleteEventResponse.mockReset();
+ wrapper.destroy();
+ });
describe('template', () => {
it('groups items correctly', () => {
mountComponent();
- expect(findGroups()).toHaveLength(2);
+ expect(findTimelineEventGroups()).toHaveLength(2);
- expect(findItems(findFirstGroup())).toHaveLength(1);
- expect(findItems(findSecondGroup())).toHaveLength(2);
+ expect(findItems(findFirstTimelineEventGroup())).toHaveLength(1);
+ expect(findItems(findSecondTimelineEventGroup())).toHaveLength(2);
});
it('sets the isLastItem prop correctly', () => {
@@ -83,5 +123,48 @@ describe('IncidentTimelineEventList', () => {
});
});
});
+
+ describe('delete functionality', () => {
+ beforeEach(() => {
+ mockConfirmAction({ confirmed: true });
+ });
+
+ it('should delete when button is clicked', async () => {
+ const expectedVars = { input: { id: mockEvents[0].id } };
+
+ mountComponent(createMockApolloProvider());
+
+ await clickFirstDeleteButton();
+
+ expect(deleteEventResponse).toHaveBeenCalledWith(expectedVars);
+ });
+
+ it('should show an error when delete returns an error', async () => {
+ const expectedError = {
+ message: 'Error deleting incident timeline event: Item does not exist',
+ };
+
+ mountComponent(createMockApolloProvider());
+ deleteEventResponse.mockResolvedValue(timelineEventsDeleteEventError);
+
+ await clickFirstDeleteButton();
+
+ expect(createAlert).toHaveBeenCalledWith(expectedError);
+ });
+
+ it('should show an error when delete fails', async () => {
+ const expectedAlertArgs = {
+ captureError: true,
+ error: new Error(),
+ message: 'Something went wrong while deleting the incident timeline event.',
+ };
+ mountComponent(createMockApolloProvider());
+ deleteEventResponse.mockRejectedValueOnce();
+
+ await clickFirstDeleteButton();
+
+ expect(createAlert).toHaveBeenCalledWith(expectedAlertArgs);
+ });
+ });
});
});
diff --git a/spec/frontend/issues/show/components/incidents/timeline_events_tab_spec.js b/spec/frontend/issues/show/components/incidents/timeline_events_tab_spec.js
index cf81f4cdf66..2d87851a761 100644
--- a/spec/frontend/issues/show/components/incidents/timeline_events_tab_spec.js
+++ b/spec/frontend/issues/show/components/incidents/timeline_events_tab_spec.js
@@ -1,13 +1,15 @@
-import { GlEmptyState, GlLoadingIcon } from '@gitlab/ui';
+import { GlLoadingIcon, GlEmptyState } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import Vue from 'vue';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import TimelineEventsTab from '~/issues/show/components/incidents/timeline_events_tab.vue';
import IncidentTimelineEventsList from '~/issues/show/components/incidents/timeline_events_list.vue';
+import IncidentTimelineEventForm from '~/issues/show/components/incidents/timeline_events_form.vue';
import timelineEventsQuery from '~/issues/show/components/incidents/graphql/queries/get_timeline_events.query.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
import { createAlert } from '~/flash';
+import { timelineTabI18n } from '~/issues/show/components/incidents/constants';
import { timelineEventsQueryListResponse, timelineEventsQueryEmptyResponse } from './mock_data';
Vue.use(VueApollo);
@@ -28,14 +30,17 @@ describe('TimelineEventsTab', () => {
let wrapper;
const mountComponent = (options = {}) => {
- const { mockApollo, mountMethod = shallowMountExtended } = options;
+ const { mockApollo, mountMethod = shallowMountExtended, stubs, provide } = options;
wrapper = mountMethod(TimelineEventsTab, {
provide: {
fullPath: 'group/project',
issuableId: '1',
+ canUpdate: true,
+ ...provide,
},
apolloProvider: mockApollo,
+ stubs,
});
};
@@ -48,6 +53,8 @@ describe('TimelineEventsTab', () => {
const findLoadingSpinner = () => wrapper.findComponent(GlLoadingIcon);
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
const findTimelineEventsList = () => wrapper.findComponent(IncidentTimelineEventsList);
+ const findTimelineEventForm = () => wrapper.findComponent(IncidentTimelineEventForm);
+ const findAddEventButton = () => wrapper.findByText(timelineTabI18n.addEventButton);
describe('Timeline events tab', () => {
describe('empty state', () => {
@@ -82,24 +89,85 @@ describe('TimelineEventsTab', () => {
describe('timelineEventsQuery', () => {
let mockApollo;
- beforeEach(() => {
+ const setup = () => {
mockApollo = createMockApolloProvider();
mountComponent({ mockApollo });
- });
+ };
it('should request data', () => {
+ setup();
+
expect(listResponse).toHaveBeenCalled();
});
it('should show the loading state', () => {
+ setup();
+
expect(findEmptyState().exists()).toBe(false);
expect(findLoadingSpinner().exists()).toBe(true);
});
it('should render the list', async () => {
+ setup();
await waitForPromises();
+
expect(findEmptyState().exists()).toBe(false);
expect(findTimelineEventsList().props('timelineEvents')).toHaveLength(3);
});
});
+
+ describe('add new event form', () => {
+ beforeEach(async () => {
+ mountComponent({
+ mockApollo: createMockApolloProvider(emptyResponse),
+ mountMethod: mountExtended,
+ stubs: {
+ 'incident-timeline-events-list': true,
+ 'gl-tab': true,
+ },
+ });
+ await waitForPromises();
+ });
+
+ it('should show a button when user can update', () => {
+ expect(findAddEventButton().exists()).toBe(true);
+ });
+
+ it('should not show a button when user cannot update', () => {
+ mountComponent({
+ mockApollo: createMockApolloProvider(emptyResponse),
+ provide: { canUpdate: false },
+ });
+
+ expect(findAddEventButton().exists()).toBe(false);
+ });
+
+ it('should not show a form by default', () => {
+ expect(findTimelineEventForm().isVisible()).toBe(false);
+ });
+
+ it('should show a form when button is clicked', async () => {
+ await findAddEventButton().trigger('click');
+
+ expect(findTimelineEventForm().isVisible()).toBe(true);
+ });
+
+ it('should clear the form when button is clicked', async () => {
+ const mockClear = jest.fn();
+ wrapper.vm.$refs.eventForm.clear = mockClear;
+
+ await findAddEventButton().trigger('click');
+
+ expect(mockClear).toHaveBeenCalled();
+ });
+
+ it('should hide the form when the hide event is emitted', async () => {
+ // open the form
+ await findAddEventButton().trigger('click');
+
+ await findTimelineEventForm().vm.$emit('hide-incident-timeline-event-form');
+
+ expect(findTimelineEventForm().isVisible()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/issues/show/components/incidents/utils_spec.js b/spec/frontend/issues/show/components/incidents/utils_spec.js
index e6f7082d280..0da0114c654 100644
--- a/spec/frontend/issues/show/components/incidents/utils_spec.js
+++ b/spec/frontend/issues/show/components/incidents/utils_spec.js
@@ -1,4 +1,9 @@
-import { displayAndLogError, getEventIcon } from '~/issues/show/components/incidents/utils';
+import timezoneMock from 'timezone-mock';
+import {
+ displayAndLogError,
+ getEventIcon,
+ getUtcShiftedDateNow,
+} from '~/issues/show/components/incidents/utils';
import { createAlert } from '~/flash';
jest.mock('~/flash');
@@ -19,13 +24,31 @@ describe('incident utils', () => {
describe('get event icon', () => {
it('should display a matching event icon name', () => {
- const name = 'comment';
-
- expect(getEventIcon(name)).toBe(name);
+ ['comment', 'issues', 'status'].forEach((name) => {
+ expect(getEventIcon(name)).toBe(name);
+ });
});
it('should return a default icon name', () => {
expect(getEventIcon('non-existent-icon-name')).toBe('comment');
});
});
+
+ describe('getUtcShiftedDateNow', () => {
+ beforeEach(() => {
+ timezoneMock.register('US/Pacific');
+ });
+
+ afterEach(() => {
+ timezoneMock.unregister();
+ });
+
+ it('should shift the date by the timezone offset', () => {
+ const date = new Date();
+
+ const shiftedDate = getUtcShiftedDateNow();
+
+ expect(shiftedDate > date).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/jobs/bridge/app_spec.js b/spec/frontend/jobs/bridge/app_spec.js
deleted file mode 100644
index 210dcfa364b..00000000000
--- a/spec/frontend/jobs/bridge/app_spec.js
+++ /dev/null
@@ -1,146 +0,0 @@
-import Vue, { nextTick } from 'vue';
-import { shallowMount } from '@vue/test-utils';
-
-import { GlBreakpointInstance } from '@gitlab/ui/dist/utils';
-import { GlLoadingIcon } from '@gitlab/ui';
-import VueApollo from 'vue-apollo';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import getPipelineQuery from '~/jobs/bridge/graphql/queries/pipeline.query.graphql';
-import waitForPromises from 'helpers/wait_for_promises';
-import BridgeApp from '~/jobs/bridge/app.vue';
-import BridgeEmptyState from '~/jobs/bridge/components/empty_state.vue';
-import BridgeSidebar from '~/jobs/bridge/components/sidebar.vue';
-import CiHeader from '~/vue_shared/components/header_ci_component.vue';
-import {
- MOCK_BUILD_ID,
- MOCK_PIPELINE_IID,
- MOCK_PROJECT_FULL_PATH,
- mockPipelineQueryResponse,
-} from './mock_data';
-
-describe('Bridge Show Page', () => {
- let wrapper;
- let mockApollo;
- let mockPipelineQuery;
-
- const createComponent = (options) => {
- wrapper = shallowMount(BridgeApp, {
- provide: {
- buildId: MOCK_BUILD_ID,
- projectFullPath: MOCK_PROJECT_FULL_PATH,
- pipelineIid: MOCK_PIPELINE_IID,
- },
- mocks: {
- $apollo: {
- queries: {
- pipeline: {
- loading: true,
- },
- },
- },
- },
- ...options,
- });
- };
-
- const createComponentWithApollo = () => {
- const handlers = [[getPipelineQuery, mockPipelineQuery]];
- Vue.use(VueApollo);
- mockApollo = createMockApollo(handlers);
-
- createComponent({
- apolloProvider: mockApollo,
- mocks: {},
- });
- };
-
- const findCiHeader = () => wrapper.findComponent(CiHeader);
- const findEmptyState = () => wrapper.findComponent(BridgeEmptyState);
- const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findSidebar = () => wrapper.findComponent(BridgeSidebar);
-
- beforeEach(() => {
- mockPipelineQuery = jest.fn();
- });
-
- afterEach(() => {
- mockPipelineQuery.mockReset();
- wrapper.destroy();
- });
-
- describe('while pipeline query is loading', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders loading icon', () => {
- expect(findLoadingIcon().exists()).toBe(true);
- });
- });
-
- describe('after pipeline query is loaded', () => {
- beforeEach(async () => {
- mockPipelineQuery.mockResolvedValue(mockPipelineQueryResponse);
- createComponentWithApollo();
- await waitForPromises();
- });
-
- it('query is called with correct variables', async () => {
- expect(mockPipelineQuery).toHaveBeenCalledTimes(1);
- expect(mockPipelineQuery).toHaveBeenCalledWith({
- fullPath: MOCK_PROJECT_FULL_PATH,
- iid: MOCK_PIPELINE_IID,
- });
- });
-
- it('renders CI header state', () => {
- expect(findCiHeader().exists()).toBe(true);
- });
-
- it('renders empty state', () => {
- expect(findEmptyState().exists()).toBe(true);
- });
-
- it('renders sidebar', () => {
- expect(findSidebar().exists()).toBe(true);
- });
- });
-
- describe('sidebar expansion', () => {
- beforeEach(async () => {
- mockPipelineQuery.mockResolvedValue(mockPipelineQueryResponse);
- createComponentWithApollo();
- await waitForPromises();
- });
-
- describe('on resize', () => {
- it.each`
- breakpoint | isSidebarExpanded
- ${'xs'} | ${false}
- ${'sm'} | ${false}
- ${'md'} | ${true}
- ${'lg'} | ${true}
- ${'xl'} | ${true}
- `(
- 'sets isSidebarExpanded to `$isSidebarExpanded` when the breakpoint is "$breakpoint"',
- async ({ breakpoint, isSidebarExpanded }) => {
- jest.spyOn(GlBreakpointInstance, 'getBreakpointSize').mockReturnValue(breakpoint);
-
- window.dispatchEvent(new Event('resize'));
- await nextTick();
-
- expect(findSidebar().exists()).toBe(isSidebarExpanded);
- },
- );
- });
-
- it('toggles expansion on button click', async () => {
- expect(findSidebar().exists()).toBe(true);
-
- wrapper.vm.toggleSidebar();
- await nextTick();
-
- expect(findSidebar().exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/jobs/bridge/components/empty_state_spec.js b/spec/frontend/jobs/bridge/components/empty_state_spec.js
deleted file mode 100644
index 38c55b296f0..00000000000
--- a/spec/frontend/jobs/bridge/components/empty_state_spec.js
+++ /dev/null
@@ -1,58 +0,0 @@
-import { GlButton } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import BridgeEmptyState from '~/jobs/bridge/components/empty_state.vue';
-import { MOCK_EMPTY_ILLUSTRATION_PATH, MOCK_PATH_TO_DOWNSTREAM } from '../mock_data';
-
-describe('Bridge Empty State', () => {
- let wrapper;
-
- const createComponent = ({ downstreamPipelinePath }) => {
- wrapper = shallowMount(BridgeEmptyState, {
- provide: {
- emptyStateIllustrationPath: MOCK_EMPTY_ILLUSTRATION_PATH,
- },
- propsData: {
- downstreamPipelinePath,
- },
- });
- };
-
- const findSvg = () => wrapper.find('img');
- const findTitle = () => wrapper.find('h1');
- const findLinkBtn = () => wrapper.findComponent(GlButton);
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('template', () => {
- beforeEach(() => {
- createComponent({ downstreamPipelinePath: MOCK_PATH_TO_DOWNSTREAM });
- });
-
- it('renders illustration', () => {
- expect(findSvg().exists()).toBe(true);
- });
-
- it('renders title', () => {
- expect(findTitle().exists()).toBe(true);
- expect(findTitle().text()).toBe(wrapper.vm.$options.i18n.title);
- });
-
- it('renders CTA button', () => {
- expect(findLinkBtn().exists()).toBe(true);
- expect(findLinkBtn().text()).toBe(wrapper.vm.$options.i18n.linkBtnText);
- expect(findLinkBtn().attributes('href')).toBe(MOCK_PATH_TO_DOWNSTREAM);
- });
- });
-
- describe('without downstream pipeline', () => {
- beforeEach(() => {
- createComponent({ downstreamPipelinePath: undefined });
- });
-
- it('does not render CTA button', () => {
- expect(findLinkBtn().exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/jobs/bridge/components/sidebar_spec.js b/spec/frontend/jobs/bridge/components/sidebar_spec.js
deleted file mode 100644
index 5006d4f08a6..00000000000
--- a/spec/frontend/jobs/bridge/components/sidebar_spec.js
+++ /dev/null
@@ -1,99 +0,0 @@
-import { GlButton, GlDropdown } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import BridgeSidebar from '~/jobs/bridge/components/sidebar.vue';
-import CommitBlock from '~/jobs/components/commit_block.vue';
-import { mockCommit, mockJob } from '../mock_data';
-
-describe('Bridge Sidebar', () => {
- let wrapper;
-
- const MockHeaderEl = {
- getBoundingClientRect() {
- return {
- bottom: '40',
- };
- },
- };
-
- const createComponent = ({ featureFlag } = {}) => {
- wrapper = shallowMount(BridgeSidebar, {
- provide: {
- glFeatures: {
- triggerJobRetryAction: featureFlag,
- },
- },
- propsData: {
- bridgeJob: mockJob,
- commit: mockCommit,
- },
- });
- };
-
- const findJobTitle = () => wrapper.find('h4');
- const findCommitBlock = () => wrapper.findComponent(CommitBlock);
- const findRetryDropdown = () => wrapper.find(GlDropdown);
- const findToggleBtn = () => wrapper.findComponent(GlButton);
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('template', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders job name', () => {
- expect(findJobTitle().text()).toBe(mockJob.name);
- });
-
- it('renders commit information', () => {
- expect(findCommitBlock().exists()).toBe(true);
- });
- });
-
- describe('styles', () => {
- beforeEach(async () => {
- jest.spyOn(document, 'querySelector').mockReturnValue(MockHeaderEl);
- createComponent();
- });
-
- it('calculates root styles correctly', () => {
- expect(wrapper.attributes('style')).toBe('width: 290px; top: 40px;');
- });
- });
-
- describe('sidebar expansion', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('emits toggle sidebar event on button click', async () => {
- expect(wrapper.emitted('toggleSidebar')).toBe(undefined);
-
- findToggleBtn().vm.$emit('click');
-
- expect(wrapper.emitted('toggleSidebar')).toHaveLength(1);
- });
- });
-
- describe('retry action', () => {
- describe('when feature flag is ON', () => {
- beforeEach(() => {
- createComponent({ featureFlag: true });
- });
-
- it('renders retry dropdown', () => {
- expect(findRetryDropdown().exists()).toBe(true);
- });
- });
-
- describe('when feature flag is OFF', () => {
- it('does not render retry dropdown', () => {
- createComponent({ featureFlag: false });
-
- expect(findRetryDropdown().exists()).toBe(false);
- });
- });
- });
-});
diff --git a/spec/frontend/jobs/bridge/mock_data.js b/spec/frontend/jobs/bridge/mock_data.js
deleted file mode 100644
index 4084bb54163..00000000000
--- a/spec/frontend/jobs/bridge/mock_data.js
+++ /dev/null
@@ -1,102 +0,0 @@
-export const MOCK_EMPTY_ILLUSTRATION_PATH = '/path/to/svg';
-export const MOCK_PATH_TO_DOWNSTREAM = '/path/to/downstream/pipeline';
-export const MOCK_BUILD_ID = '1331';
-export const MOCK_PIPELINE_IID = '174';
-export const MOCK_PROJECT_FULL_PATH = '/root/project/';
-export const MOCK_SHA = '38f3d89147765427a7ce58be28cd76d14efa682a';
-
-export const mockCommit = {
- id: `gid://gitlab/CommitPresenter/${MOCK_SHA}`,
- shortId: '38f3d891',
- title: 'Update .gitlab-ci.yml file',
- webPath: `/root/project/-/commit/${MOCK_SHA}`,
- __typename: 'Commit',
-};
-
-export const mockJob = {
- createdAt: '2021-12-10T09:05:45Z',
- id: 'gid://gitlab/Ci::Build/1331',
- name: 'triggerJobName',
- scheduledAt: null,
- startedAt: '2021-12-10T09:13:43Z',
- status: 'SUCCESS',
- triggered: null,
- detailedStatus: {
- id: '1',
- detailsPath: '/root/project/-/jobs/1331',
- icon: 'status_success',
- group: 'success',
- text: 'passed',
- tooltip: 'passed',
- __typename: 'DetailedStatus',
- },
- downstreamPipeline: {
- id: '1',
- path: '/root/project/-/pipelines/175',
- },
- stage: {
- id: '1',
- name: 'build',
- __typename: 'CiStage',
- },
- __typename: 'CiJob',
-};
-
-export const mockUser = {
- id: 'gid://gitlab/User/1',
- avatarUrl: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- name: 'Administrator',
- username: 'root',
- webPath: '/root',
- webUrl: 'http://gdk.test:3000/root',
- status: {
- message: 'making great things',
- __typename: 'UserStatus',
- },
- __typename: 'UserCore',
-};
-
-export const mockStage = {
- id: '1',
- name: 'build',
- jobs: {
- nodes: [mockJob],
- __typename: 'CiJobConnection',
- },
- __typename: 'CiStage',
-};
-
-export const mockPipelineQueryResponse = {
- data: {
- project: {
- id: '1',
- pipeline: {
- commit: mockCommit,
- id: 'gid://gitlab/Ci::Pipeline/174',
- iid: '88',
- path: '/root/project/-/pipelines/174',
- sha: MOCK_SHA,
- ref: 'main',
- refPath: 'path/to/ref',
- user: mockUser,
- detailedStatus: {
- id: '1',
- icon: 'status_failed',
- group: 'failed',
- __typename: 'DetailedStatus',
- },
- stages: {
- edges: [
- {
- node: mockStage,
- __typename: 'CiStageEdge',
- },
- ],
- __typename: 'CiStageConnection',
- },
- __typename: 'Pipeline',
- },
- __typename: 'Project',
- },
- },
-};
diff --git a/spec/frontend/jobs/components/job_app_spec.js b/spec/frontend/jobs/components/job_app_spec.js
index fc308766ab9..b4b5bc4669d 100644
--- a/spec/frontend/jobs/components/job_app_spec.js
+++ b/spec/frontend/jobs/components/job_app_spec.js
@@ -22,7 +22,6 @@ describe('Job App', () => {
let store;
let wrapper;
let mock;
- let origGon;
const initSettings = {
endpoint: `${TEST_HOST}jobs/123.json`,
@@ -80,17 +79,11 @@ describe('Job App', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
store = createStore();
-
- origGon = window.gon;
-
- window.gon = { features: { infinitelyCollapsibleSections: false } }; // NOTE: All of this passes with the feature flag
});
afterEach(() => {
wrapper.destroy();
mock.restore();
-
- window.gon = origGon;
});
describe('while loading', () => {
diff --git a/spec/frontend/jobs/components/job_log_controllers_spec.js b/spec/frontend/jobs/components/job_log_controllers_spec.js
index cd3ee734466..cc97d111c06 100644
--- a/spec/frontend/jobs/components/job_log_controllers_spec.js
+++ b/spec/frontend/jobs/components/job_log_controllers_spec.js
@@ -1,6 +1,11 @@
+import { GlSearchBoxByClick } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import JobLogControllers from '~/jobs/components/job_log_controllers.vue';
+import HelpPopover from '~/vue_shared/components/help_popover.vue';
+import { mockJobLog } from '../mock_data';
+
+const mockToastShow = jest.fn();
describe('Job log controllers', () => {
let wrapper;
@@ -19,14 +24,30 @@ describe('Job log controllers', () => {
isScrollBottomDisabled: false,
isScrollingDown: true,
isJobLogSizeVisible: true,
+ jobLog: mockJobLog,
};
- const createWrapper = (props) => {
+ const createWrapper = (props, jobLogSearch = false) => {
wrapper = mount(JobLogControllers, {
propsData: {
...defaultProps,
...props,
},
+ provide: {
+ glFeatures: {
+ jobLogSearch,
+ },
+ },
+ data() {
+ return {
+ searchTerm: '82',
+ };
+ },
+ mocks: {
+ $toast: {
+ show: mockToastShow,
+ },
+ },
});
};
@@ -35,6 +56,8 @@ describe('Job log controllers', () => {
const findRawLinkController = () => wrapper.find('[data-testid="job-raw-link-controller"]');
const findScrollTop = () => wrapper.find('[data-testid="job-controller-scroll-top"]');
const findScrollBottom = () => wrapper.find('[data-testid="job-controller-scroll-bottom"]');
+ const findJobLogSearch = () => wrapper.findComponent(GlSearchBoxByClick);
+ const findSearchHelp = () => wrapper.findComponent(HelpPopover);
describe('Truncate information', () => {
describe('with isJobLogSizeVisible', () => {
@@ -179,4 +202,40 @@ describe('Job log controllers', () => {
});
});
});
+
+ describe('Job log search', () => {
+ describe('with feature flag off', () => {
+ it('does not display job log search', () => {
+ createWrapper();
+
+ expect(findJobLogSearch().exists()).toBe(false);
+ expect(findSearchHelp().exists()).toBe(false);
+ });
+ });
+
+ describe('with feature flag on', () => {
+ beforeEach(() => {
+        createWrapper({}, true);
+ });
+
+ it('displays job log search', () => {
+ expect(findJobLogSearch().exists()).toBe(true);
+ expect(findSearchHelp().exists()).toBe(true);
+ });
+
+ it('emits search results', () => {
+ const expectedSearchResults = [[[mockJobLog[6].lines[1], mockJobLog[6].lines[2]]]];
+
+ findJobLogSearch().vm.$emit('submit');
+
+ expect(wrapper.emitted('searchResults')).toEqual(expectedSearchResults);
+ });
+
+ it('clears search results', () => {
+ findJobLogSearch().vm.$emit('clear');
+
+ expect(wrapper.emitted('searchResults')).toEqual([[[]]]);
+ });
+ });
+ });
});
diff --git a/spec/frontend/jobs/components/job_sidebar_details_container_spec.js b/spec/frontend/jobs/components/job_sidebar_details_container_spec.js
index cc9a5e4ee25..4046f0269dd 100644
--- a/spec/frontend/jobs/components/job_sidebar_details_container_spec.js
+++ b/spec/frontend/jobs/components/job_sidebar_details_container_spec.js
@@ -42,7 +42,7 @@ describe('Job Sidebar Details Container', () => {
expect(wrapper.html()).toBe('');
});
- it.each(['duration', 'erased_at', 'finished_at', 'queued', 'runner', 'coverage'])(
+ it.each(['duration', 'erased_at', 'finished_at', 'queued_at', 'runner', 'coverage'])(
'should not render %s details when missing',
async (detail) => {
await store.dispatch('receiveJobSuccess', { [detail]: undefined });
@@ -59,7 +59,7 @@ describe('Job Sidebar Details Container', () => {
['duration', 'Elapsed time: 6 seconds'],
['erased_at', 'Erased: 3 weeks ago'],
['finished_at', 'Finished: 3 weeks ago'],
- ['queued', 'Queued: 9 seconds'],
+ ['queued_duration', 'Queued: 9 seconds'],
['runner', 'Runner: #1 (ABCDEFGH) local ci runner'],
['coverage', 'Coverage: 20%'],
])('uses %s to render job-%s', async (detail, value) => {
diff --git a/spec/frontend/jobs/components/jobs_container_spec.js b/spec/frontend/jobs/components/jobs_container_spec.js
index 1cde72682a2..127570b8184 100644
--- a/spec/frontend/jobs/components/jobs_container_spec.js
+++ b/spec/frontend/jobs/components/jobs_container_spec.js
@@ -106,7 +106,7 @@ describe('Jobs List block', () => {
});
expect(findJob().text()).toBe(job.name);
- expect(findJob().text()).not.toContain(job.id);
+ expect(findJob().text()).not.toContain(job.id.toString());
});
it('renders job id when job name is not available', () => {
diff --git a/spec/frontend/jobs/components/log/collapsible_section_spec.js b/spec/frontend/jobs/components/log/collapsible_section_spec.js
index 2ab7f5fe22d..646935568b1 100644
--- a/spec/frontend/jobs/components/log/collapsible_section_spec.js
+++ b/spec/frontend/jobs/components/log/collapsible_section_spec.js
@@ -5,7 +5,6 @@ import { collapsibleSectionClosed, collapsibleSectionOpened } from './mock_data'
describe('Job Log Collapsible Section', () => {
let wrapper;
- let origGon;
const jobLogEndpoint = 'jobs/335';
@@ -20,16 +19,8 @@ describe('Job Log Collapsible Section', () => {
});
};
- beforeEach(() => {
- origGon = window.gon;
-
- window.gon = { features: { infinitelyCollapsibleSections: false } }; // NOTE: This also works with true
- });
-
afterEach(() => {
wrapper.destroy();
-
- window.gon = origGon;
});
describe('with closed section', () => {
diff --git a/spec/frontend/jobs/components/log/line_spec.js b/spec/frontend/jobs/components/log/line_spec.js
index d184696cd1f..bf80d90e299 100644
--- a/spec/frontend/jobs/components/log/line_spec.js
+++ b/spec/frontend/jobs/components/log/line_spec.js
@@ -179,4 +179,46 @@ describe('Job Log Line', () => {
expect(findLink().exists()).toBe(false);
});
});
+
+ describe('job log search', () => {
+ const mockSearchResults = [
+ {
+ offset: 1533,
+ content: [{ text: '$ echo "82.71"', style: 'term-fg-l-green term-bold' }],
+ section: 'step-script',
+ lineNumber: 20,
+ },
+ { offset: 1560, content: [{ text: '82.71' }], section: 'step-script', lineNumber: 21 },
+ ];
+
+ it('applies highlight class to search result elements', () => {
+ createComponent({
+ line: {
+ offset: 1560,
+ content: [{ text: '82.71' }],
+ section: 'step-script',
+ lineNumber: 21,
+ },
+ path: '/root/ci-project/-/jobs/1089',
+ searchResults: mockSearchResults,
+ });
+
+ expect(wrapper.classes()).toContain('gl-bg-gray-500');
+ });
+
+    it('does not apply highlight class to lines outside the search results', () => {
+ createComponent({
+ line: {
+ offset: 1560,
+ content: [{ text: 'docker' }],
+ section: 'step-script',
+ lineNumber: 29,
+ },
+ path: '/root/ci-project/-/jobs/1089',
+ searchResults: mockSearchResults,
+ });
+
+ expect(wrapper.classes()).not.toContain('gl-bg-gray-500');
+ });
+ });
});
diff --git a/spec/frontend/jobs/components/log/log_spec.js b/spec/frontend/jobs/components/log/log_spec.js
index 9cc56cce9b3..c933ed5c3e1 100644
--- a/spec/frontend/jobs/components/log/log_spec.js
+++ b/spec/frontend/jobs/components/log/log_spec.js
@@ -2,7 +2,7 @@ import { mount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
import Log from '~/jobs/components/log/log.vue';
-import { logLinesParserLegacy, logLinesParser } from '~/jobs/store/utils';
+import { logLinesParser } from '~/jobs/store/utils';
import { jobLog } from './mock_data';
describe('Job Log', () => {
@@ -10,7 +10,6 @@ describe('Job Log', () => {
let actions;
let state;
let store;
- let origGon;
Vue.use(Vuex);
@@ -25,12 +24,8 @@ describe('Job Log', () => {
toggleCollapsibleLine: () => {},
};
- origGon = window.gon;
-
- window.gon = { features: { infinitelyCollapsibleSections: false } };
-
state = {
- jobLog: logLinesParserLegacy(jobLog),
+ jobLog: logLinesParser(jobLog),
jobLogEndpoint: 'jobs/id',
};
@@ -44,88 +39,6 @@ describe('Job Log', () => {
afterEach(() => {
wrapper.destroy();
-
- window.gon = origGon;
- });
-
- const findCollapsibleLine = () => wrapper.find('.collapsible-line');
-
- describe('line numbers', () => {
- it('renders a line number for each open line', () => {
- expect(wrapper.find('#L1').text()).toBe('1');
- expect(wrapper.find('#L2').text()).toBe('2');
- expect(wrapper.find('#L3').text()).toBe('3');
- });
-
- it('links to the provided path and correct line number', () => {
- expect(wrapper.find('#L1').attributes('href')).toBe(`${state.jobLogEndpoint}#L1`);
- });
- });
-
- describe('collapsible sections', () => {
- it('renders a clickable header section', () => {
- expect(findCollapsibleLine().attributes('role')).toBe('button');
- });
-
- it('renders an icon with the open state', () => {
- expect(findCollapsibleLine().find('[data-testid="chevron-lg-down-icon"]').exists()).toBe(
- true,
- );
- });
-
- describe('on click header section', () => {
- it('calls toggleCollapsibleLine', () => {
- jest.spyOn(wrapper.vm, 'toggleCollapsibleLine');
-
- findCollapsibleLine().trigger('click');
-
- expect(wrapper.vm.toggleCollapsibleLine).toHaveBeenCalled();
- });
- });
- });
-});
-
-describe('Job Log, infinitelyCollapsibleSections feature flag enabled', () => {
- let wrapper;
- let actions;
- let state;
- let store;
- let origGon;
-
- Vue.use(Vuex);
-
- const createComponent = () => {
- wrapper = mount(Log, {
- store,
- });
- };
-
- beforeEach(() => {
- actions = {
- toggleCollapsibleLine: () => {},
- };
-
- origGon = window.gon;
-
- window.gon = { features: { infinitelyCollapsibleSections: true } };
-
- state = {
- jobLog: logLinesParser(jobLog).parsedLines,
- jobLogEndpoint: 'jobs/id',
- };
-
- store = new Vuex.Store({
- actions,
- state,
- });
-
- createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
-
- window.gon = origGon;
});
const findCollapsibleLine = () => wrapper.find('.collapsible-line');
diff --git a/spec/frontend/jobs/components/log/mock_data.js b/spec/frontend/jobs/components/log/mock_data.js
index 3ff0bd73581..eb8c4fe8bc9 100644
--- a/spec/frontend/jobs/components/log/mock_data.js
+++ b/spec/frontend/jobs/components/log/mock_data.js
@@ -58,80 +58,6 @@ export const utilsMockData = [
},
];
-export const multipleCollapsibleSectionsMockData = [
- {
- offset: 1001,
- content: [{ text: ' on docker-auto-scale-com 8a6210b8' }],
- },
- {
- offset: 1002,
- content: [
- {
- text: 'Executing "step_script" stage of the job script',
- },
- ],
- section: 'step-script',
- section_header: true,
- },
- {
- offset: 1003,
- content: [{ text: 'sleep 60' }],
- section: 'step-script',
- },
- {
- offset: 1004,
- content: [
- {
- text:
- 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam lorem dolor, congue ac condimentum vitae',
- },
- ],
- section: 'step-script',
- },
- {
- offset: 1005,
- content: [{ text: 'executing...' }],
- section: 'step-script',
- },
- {
- offset: 1006,
- content: [{ text: '1st collapsible section' }],
- section: 'collapsible-1',
- section_header: true,
- },
- {
- offset: 1007,
- content: [
- {
- text:
- 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam lorem dolor, congue ac condimentum vitae',
- },
- ],
- section: 'collapsible-1',
- },
- {
- offset: 1008,
- content: [],
- section: 'collapsible-1',
- section_duration: '01:00',
- },
- {
- offset: 1009,
- content: [],
- section: 'step-script',
- section_duration: '10:00',
- },
-];
-
-export const backwardsCompatibilityTrace = [
- {
- offset: 2365,
- content: [],
- section: 'download-artifacts',
- section_duration: '00:01',
- },
-];
-
export const originalTrace = [
{
offset: 1,
diff --git a/spec/frontend/jobs/components/table/cells/actions_cell_spec.js b/spec/frontend/jobs/components/table/cells/actions_cell_spec.js
index 976b128532d..7cc008f332d 100644
--- a/spec/frontend/jobs/components/table/cells/actions_cell_spec.js
+++ b/spec/frontend/jobs/components/table/cells/actions_cell_spec.js
@@ -12,17 +12,12 @@ import JobRetryMutation from '~/jobs/components/table/graphql/mutations/job_retr
import JobUnscheduleMutation from '~/jobs/components/table/graphql/mutations/job_unschedule.mutation.graphql';
import JobCancelMutation from '~/jobs/components/table/graphql/mutations/job_cancel.mutation.graphql';
import {
- playableJob,
- retryableJob,
- cancelableJob,
- scheduledJob,
- cannotRetryJob,
- cannotPlayJob,
- cannotPlayScheduledJob,
- retryMutationResponse,
+ mockJobsNodes,
+ mockJobsNodesAsGuest,
playMutationResponse,
- cancelMutationResponse,
+ retryMutationResponse,
unscheduleMutationResponse,
+ cancelMutationResponse,
} from '../../../mock_data';
jest.mock('~/lib/utils/url_utility');
@@ -32,6 +27,22 @@ Vue.use(VueApollo);
describe('Job actions cell', () => {
let wrapper;
+ const findMockJob = (jobName, nodes = mockJobsNodes) => {
+ const job = nodes.find(({ name }) => name === jobName);
+ expect(job).toBeDefined(); // ensure job is present
+ return job;
+ };
+
+ const mockJob = findMockJob('build');
+ const cancelableJob = findMockJob('cancelable');
+ const playableJob = findMockJob('playable');
+ const retryableJob = findMockJob('retryable');
+ const scheduledJob = findMockJob('scheduled');
+ const jobWithArtifact = findMockJob('with_artifact');
+ const cannotPlayJob = findMockJob('playable', mockJobsNodesAsGuest);
+ const cannotRetryJob = findMockJob('retryable', mockJobsNodesAsGuest);
+ const cannotPlayScheduledJob = findMockJob('scheduled', mockJobsNodesAsGuest);
+
const findRetryButton = () => wrapper.findByTestId('retry');
const findPlayButton = () => wrapper.findByTestId('play');
const findCancelButton = () => wrapper.findByTestId('cancel-button');
@@ -55,10 +66,10 @@ describe('Job actions cell', () => {
return createMockApollo(requestHandlers);
};
- const createComponent = (jobType, requestHandlers, props = {}) => {
+ const createComponent = (job, requestHandlers, props = {}) => {
wrapper = shallowMountExtended(ActionsCell, {
propsData: {
- job: jobType,
+ job,
...props,
},
apolloProvider: createMockApolloProvider(requestHandlers),
@@ -73,15 +84,15 @@ describe('Job actions cell', () => {
});
it('displays the artifacts download button with correct link', () => {
- createComponent(playableJob);
+ createComponent(jobWithArtifact);
expect(findDownloadArtifactsButton().attributes('href')).toBe(
- playableJob.artifacts.nodes[0].downloadPath,
+ jobWithArtifact.artifacts.nodes[0].downloadPath,
);
});
it('does not display an artifacts download button', () => {
- createComponent(retryableJob);
+ createComponent(mockJob);
expect(findDownloadArtifactsButton().exists()).toBe(false);
});
@@ -101,7 +112,7 @@ describe('Job actions cell', () => {
button | action | jobType
${findPlayButton} | ${'play'} | ${playableJob}
${findRetryButton} | ${'retry'} | ${retryableJob}
- ${findDownloadArtifactsButton} | ${'download artifacts'} | ${playableJob}
+ ${findDownloadArtifactsButton} | ${'download artifacts'} | ${jobWithArtifact}
${findCancelButton} | ${'cancel'} | ${cancelableJob}
`('displays the $action button', ({ button, jobType }) => {
createComponent(jobType);
diff --git a/spec/frontend/jobs/components/table/cells/job_cell_spec.js b/spec/frontend/jobs/components/table/cells/job_cell_spec.js
index fc4e5586349..ddc196129a7 100644
--- a/spec/frontend/jobs/components/table/cells/job_cell_spec.js
+++ b/spec/frontend/jobs/components/table/cells/job_cell_spec.js
@@ -2,16 +2,22 @@ import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import JobCell from '~/jobs/components/table/cells/job_cell.vue';
-import { mockJobsInTable } from '../../../mock_data';
-
-const mockJob = mockJobsInTable[0];
-const mockJobCreatedByTag = mockJobsInTable[1];
-const mockJobLimitedAccess = mockJobsInTable[2];
-const mockStuckJob = mockJobsInTable[3];
+import { mockJobsNodes, mockJobsNodesAsGuest } from '../../../mock_data';
describe('Job Cell', () => {
let wrapper;
+ const findMockJob = (jobName, nodes = mockJobsNodes) => {
+ const job = nodes.find(({ name }) => name === jobName);
+ expect(job).toBeDefined(); // ensure job is present
+ return job;
+ };
+
+ const mockJob = findMockJob('build');
+ const jobCreatedByTag = findMockJob('created_by_tag');
+ const pendingJob = findMockJob('pending');
+ const jobAsGuest = findMockJob('build', mockJobsNodesAsGuest);
+
const findJobIdLink = () => wrapper.findByTestId('job-id-link');
const findJobIdNoLink = () => wrapper.findByTestId('job-id-limited-access');
const findJobRef = () => wrapper.findByTestId('job-ref');
@@ -23,11 +29,11 @@ describe('Job Cell', () => {
const findBadgeById = (id) => wrapper.findByTestId(id);
- const createComponent = (jobData = mockJob) => {
+ const createComponent = (job = mockJob) => {
wrapper = extendedWrapper(
shallowMount(JobCell, {
propsData: {
- job: jobData,
+ job,
},
}),
);
@@ -49,9 +55,9 @@ describe('Job Cell', () => {
});
it('displays the job id with no link', () => {
- createComponent(mockJobLimitedAccess);
+ createComponent(jobAsGuest);
- const expectedJobId = `#${getIdFromGraphQLId(mockJobLimitedAccess.id)}`;
+ const expectedJobId = `#${getIdFromGraphQLId(jobAsGuest.id)}`;
expect(findJobIdNoLink().text()).toBe(expectedJobId);
expect(findJobIdNoLink().exists()).toBe(true);
@@ -75,7 +81,7 @@ describe('Job Cell', () => {
});
it('displays label icon when job is created by a tag', () => {
- createComponent(mockJobCreatedByTag);
+ createComponent(jobCreatedByTag);
expect(findLabelIcon().exists()).toBe(true);
expect(findForkIcon().exists()).toBe(false);
@@ -130,8 +136,8 @@ describe('Job Cell', () => {
expect(findStuckIcon().exists()).toBe(false);
});
- it('stuck icon is shown if job is stuck', () => {
- createComponent(mockStuckJob);
+ it('stuck icon is shown if job is pending', () => {
+ createComponent(pendingJob);
expect(findStuckIcon().exists()).toBe(true);
expect(findStuckIcon().attributes('name')).toBe('warning');
diff --git a/spec/frontend/jobs/components/table/job_table_app_spec.js b/spec/frontend/jobs/components/table/job_table_app_spec.js
index 986fba21fb9..374768c3ee4 100644
--- a/spec/frontend/jobs/components/table/job_table_app_spec.js
+++ b/spec/frontend/jobs/components/table/job_table_app_spec.js
@@ -6,7 +6,7 @@ import {
GlLoadingIcon,
} from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import { s__ } from '~/locale';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -18,8 +18,8 @@ import JobsTableApp from '~/jobs/components/table/jobs_table_app.vue';
import JobsTableTabs from '~/jobs/components/table/jobs_table_tabs.vue';
import JobsFilteredSearch from '~/jobs/components/filtered_search/jobs_filtered_search.vue';
import {
- mockJobsQueryResponse,
- mockJobsQueryEmptyResponse,
+ mockJobsResponsePaginated,
+ mockJobsResponseEmpty,
mockFailedSearchToken,
} from '../../mock_data';
@@ -30,11 +30,10 @@ jest.mock('~/flash');
describe('Job table app', () => {
let wrapper;
- let jobsTableVueSearch = true;
- const successHandler = jest.fn().mockResolvedValue(mockJobsQueryResponse);
+ const successHandler = jest.fn().mockResolvedValue(mockJobsResponsePaginated);
const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
- const emptyHandler = jest.fn().mockResolvedValue(mockJobsQueryEmptyResponse);
+ const emptyHandler = jest.fn().mockResolvedValue(mockJobsResponseEmpty);
const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
const findLoadingSpinner = () => wrapper.findComponent(GlLoadingIcon);
@@ -66,7 +65,6 @@ describe('Job table app', () => {
},
provide: {
fullPath: projectPath,
- glFeatures: { jobsTableVueSearch },
},
apolloProvider: createMockApolloProvider(handler),
});
@@ -77,17 +75,17 @@ describe('Job table app', () => {
});
describe('loading state', () => {
- beforeEach(() => {
+ it('should display skeleton loader when loading', () => {
createComponent();
- });
- it('should display skeleton loader when loading', () => {
expect(findSkeletonLoader().exists()).toBe(true);
expect(findTable().exists()).toBe(false);
expect(findLoadingSpinner().exists()).toBe(false);
});
it('when switching tabs only the skeleton loader should show', () => {
+ createComponent();
+
findTabs().vm.$emit('fetchJobsByStatus', null);
expect(findSkeletonLoader().exists()).toBe(true);
@@ -119,24 +117,29 @@ describe('Job table app', () => {
});
describe('when infinite scrolling is triggered', () => {
- beforeEach(() => {
+ it('does not display a skeleton loader', () => {
triggerInfiniteScroll();
- });
- it('does not display a skeleton loader', () => {
expect(findSkeletonLoader().exists()).toBe(false);
});
it('handles infinite scrolling by calling fetch more', async () => {
+ triggerInfiniteScroll();
+
+ await nextTick();
+
+ const pageSize = 30;
+
expect(findLoadingSpinner().exists()).toBe(true);
await waitForPromises();
expect(findLoadingSpinner().exists()).toBe(false);
- expect(successHandler).toHaveBeenCalledWith({
- after: 'eyJpZCI6IjIzMTcifQ',
- fullPath: 'gitlab-org/gitlab',
+ expect(successHandler).toHaveBeenLastCalledWith({
+ first: pageSize,
+ fullPath: projectPath,
+ after: mockJobsResponsePaginated.data.project.jobs.pageInfo.endCursor,
});
});
});
@@ -227,13 +230,5 @@ describe('Job table app', () => {
expect(createFlash).toHaveBeenCalledWith(expectedWarning);
expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(0);
});
-
- it('should not display filtered search', () => {
- jobsTableVueSearch = false;
-
- createComponent();
-
- expect(findFilteredSearch().exists()).toBe(false);
- });
});
});
diff --git a/spec/frontend/jobs/components/table/jobs_table_spec.js b/spec/frontend/jobs/components/table/jobs_table_spec.js
index ac8bef675f8..803df3df37f 100644
--- a/spec/frontend/jobs/components/table/jobs_table_spec.js
+++ b/spec/frontend/jobs/components/table/jobs_table_spec.js
@@ -3,7 +3,7 @@ import { mount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import JobsTable from '~/jobs/components/table/jobs_table.vue';
import CiBadge from '~/vue_shared/components/ci_badge_link.vue';
-import { mockJobsInTable } from '../../mock_data';
+import { mockJobsNodes } from '../../mock_data';
describe('Jobs Table', () => {
let wrapper;
@@ -19,7 +19,7 @@ describe('Jobs Table', () => {
wrapper = extendedWrapper(
mount(JobsTable, {
propsData: {
- jobs: mockJobsInTable,
+ jobs: mockJobsNodes,
...props,
},
}),
@@ -39,7 +39,7 @@ describe('Jobs Table', () => {
});
it('displays correct number of job rows', () => {
- expect(findTableRows()).toHaveLength(mockJobsInTable.length);
+ expect(findTableRows()).toHaveLength(mockJobsNodes.length);
});
it('displays job status', () => {
@@ -47,14 +47,14 @@ describe('Jobs Table', () => {
});
it('displays the job stage and name', () => {
- const firstJob = mockJobsInTable[0];
+ const firstJob = mockJobsNodes[0];
expect(findJobStage().text()).toBe(firstJob.stage.name);
expect(findJobName().text()).toBe(firstJob.name);
});
it('displays the coverage for only jobs that have coverage', () => {
- const jobsThatHaveCoverage = mockJobsInTable.filter((job) => job.coverage !== null);
+ const jobsThatHaveCoverage = mockJobsNodes.filter((job) => job.coverage !== null);
jobsThatHaveCoverage.forEach((job, index) => {
expect(findAllCoverageJobs().at(index).text()).toBe(`${job.coverage}%`);
diff --git a/spec/frontend/jobs/mock_data.js b/spec/frontend/jobs/mock_data.js
index 4676635cce0..bf238b2e39a 100644
--- a/spec/frontend/jobs/mock_data.js
+++ b/spec/frontend/jobs/mock_data.js
@@ -1,8 +1,18 @@
+import mockJobsEmpty from 'test_fixtures/graphql/jobs/get_jobs.query.graphql.empty.json';
+import mockJobsPaginated from 'test_fixtures/graphql/jobs/get_jobs.query.graphql.paginated.json';
+import mockJobs from 'test_fixtures/graphql/jobs/get_jobs.query.graphql.json';
+import mockJobsAsGuest from 'test_fixtures/graphql/jobs/get_jobs.query.graphql.as_guest.json';
import { TEST_HOST } from 'spec/test_constants';
const threeWeeksAgo = new Date();
threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
+// Fixtures generated at spec/frontend/fixtures/jobs.rb
+export const mockJobsResponsePaginated = mockJobsPaginated;
+export const mockJobsResponseEmpty = mockJobsEmpty;
+export const mockJobsNodes = mockJobs.data.project.jobs.nodes;
+export const mockJobsNodesAsGuest = mockJobsAsGuest.data.project.jobs.nodes;
+
export const stages = [
{
name: 'build',
@@ -924,7 +934,7 @@ export default {
created_at: threeWeeksAgo.toISOString(),
updated_at: threeWeeksAgo.toISOString(),
finished_at: threeWeeksAgo.toISOString(),
- queued: 9.54,
+ queued_duration: 9.54,
status: {
icon: 'status_success',
text: 'passed',
@@ -1283,602 +1293,6 @@ export const mockPipelineDetached = {
},
};
-export const mockJobsInTable = [
- {
- detailedStatus: {
- icon: 'status_manual',
- label: 'manual play action',
- text: 'manual',
- tooltip: 'manual action',
- action: {
- buttonTitle: 'Trigger this manual action',
- icon: 'play',
- method: 'post',
- path: '/root/ci-project/-/jobs/2004/play',
- title: 'Play',
- __typename: 'StatusAction',
- },
- detailsPath: '/root/ci-project/-/jobs/2004',
- __typename: 'DetailedStatus',
- },
- id: 'gid://gitlab/Ci::Build/2004',
- refName: 'main',
- refPath: '/root/ci-project/-/commits/main',
- tags: [],
- shortSha: '2d5d8323',
- commitPath: '/root/ci-project/-/commit/2d5d83230bdea0e003d83ef4c16d2bf9a8808ebe',
- pipeline: {
- id: 'gid://gitlab/Ci::Pipeline/423',
- path: '/root/ci-project/-/pipelines/423',
- user: {
- webPath: '/root',
- avatarUrl:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- __typename: 'User',
- },
- __typename: 'Pipeline',
- },
- stage: { name: 'test', __typename: 'CiStage' },
- name: 'test_manual_job',
- duration: null,
- finishedAt: null,
- coverage: null,
- createdByTag: false,
- retryable: false,
- playable: true,
- cancelable: false,
- active: false,
- stuck: false,
- userPermissions: { readBuild: true, __typename: 'JobPermissions' },
- __typename: 'CiJob',
- },
- {
- detailedStatus: {
- icon: 'status_skipped',
- label: 'skipped',
- text: 'skipped',
- tooltip: 'skipped',
- action: null,
- __typename: 'DetailedStatus',
- },
- id: 'gid://gitlab/Ci::Build/2021',
- refName: 'main',
- refPath: '/root/ci-project/-/commits/main',
- tags: [],
- shortSha: '2d5d8323',
- commitPath: '/root/ci-project/-/commit/2d5d83230bdea0e003d83ef4c16d2bf9a8808ebe',
- pipeline: {
- id: 'gid://gitlab/Ci::Pipeline/425',
- path: '/root/ci-project/-/pipelines/425',
- user: {
- webPath: '/root',
- avatarUrl:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- __typename: 'User',
- },
- __typename: 'Pipeline',
- },
- stage: { name: 'test', __typename: 'CiStage' },
- name: 'coverage_job',
- duration: null,
- finishedAt: null,
- coverage: null,
- createdByTag: true,
- retryable: false,
- playable: false,
- cancelable: false,
- active: false,
- stuck: false,
- userPermissions: { readBuild: true, __typename: 'JobPermissions' },
- __typename: 'CiJob',
- },
- {
- detailedStatus: {
- icon: 'status_success',
- label: 'passed',
- text: 'passed',
- tooltip: 'passed',
- action: {
- buttonTitle: 'Retry this job',
- icon: 'retry',
- method: 'post',
- path: '/root/ci-project/-/jobs/2015/retry',
- title: 'Retry',
- __typename: 'StatusAction',
- },
- __typename: 'DetailedStatus',
- },
- id: 'gid://gitlab/Ci::Build/2015',
- refName: 'main',
- refPath: '/root/ci-project/-/commits/main',
- tags: [],
- shortSha: '2d5d8323',
- commitPath: '/root/ci-project/-/commit/2d5d83230bdea0e003d83ef4c16d2bf9a8808ebe',
- pipeline: {
- id: 'gid://gitlab/Ci::Pipeline/424',
- path: '/root/ci-project/-/pipelines/424',
- user: {
- webPath: '/root',
- avatarUrl:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- __typename: 'User',
- },
- __typename: 'Pipeline',
- },
- stage: { name: 'deploy', __typename: 'CiStage' },
- name: 'artifact_job',
- duration: 2,
- finishedAt: '2021-04-01T17:36:18Z',
- coverage: 82.71,
- createdByTag: false,
- retryable: true,
- playable: false,
- cancelable: false,
- active: false,
- stuck: false,
- userPermissions: { readBuild: false, __typename: 'JobPermissions' },
- __typename: 'CiJob',
- },
- {
- artifacts: { nodes: [], __typename: 'CiJobArtifactConnection' },
- allowFailure: false,
- status: 'PENDING',
- scheduledAt: null,
- manualJob: false,
- triggered: null,
- createdByTag: false,
- detailedStatus: {
- detailsPath: '/root/ci-project/-/jobs/2391',
- group: 'pending',
- icon: 'status_pending',
- label: 'pending',
- text: 'pending',
- tooltip: 'pending',
- action: {
- buttonTitle: 'Cancel this job',
- icon: 'cancel',
- method: 'post',
- path: '/root/ci-project/-/jobs/2391/cancel',
- title: 'Cancel',
- __typename: 'StatusAction',
- },
- __typename: 'DetailedStatus',
- },
- id: 'gid://gitlab/Ci::Build/2391',
- refName: 'master',
- refPath: '/root/ci-project/-/commits/master',
- tags: [],
- shortSha: '916330b4',
- commitPath: '/root/ci-project/-/commit/916330b4fda5dae226524ceb51c756c0ed26679d',
- pipeline: {
- id: 'gid://gitlab/Ci::Pipeline/482',
- path: '/root/ci-project/-/pipelines/482',
- user: {
- webPath: '/root',
- avatarUrl:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- __typename: 'UserCore',
- },
- __typename: 'Pipeline',
- },
- stage: { name: 'build', __typename: 'CiStage' },
- name: 'build_job',
- duration: null,
- finishedAt: null,
- coverage: null,
- retryable: false,
- playable: false,
- cancelable: true,
- active: true,
- stuck: true,
- userPermissions: { readBuild: true, __typename: 'JobPermissions' },
- __typename: 'CiJob',
- },
-];
-
-export const mockJobsQueryResponse = {
- data: {
- project: {
- id: '1',
- jobs: {
- count: 1,
- pageInfo: {
- endCursor: 'eyJpZCI6IjIzMTcifQ',
- hasNextPage: true,
- hasPreviousPage: false,
- startCursor: 'eyJpZCI6IjIzMzYifQ',
- __typename: 'PageInfo',
- },
- nodes: [
- {
- artifacts: {
- nodes: [
- {
- downloadPath: '/root/ci-project/-/jobs/2336/artifacts/download?file_type=trace',
- fileType: 'TRACE',
- __typename: 'CiJobArtifact',
- },
- {
- downloadPath:
- '/root/ci-project/-/jobs/2336/artifacts/download?file_type=metadata',
- fileType: 'METADATA',
- __typename: 'CiJobArtifact',
- },
- {
- downloadPath: '/root/ci-project/-/jobs/2336/artifacts/download?file_type=archive',
- fileType: 'ARCHIVE',
- __typename: 'CiJobArtifact',
- },
- ],
- __typename: 'CiJobArtifactConnection',
- },
- allowFailure: false,
- status: 'SUCCESS',
- scheduledAt: null,
- manualJob: false,
- triggered: null,
- createdByTag: false,
- detailedStatus: {
- id: 'status-1',
- detailsPath: '/root/ci-project/-/jobs/2336',
- group: 'success',
- icon: 'status_success',
- label: 'passed',
- text: 'passed',
- tooltip: 'passed',
- action: {
- id: 'action-1',
- buttonTitle: 'Retry this job',
- icon: 'retry',
- method: 'post',
- path: '/root/ci-project/-/jobs/2336/retry',
- title: 'Retry',
- __typename: 'StatusAction',
- },
- __typename: 'DetailedStatus',
- },
- id: 'gid://gitlab/Ci::Build/2336',
- refName: 'main',
- refPath: '/root/ci-project/-/commits/main',
- tags: [],
- shortSha: '4408fa2a',
- commitPath: '/root/ci-project/-/commit/4408fa2a27aaadfdf42d8dda3d6a9c01ce6cad78',
- pipeline: {
- id: 'gid://gitlab/Ci::Pipeline/473',
- path: '/root/ci-project/-/pipelines/473',
- user: {
- id: 'user-1',
- webPath: '/root',
- avatarUrl:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- __typename: 'UserCore',
- },
- __typename: 'Pipeline',
- },
- stage: {
- id: 'stage-1',
- name: 'deploy',
- __typename: 'CiStage',
- },
- name: 'artifact_job',
- duration: 3,
- finishedAt: '2021-04-29T14:19:50Z',
- coverage: null,
- retryable: true,
- playable: false,
- cancelable: false,
- active: false,
- stuck: false,
- userPermissions: {
- readBuild: true,
- readJobArtifacts: true,
- updateBuild: true,
- __typename: 'JobPermissions',
- },
- __typename: 'CiJob',
- },
- ],
- __typename: 'CiJobConnection',
- },
- __typename: 'Project',
- },
- },
-};
-
-export const mockJobsQueryEmptyResponse = {
- data: {
- project: {
- id: '1',
- jobs: [],
- },
- },
-};
-
-export const retryableJob = {
- artifacts: {
- nodes: [
- {
- downloadPath: '/root/ci-project/-/jobs/847/artifacts/download?file_type=trace',
- fileType: 'TRACE',
- __typename: 'CiJobArtifact',
- },
- ],
- __typename: 'CiJobArtifactConnection',
- },
- allowFailure: false,
- status: 'SUCCESS',
- scheduledAt: null,
- manualJob: false,
- triggered: null,
- createdByTag: false,
- detailedStatus: {
- detailsPath: '/root/test-job-artifacts/-/jobs/1981',
- group: 'success',
- icon: 'status_success',
- label: 'passed',
- text: 'passed',
- tooltip: 'passed',
- action: {
- buttonTitle: 'Retry this job',
- icon: 'retry',
- method: 'post',
- path: '/root/test-job-artifacts/-/jobs/1981/retry',
- title: 'Retry',
- __typename: 'StatusAction',
- },
- __typename: 'DetailedStatus',
- },
- id: 'gid://gitlab/Ci::Build/1981',
- refName: 'main',
- refPath: '/root/test-job-artifacts/-/commits/main',
- tags: [],
- shortSha: '75daf01b',
- commitPath: '/root/test-job-artifacts/-/commit/75daf01b465e7eab5a04a315e44660c9a17c8055',
- pipeline: {
- id: 'gid://gitlab/Ci::Pipeline/288',
- path: '/root/test-job-artifacts/-/pipelines/288',
- user: {
- webPath: '/root',
- avatarUrl:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- __typename: 'UserCore',
- },
- __typename: 'Pipeline',
- },
- stage: { name: 'test', __typename: 'CiStage' },
- name: 'hello_world',
- duration: 7,
- finishedAt: '2021-08-30T20:33:56Z',
- coverage: null,
- retryable: true,
- playable: false,
- cancelable: false,
- active: false,
- stuck: false,
- userPermissions: { readBuild: true, updateBuild: true, __typename: 'JobPermissions' },
- __typename: 'CiJob',
-};
-
-export const cancelableJob = {
- artifacts: {
- nodes: [],
- __typename: 'CiJobArtifactConnection',
- },
- allowFailure: false,
- status: 'PENDING',
- scheduledAt: null,
- manualJob: false,
- triggered: null,
- createdByTag: false,
- detailedStatus: {
- id: 'pending-1305-1305',
- detailsPath: '/root/lots-of-jobs-project/-/jobs/1305',
- group: 'pending',
- icon: 'status_pending',
- label: 'pending',
- text: 'pending',
- tooltip: 'pending',
- action: {
- id: 'Ci::Build-pending-1305',
- buttonTitle: 'Cancel this job',
- icon: 'cancel',
- method: 'post',
- path: '/root/lots-of-jobs-project/-/jobs/1305/cancel',
- title: 'Cancel',
- __typename: 'StatusAction',
- },
- __typename: 'DetailedStatus',
- },
- id: 'gid://gitlab/Ci::Build/1305',
- refName: 'main',
- refPath: '/root/lots-of-jobs-project/-/commits/main',
- tags: [],
- shortSha: '750605f2',
- commitPath: '/root/lots-of-jobs-project/-/commit/750605f29530778cf0912779eba6d073128962a5',
- stage: {
- id: 'gid://gitlab/Ci::Stage/181',
- name: 'deploy',
- __typename: 'CiStage',
- },
- name: 'job_212',
- duration: null,
- finishedAt: null,
- coverage: null,
- retryable: false,
- playable: false,
- cancelable: true,
- active: true,
- stuck: false,
- userPermissions: {
- readBuild: true,
- readJobArtifacts: true,
- updateBuild: true,
- __typename: 'JobPermissions',
- },
- __typename: 'CiJob',
-};
-
-export const cannotRetryJob = {
- ...retryableJob,
- userPermissions: { readBuild: true, updateBuild: false, __typename: 'JobPermissions' },
-};
-
-export const playableJob = {
- artifacts: {
- nodes: [
- {
- downloadPath: '/root/ci-project/-/jobs/621/artifacts/download?file_type=archive',
- fileType: 'ARCHIVE',
- __typename: 'CiJobArtifact',
- },
- {
- downloadPath: '/root/ci-project/-/jobs/621/artifacts/download?file_type=metadata',
- fileType: 'METADATA',
- __typename: 'CiJobArtifact',
- },
- {
- downloadPath: '/root/ci-project/-/jobs/621/artifacts/download?file_type=trace',
- fileType: 'TRACE',
- __typename: 'CiJobArtifact',
- },
- ],
- __typename: 'CiJobArtifactConnection',
- },
- allowFailure: false,
- status: 'SUCCESS',
- scheduledAt: null,
- manualJob: true,
- triggered: null,
- createdByTag: false,
- detailedStatus: {
- detailsPath: '/root/test-job-artifacts/-/jobs/1982',
- group: 'success',
- icon: 'status_success',
- label: 'manual play action',
- text: 'passed',
- tooltip: 'passed',
- action: {
- buttonTitle: 'Trigger this manual action',
- icon: 'play',
- method: 'post',
- path: '/root/test-job-artifacts/-/jobs/1982/play',
- title: 'Play',
- __typename: 'StatusAction',
- },
- __typename: 'DetailedStatus',
- },
- id: 'gid://gitlab/Ci::Build/1982',
- refName: 'main',
- refPath: '/root/test-job-artifacts/-/commits/main',
- tags: [],
- shortSha: '75daf01b',
- commitPath: '/root/test-job-artifacts/-/commit/75daf01b465e7eab5a04a315e44660c9a17c8055',
- pipeline: {
- id: 'gid://gitlab/Ci::Pipeline/288',
- path: '/root/test-job-artifacts/-/pipelines/288',
- user: {
- webPath: '/root',
- avatarUrl:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- __typename: 'UserCore',
- },
- __typename: 'Pipeline',
- },
- stage: { name: 'test', __typename: 'CiStage' },
- name: 'hello_world_delayed',
- duration: 6,
- finishedAt: '2021-08-30T20:36:12Z',
- coverage: null,
- retryable: true,
- playable: true,
- cancelable: false,
- active: false,
- stuck: false,
- userPermissions: {
- readBuild: true,
- readJobArtifacts: true,
- updateBuild: true,
- __typename: 'JobPermissions',
- },
- __typename: 'CiJob',
-};
-
-export const cannotPlayJob = {
- ...playableJob,
- userPermissions: {
- readBuild: true,
- readJobArtifacts: true,
- updateBuild: false,
- __typename: 'JobPermissions',
- },
-};
-
-export const scheduledJob = {
- artifacts: { nodes: [], __typename: 'CiJobArtifactConnection' },
- allowFailure: false,
- status: 'SCHEDULED',
- scheduledAt: '2021-08-31T22:36:05Z',
- manualJob: true,
- triggered: null,
- createdByTag: false,
- detailedStatus: {
- detailsPath: '/root/test-job-artifacts/-/jobs/1986',
- group: 'scheduled',
- icon: 'status_scheduled',
- label: 'unschedule action',
- text: 'delayed',
- tooltip: 'delayed manual action (%{remainingTime})',
- action: {
- buttonTitle: 'Unschedule job',
- icon: 'time-out',
- method: 'post',
- path: '/root/test-job-artifacts/-/jobs/1986/unschedule',
- title: 'Unschedule',
- __typename: 'StatusAction',
- },
- __typename: 'DetailedStatus',
- },
- id: 'gid://gitlab/Ci::Build/1986',
- refName: 'main',
- refPath: '/root/test-job-artifacts/-/commits/main',
- tags: [],
- shortSha: '75daf01b',
- commitPath: '/root/test-job-artifacts/-/commit/75daf01b465e7eab5a04a315e44660c9a17c8055',
- pipeline: {
- id: 'gid://gitlab/Ci::Pipeline/290',
- path: '/root/test-job-artifacts/-/pipelines/290',
- user: {
- webPath: '/root',
- avatarUrl:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- __typename: 'UserCore',
- },
- __typename: 'Pipeline',
- },
- stage: { name: 'test', __typename: 'CiStage' },
- name: 'hello_world_delayed',
- duration: null,
- finishedAt: null,
- coverage: null,
- retryable: false,
- playable: true,
- cancelable: false,
- active: false,
- stuck: false,
- userPermissions: { readBuild: true, updateBuild: true, __typename: 'JobPermissions' },
- __typename: 'CiJob',
-};
-
-export const cannotPlayScheduledJob = {
- ...scheduledJob,
- userPermissions: {
- readBuild: true,
- readJobArtifacts: true,
- updateBuild: false,
- __typename: 'JobPermissions',
- },
-};
-
export const CIJobConnectionIncomingCache = {
__typename: 'CiJobConnection',
pageInfo: {
@@ -2000,3 +1414,167 @@ export const unscheduleMutationResponse = {
},
},
};
+
+export const mockJobLog = [
+ { offset: 0, content: [{ text: 'Running with gitlab-runner 15.0.0 (febb2a09)' }], lineNumber: 0 },
+ { offset: 54, content: [{ text: ' on colima-docker EwM9WzgD' }], lineNumber: 1 },
+ {
+ isClosed: false,
+ isHeader: true,
+ line: {
+ offset: 91,
+ content: [{ text: 'Resolving secrets', style: 'term-fg-l-cyan term-bold' }],
+ section: 'resolve-secrets',
+ section_header: true,
+ lineNumber: 2,
+ section_duration: '00:00',
+ },
+ lines: [],
+ },
+ {
+ isClosed: false,
+ isHeader: true,
+ line: {
+ offset: 218,
+ content: [{ text: 'Preparing the "docker" executor', style: 'term-fg-l-cyan term-bold' }],
+ section: 'prepare-executor',
+ section_header: true,
+ lineNumber: 4,
+ section_duration: '00:01',
+ },
+ lines: [
+ {
+ offset: 317,
+ content: [{ text: 'Using Docker executor with image ruby:2.7 ...' }],
+ section: 'prepare-executor',
+ lineNumber: 5,
+ },
+ {
+ offset: 372,
+ content: [{ text: 'Pulling docker image ruby:2.7 ...' }],
+ section: 'prepare-executor',
+ lineNumber: 6,
+ },
+ {
+ offset: 415,
+ content: [
+ {
+ text:
+ 'Using docker image sha256:55106bf6ba7f452c38d01ea760affc6ceb67d4b60068ffadab98d1b7b007668c for ruby:2.7 with digest ruby@sha256:23d08a4bae1a12ee3fce017f83204fcf9a02243443e4a516e65e5ff73810a449 ...',
+ },
+ ],
+ section: 'prepare-executor',
+ lineNumber: 7,
+ },
+ ],
+ },
+ {
+ isClosed: false,
+ isHeader: true,
+ line: {
+ offset: 665,
+ content: [{ text: 'Preparing environment', style: 'term-fg-l-cyan term-bold' }],
+ section: 'prepare-script',
+ section_header: true,
+ lineNumber: 9,
+ section_duration: '00:01',
+ },
+ lines: [
+ {
+ offset: 752,
+ content: [
+ { text: 'Running on runner-ewm9wzgd-project-20-concurrent-0 via 8ea689ec6969...' },
+ ],
+ section: 'prepare-script',
+ lineNumber: 10,
+ },
+ ],
+ },
+ {
+ isClosed: false,
+ isHeader: true,
+ line: {
+ offset: 865,
+ content: [{ text: 'Getting source from Git repository', style: 'term-fg-l-cyan term-bold' }],
+ section: 'get-sources',
+ section_header: true,
+ lineNumber: 12,
+ section_duration: '00:01',
+ },
+ lines: [
+ {
+ offset: 962,
+ content: [
+ {
+ text: 'Fetching changes with git depth set to 20...',
+ style: 'term-fg-l-green term-bold',
+ },
+ ],
+ section: 'get-sources',
+ lineNumber: 13,
+ },
+ {
+ offset: 1019,
+ content: [
+ { text: 'Reinitialized existing Git repository in /builds/root/ci-project/.git/' },
+ ],
+ section: 'get-sources',
+ lineNumber: 14,
+ },
+ {
+ offset: 1090,
+ content: [{ text: 'Checking out e0f63d76 as main...', style: 'term-fg-l-green term-bold' }],
+ section: 'get-sources',
+ lineNumber: 15,
+ },
+ {
+ offset: 1136,
+ content: [{ text: 'Skipping Git submodules setup', style: 'term-fg-l-green term-bold' }],
+ section: 'get-sources',
+ lineNumber: 16,
+ },
+ ],
+ },
+ {
+ isClosed: false,
+ isHeader: true,
+ line: {
+ offset: 1217,
+ content: [
+ {
+ text: 'Executing "step_script" stage of the job script',
+ style: 'term-fg-l-cyan term-bold',
+ },
+ ],
+ section: 'step-script',
+ section_header: true,
+ lineNumber: 18,
+ section_duration: '00:00',
+ },
+ lines: [
+ {
+ offset: 1327,
+ content: [
+ {
+ text:
+ 'Using docker image sha256:55106bf6ba7f452c38d01ea760affc6ceb67d4b60068ffadab98d1b7b007668c for ruby:2.7 with digest ruby@sha256:23d08a4bae1a12ee3fce017f83204fcf9a02243443e4a516e65e5ff73810a449 ...',
+ },
+ ],
+ section: 'step-script',
+ lineNumber: 19,
+ },
+ {
+ offset: 1533,
+ content: [{ text: '$ echo "82.71"', style: 'term-fg-l-green term-bold' }],
+ section: 'step-script',
+ lineNumber: 20,
+ },
+ { offset: 1560, content: [{ text: '82.71' }], section: 'step-script', lineNumber: 21 },
+ ],
+ },
+ {
+ offset: 1605,
+ content: [{ text: 'Job succeeded', style: 'term-fg-l-green term-bold' }],
+ lineNumber: 23,
+ },
+];
diff --git a/spec/frontend/jobs/store/mutations_spec.js b/spec/frontend/jobs/store/mutations_spec.js
index b73aa8abf4e..ea1ec383d6e 100644
--- a/spec/frontend/jobs/store/mutations_spec.js
+++ b/spec/frontend/jobs/store/mutations_spec.js
@@ -4,21 +4,12 @@ import state from '~/jobs/store/state';
describe('Jobs Store Mutations', () => {
let stateCopy;
- let origGon;
const html =
'I, [2018-08-17T22:57:45.707325 #1841] INFO -- : Writing /builds/ab89e95b0fa0b9272ea0c797b76908f24d36992630e9325273a4ce3.png<br>I';
beforeEach(() => {
stateCopy = state();
-
- origGon = window.gon;
-
- window.gon = { features: { infinitelyCollapsibleSections: false } };
- });
-
- afterEach(() => {
- window.gon = origGon;
});
describe('SET_JOB_ENDPOINT', () => {
@@ -276,88 +267,3 @@ describe('Jobs Store Mutations', () => {
});
});
});
-
-describe('Job Store mutations, feature flag ON', () => {
- let stateCopy;
- let origGon;
-
- const html =
- 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- : Writing /builds/ab89e95b0fa0b9272ea0c797b76908f24d36992630e9325273a4ce3.png<br>I';
-
- beforeEach(() => {
- stateCopy = state();
-
- origGon = window.gon;
-
- window.gon = { features: { infinitelyCollapsibleSections: true } };
- });
-
- afterEach(() => {
- window.gon = origGon;
- });
-
- describe('RECEIVE_JOB_LOG_SUCCESS', () => {
- describe('with new job log', () => {
- describe('log.lines', () => {
- describe('when append is true', () => {
- it('sets the parsed log ', () => {
- mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, {
- append: true,
- size: 511846,
- complete: true,
- lines: [
- {
- offset: 1,
- content: [{ text: 'Running with gitlab-runner 11.12.1 (5a147c92)' }],
- },
- ],
- });
-
- expect(stateCopy.jobLog).toEqual([
- {
- offset: 1,
- content: [{ text: 'Running with gitlab-runner 11.12.1 (5a147c92)' }],
- lineNumber: 1,
- },
- ]);
- });
- });
-
- describe('when lines are defined', () => {
- it('sets the parsed log ', () => {
- mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, {
- append: false,
- size: 511846,
- complete: true,
- lines: [
- { offset: 0, content: [{ text: 'Running with gitlab-runner 11.11.1 (5a147c92)' }] },
- ],
- });
-
- expect(stateCopy.jobLog).toEqual([
- {
- offset: 0,
- content: [{ text: 'Running with gitlab-runner 11.11.1 (5a147c92)' }],
- lineNumber: 1,
- },
- ]);
- });
- });
-
- describe('when lines are null', () => {
- it('sets the default value', () => {
- mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, {
- append: true,
- html,
- size: 511846,
- complete: false,
- lines: null,
- });
-
- expect(stateCopy.jobLog).toEqual([]);
- });
- });
- });
- });
- });
-});
diff --git a/spec/frontend/jobs/store/utils_spec.js b/spec/frontend/jobs/store/utils_spec.js
index 92ac33c8792..9458c2184f5 100644
--- a/spec/frontend/jobs/store/utils_spec.js
+++ b/spec/frontend/jobs/store/utils_spec.js
@@ -1,6 +1,5 @@
import {
logLinesParser,
- logLinesParserLegacy,
updateIncrementalJobLog,
parseHeaderLine,
parseLine,
@@ -18,8 +17,6 @@ import {
headerTraceIncremental,
collapsibleTrace,
collapsibleTraceIncremental,
- multipleCollapsibleSectionsMockData,
- backwardsCompatibilityTrace,
} from '../components/log/mock_data';
describe('Jobs Store Utils', () => {
@@ -178,11 +175,11 @@ describe('Jobs Store Utils', () => {
expect(isCollapsibleSection()).toEqual(false);
});
});
- describe('logLinesParserLegacy', () => {
+ describe('logLinesParser', () => {
let result;
beforeEach(() => {
- result = logLinesParserLegacy(utilsMockData);
+ result = logLinesParser(utilsMockData);
});
describe('regular line', () => {
@@ -219,102 +216,6 @@ describe('Jobs Store Utils', () => {
});
});
- describe('logLinesParser', () => {
- let result;
-
- beforeEach(() => {
- result = logLinesParser(utilsMockData);
- });
-
- describe('regular line', () => {
- it('adds a lineNumber property with correct index', () => {
- expect(result.parsedLines[0].lineNumber).toEqual(1);
- expect(result.parsedLines[1].line.lineNumber).toEqual(2);
- });
- });
-
- describe('collapsible section', () => {
- it('adds a `isClosed` property', () => {
- expect(result.parsedLines[1].isClosed).toEqual(false);
- });
-
- it('adds a `isHeader` property', () => {
- expect(result.parsedLines[1].isHeader).toEqual(true);
- });
-
- it('creates a lines array property with the content of the collapsible section', () => {
- expect(result.parsedLines[1].lines.length).toEqual(2);
- expect(result.parsedLines[1].lines[0].content).toEqual(utilsMockData[2].content);
- expect(result.parsedLines[1].lines[1].content).toEqual(utilsMockData[3].content);
- });
- });
-
- describe('section duration', () => {
- it('adds the section information to the header section', () => {
- expect(result.parsedLines[1].line.section_duration).toEqual(
- utilsMockData[4].section_duration,
- );
- });
-
- it('does not add section duration as a line', () => {
- expect(result.parsedLines[1].lines.includes(utilsMockData[4])).toEqual(false);
- });
- });
-
- describe('multiple collapsible sections', () => {
- beforeEach(() => {
- result = logLinesParser(multipleCollapsibleSectionsMockData);
- });
-
- it('should contain a section inside another section', () => {
- const innerSection = [
- {
- isClosed: false,
- isHeader: true,
- line: {
- content: [{ text: '1st collapsible section' }],
- lineNumber: 6,
- offset: 1006,
- section: 'collapsible-1',
- section_duration: '01:00',
- section_header: true,
- },
- lines: [
- {
- content: [
- {
- text:
- 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam lorem dolor, congue ac condimentum vitae',
- },
- ],
- lineNumber: 7,
- offset: 1007,
- section: 'collapsible-1',
- },
- ],
- },
- ];
-
- expect(result.parsedLines[1].lines).toEqual(expect.arrayContaining(innerSection));
- });
- });
-
- describe('backwards compatibility', () => {
- beforeEach(() => {
- result = logLinesParser(backwardsCompatibilityTrace);
- });
-
- it('should return an object with a parsedLines prop', () => {
- expect(result).toEqual(
- expect.objectContaining({
- parsedLines: expect.any(Array),
- }),
- );
- expect(result.parsedLines).toHaveLength(1);
- });
- });
- });
-
describe('findOffsetAndRemove', () => {
describe('when last item is header', () => {
const existingLog = [
@@ -490,7 +391,7 @@ describe('Jobs Store Utils', () => {
describe('updateIncrementalJobLog', () => {
describe('without repeated section', () => {
it('concats and parses both arrays', () => {
- const oldLog = logLinesParserLegacy(originalTrace);
+ const oldLog = logLinesParser(originalTrace);
const result = updateIncrementalJobLog(regularIncremental, oldLog);
expect(result).toEqual([
@@ -518,7 +419,7 @@ describe('Jobs Store Utils', () => {
describe('with regular line repeated offset', () => {
it('updates the last line and formats with the incremental part', () => {
- const oldLog = logLinesParserLegacy(originalTrace);
+ const oldLog = logLinesParser(originalTrace);
const result = updateIncrementalJobLog(regularIncrementalRepeated, oldLog);
expect(result).toEqual([
@@ -537,7 +438,7 @@ describe('Jobs Store Utils', () => {
describe('with header line repeated', () => {
it('updates the header line and formats with the incremental part', () => {
- const oldLog = logLinesParserLegacy(headerTrace);
+ const oldLog = logLinesParser(headerTrace);
const result = updateIncrementalJobLog(headerTraceIncremental, oldLog);
expect(result).toEqual([
@@ -563,7 +464,7 @@ describe('Jobs Store Utils', () => {
describe('with collapsible line repeated', () => {
it('updates the collapsible line and formats with the incremental part', () => {
- const oldLog = logLinesParserLegacy(collapsibleTrace);
+ const oldLog = logLinesParser(collapsibleTrace);
const result = updateIncrementalJobLog(collapsibleTraceIncremental, oldLog);
expect(result).toEqual([
diff --git a/spec/frontend/lib/dompurify_spec.js b/spec/frontend/lib/dompurify_spec.js
index 34325dad6a1..b585c69e911 100644
--- a/spec/frontend/lib/dompurify_spec.js
+++ b/spec/frontend/lib/dompurify_spec.js
@@ -34,6 +34,17 @@ const unsafeUrls = [
`${absoluteGon.sprite_file_icons}/../../https://evil.url`,
];
+/* eslint-disable no-script-url */
+const invalidProtocolUrls = [
+ 'javascript:alert(1)',
+ 'jAvascript:alert(1)',
+ 'data:text/html,<script>alert(1);</script>',
+ ' javascript:',
+ 'javascript :',
+];
+/* eslint-enable no-script-url */
+const validProtocolUrls = ['slack://open', 'x-devonthink-item://90909', 'x-devonthink-item:90909'];
+
const forbiddenDataAttrs = ['data-remote', 'data-url', 'data-type', 'data-method'];
const acceptedDataAttrs = ['data-random', 'data-custom'];
@@ -150,4 +161,16 @@ describe('~/lib/dompurify', () => {
expect(sanitize(htmlHref)).toBe(`<a ${attrWithValue}>hello</a>`);
});
});
+
+ describe('with non-http links', () => {
+ it.each(validProtocolUrls)('should allow %s', (url) => {
+ const html = `<a href="${url}">internal link</a>`;
+ expect(sanitize(html)).toBe(`<a href="${url}">internal link</a>`);
+ });
+
+ it.each(invalidProtocolUrls)('should not allow %s', (url) => {
+ const html = `<a href="${url}">internal link</a>`;
+ expect(sanitize(html)).toBe(`<a>internal link</a>`);
+ });
+ });
});
diff --git a/spec/frontend/lib/gfm/index_spec.js b/spec/frontend/lib/gfm/index_spec.js
index 7aab0072364..b722315d63a 100644
--- a/spec/frontend/lib/gfm/index_spec.js
+++ b/spec/frontend/lib/gfm/index_spec.js
@@ -1,11 +1,12 @@
import { render } from '~/lib/gfm';
describe('gfm', () => {
- const markdownToAST = async (markdown) => {
+ const markdownToAST = async (markdown, skipRendering = []) => {
let result;
await render({
markdown,
+ skipRendering,
renderer: (tree) => {
result = tree;
},
@@ -58,36 +59,62 @@ describe('gfm', () => {
expect(result).toEqual(rendered);
});
- it('transforms footnotes into footnotedefinition and footnotereference tags', async () => {
- const result = await markdownToAST(
- `footnote reference [^footnote]
+ describe('when skipping the rendering of footnote reference and definition nodes', () => {
+ it('transforms footnotes into footnotedefinition and footnotereference tags', async () => {
+ const result = await markdownToAST(
+ `footnote reference [^footnote]
[^footnote]: Footnote definition`,
- );
+ ['footnoteReference', 'footnoteDefinition'],
+ );
- expectInRoot(
- result,
- expect.objectContaining({
- children: expect.arrayContaining([
- expect.objectContaining({
- type: 'element',
- tagName: 'footnotereference',
- properties: {
- identifier: 'footnote',
- label: 'footnote',
- },
- }),
- ]),
- }),
+ expectInRoot(
+ result,
+ expect.objectContaining({
+ children: expect.arrayContaining([
+ expect.objectContaining({
+ type: 'element',
+ tagName: 'footnotereference',
+ properties: {
+ identifier: 'footnote',
+ label: 'footnote',
+ },
+ }),
+ ]),
+ }),
+ );
+
+ expectInRoot(
+ result,
+ expect.objectContaining({
+ tagName: 'footnotedefinition',
+ properties: {
+ identifier: 'footnote',
+ label: 'footnote',
+ },
+ }),
+ );
+ });
+ });
+ });
+
+ describe('when skipping the rendering of code blocks', () => {
+ it('transforms code nodes into codeblock html tags', async () => {
+ const result = await markdownToAST(
+ `
+\`\`\`javascript
+console.log('Hola');
+\`\`\`\
+ `,
+ ['code'],
);
expectInRoot(
result,
expect.objectContaining({
- tagName: 'footnotedefinition',
+ tagName: 'codeblock',
properties: {
- identifier: 'footnote',
- label: 'footnote',
+ language: 'javascript',
},
}),
);
diff --git a/spec/frontend/lib/utils/common_utils_spec.js b/spec/frontend/lib/utils/common_utils_spec.js
index 8e499844406..7cf101a5e59 100644
--- a/spec/frontend/lib/utils/common_utils_spec.js
+++ b/spec/frontend/lib/utils/common_utils_spec.js
@@ -88,6 +88,28 @@ describe('common_utils', () => {
expectGetElementIdToHaveBeenCalledWith('user-content-definição');
});
+ it(`does not scroll when ${commonUtils.NO_SCROLL_TO_HASH_CLASS} is set on target`, () => {
+ jest.spyOn(window, 'scrollBy');
+
+ document.body.innerHTML += `
+ <div id="parent">
+ <a href="#test">Link</a>
+ <div style="height: 2000px;"></div>
+ <div id="test" style="height: 2000px;" class="${commonUtils.NO_SCROLL_TO_HASH_CLASS}"></div>
+ </div>
+ `;
+
+ window.history.pushState({}, null, '#test');
+ commonUtils.handleLocationHash();
+ jest.runOnlyPendingTimers();
+
+ try {
+ expect(window.scrollBy).not.toHaveBeenCalled();
+ } finally {
+ document.getElementById('parent').remove();
+ }
+ });
+
it('scrolls element into view', () => {
document.body.innerHTML += `
<div id="parent">
diff --git a/spec/frontend/lib/utils/navigation_utility_spec.js b/spec/frontend/lib/utils/navigation_utility_spec.js
index 632a8904578..6d3a871eb33 100644
--- a/spec/frontend/lib/utils/navigation_utility_spec.js
+++ b/spec/frontend/lib/utils/navigation_utility_spec.js
@@ -81,8 +81,6 @@ describe('initPrefetchLinks', () => {
const mouseOverEvent = new Event('mouseover');
beforeEach(() => {
- jest.useFakeTimers();
-
jest.spyOn(global, 'setTimeout');
jest.spyOn(newLink, 'removeEventListener');
});
diff --git a/spec/frontend/lib/utils/rails_ujs_spec.js b/spec/frontend/lib/utils/rails_ujs_spec.js
index 00c29b72e73..c10301523c9 100644
--- a/spec/frontend/lib/utils/rails_ujs_spec.js
+++ b/spec/frontend/lib/utils/rails_ujs_spec.js
@@ -8,7 +8,7 @@ beforeAll(async () => {
// that jQuery isn't available *before* we import @rails/ujs.
delete global.jQuery;
- const { initRails } = await import('~/lib/utils/rails_ujs.js');
+ const { initRails } = await import('~/lib/utils/rails_ujs');
initRails();
});
diff --git a/spec/frontend/lib/utils/text_utility_spec.js b/spec/frontend/lib/utils/text_utility_spec.js
index 9570d2a831c..8e31fc792c5 100644
--- a/spec/frontend/lib/utils/text_utility_spec.js
+++ b/spec/frontend/lib/utils/text_utility_spec.js
@@ -384,4 +384,17 @@ describe('text_utility', () => {
);
});
});
+
+ describe('limitedCounterWithDelimiter', () => {
+ it('returns 1,000+ for count greater than 1000', () => {
+ const expectedOutput = '1,000+';
+
+ expect(textUtils.limitedCounterWithDelimiter(1001)).toBe(expectedOutput);
+ expect(textUtils.limitedCounterWithDelimiter(2300)).toBe(expectedOutput);
+ });
+
+ it('returns exact number for count less than 1000', () => {
+ expect(textUtils.limitedCounterWithDelimiter(120)).toBe(120);
+ });
+ });
});
diff --git a/spec/frontend/logs/components/environment_logs_spec.js b/spec/frontend/logs/components/environment_logs_spec.js
deleted file mode 100644
index 84dc0bdf6cd..00000000000
--- a/spec/frontend/logs/components/environment_logs_spec.js
+++ /dev/null
@@ -1,370 +0,0 @@
-import { GlSprintf, GlDropdown, GlDropdownItem } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import { scrollDown } from '~/lib/utils/scroll_utils';
-import EnvironmentLogs from '~/logs/components/environment_logs.vue';
-
-import { createStore } from '~/logs/stores';
-import {
- mockEnvName,
- mockEnvironments,
- mockPods,
- mockLogsResult,
- mockTrace,
- mockEnvironmentsEndpoint,
- mockDocumentationPath,
-} from '../mock_data';
-
-jest.mock('~/lib/utils/scroll_utils');
-
-const module = 'environmentLogs';
-
-jest.mock('lodash/throttle', () =>
- jest.fn((func) => {
- return func;
- }),
-);
-
-describe('EnvironmentLogs', () => {
- let store;
- let dispatch;
- let wrapper;
- let state;
-
- const propsData = {
- environmentName: mockEnvName,
- environmentsPath: mockEnvironmentsEndpoint,
- clusterApplicationsDocumentationPath: mockDocumentationPath,
- clustersPath: '/gitlab-org',
- };
-
- const updateControlBtnsMock = jest.fn();
- const LogControlButtonsStub = {
- template: '<div/>',
- methods: {
- update: updateControlBtnsMock,
- },
- props: {
- scrollDownButtonDisabled: false,
- },
- };
-
- const findEnvironmentsDropdown = () => wrapper.find('.js-environments-dropdown');
-
- const findSimpleFilters = () => wrapper.find({ ref: 'log-simple-filters' });
- const findAdvancedFilters = () => wrapper.find({ ref: 'log-advanced-filters' });
- const findElasticsearchNotice = () => wrapper.find({ ref: 'elasticsearchNotice' });
- const findLogControlButtons = () => wrapper.find(LogControlButtonsStub);
-
- const findInfiniteScroll = () => wrapper.find({ ref: 'infiniteScroll' });
- const findLogTrace = () => wrapper.find({ ref: 'logTrace' });
- const findLogFooter = () => wrapper.find({ ref: 'logFooter' });
- const getInfiniteScrollAttr = (attr) => parseInt(findInfiniteScroll().attributes(attr), 10);
-
- const mockSetInitData = () => {
- state.pods.options = mockPods;
- state.environments.current = mockEnvName;
- [state.pods.current] = state.pods.options;
-
- state.logs.lines = [];
- };
-
- const mockShowPodLogs = () => {
- state.pods.options = mockPods;
- [state.pods.current] = mockPods;
-
- state.logs.lines = mockLogsResult;
- };
-
- const mockFetchEnvs = () => {
- state.environments.options = mockEnvironments;
- };
-
- const initWrapper = () => {
- wrapper = shallowMount(EnvironmentLogs, {
- propsData,
- store,
- stubs: {
- LogControlButtons: LogControlButtonsStub,
- GlInfiniteScroll: {
- name: 'gl-infinite-scroll',
- template: `
- <div>
- <slot name="header"></slot>
- <slot name="items"></slot>
- <slot></slot>
- </div>
- `,
- },
- GlSprintf,
- },
- });
- };
-
- beforeEach(() => {
- store = createStore();
- state = store.state.environmentLogs;
-
- jest.spyOn(store, 'dispatch').mockResolvedValue();
-
- dispatch = store.dispatch;
- });
-
- afterEach(() => {
- store.dispatch.mockReset();
-
- if (wrapper) {
- wrapper.destroy();
- }
- });
-
- it('displays UI elements', () => {
- initWrapper();
-
- expect(findEnvironmentsDropdown().is(GlDropdown)).toBe(true);
- expect(findSimpleFilters().exists()).toBe(true);
- expect(findLogControlButtons().exists()).toBe(true);
-
- expect(findInfiniteScroll().exists()).toBe(true);
- expect(findLogTrace().exists()).toBe(true);
- });
-
- it('mounted inits data', () => {
- initWrapper();
-
- expect(dispatch).toHaveBeenCalledWith(`${module}/setInitData`, {
- timeRange: expect.objectContaining({
- default: true,
- }),
- environmentName: mockEnvName,
- podName: null,
- });
-
- expect(dispatch).toHaveBeenCalledWith(`${module}/fetchEnvironments`, mockEnvironmentsEndpoint);
- });
-
- describe('loading state', () => {
- beforeEach(() => {
- state.pods.options = [];
-
- state.logs.lines = [];
- state.logs.isLoading = true;
-
- state.environments = {
- options: [],
- isLoading: true,
- };
-
- initWrapper();
- });
-
- it('does not display an alert to upgrade to ES', () => {
- expect(findElasticsearchNotice().exists()).toBe(false);
- });
-
- it('displays a disabled environments dropdown', () => {
- expect(findEnvironmentsDropdown().attributes('disabled')).toBe('true');
- expect(findEnvironmentsDropdown().findAll(GlDropdownItem).length).toBe(0);
- });
-
- it('does not update buttons state', () => {
- expect(updateControlBtnsMock).not.toHaveBeenCalled();
- });
-
- it('shows an infinite scroll with no content', () => {
- expect(getInfiniteScrollAttr('fetched-items')).toBe(0);
- });
-
- it('shows an infinite scroll container with no set max-height ', () => {
- expect(findInfiniteScroll().attributes('max-list-height')).toBeUndefined();
- });
-
- it('shows a logs trace', () => {
- expect(findLogTrace().text()).toBe('');
- expect(findLogTrace().find('.js-build-loader-animation').isVisible()).toBe(true);
- });
- });
-
- describe('k8s environment', () => {
- beforeEach(() => {
- state.pods.options = [];
-
- state.logs.lines = [];
- state.logs.isLoading = false;
-
- state.environments = {
- options: mockEnvironments,
- current: 'staging',
- isLoading: false,
- };
-
- initWrapper();
- });
-
- it('displays an alert to upgrade to ES', () => {
- expect(findElasticsearchNotice().exists()).toBe(true);
- });
-
- it('displays simple filters for kubernetes logs API', () => {
- expect(findSimpleFilters().exists()).toBe(true);
- expect(findAdvancedFilters().exists()).toBe(false);
- });
- });
-
- describe('state with data', () => {
- beforeEach(() => {
- dispatch.mockImplementation((actionName) => {
- if (actionName === `${module}/setInitData`) {
- mockSetInitData();
- } else if (actionName === `${module}/showPodLogs`) {
- mockShowPodLogs();
- } else if (actionName === `${module}/fetchEnvironments`) {
- mockFetchEnvs();
- mockShowPodLogs();
- }
- });
-
- initWrapper();
- });
-
- afterEach(() => {
- scrollDown.mockReset();
- updateControlBtnsMock.mockReset();
- });
-
- it('does not display an alert to upgrade to ES', () => {
- expect(findElasticsearchNotice().exists()).toBe(false);
- });
-
- it('populates environments dropdown', () => {
- const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
- expect(findEnvironmentsDropdown().props('text')).toBe(mockEnvName);
- expect(items.length).toBe(mockEnvironments.length);
- mockEnvironments.forEach((env, i) => {
- const item = items.at(i);
- expect(item.text()).toBe(env.name);
- });
- });
-
- it('dropdown has one environment selected', () => {
- const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
- mockEnvironments.forEach((env, i) => {
- const item = items.at(i);
-
- if (item.text() !== mockEnvName) {
- expect(item.find(GlDropdownItem).attributes('ischecked')).toBeFalsy();
- } else {
- expect(item.find(GlDropdownItem).attributes('ischecked')).toBeTruthy();
- }
- });
- });
-
- it('displays advanced filters for elasticsearch logs API', () => {
- expect(findSimpleFilters().exists()).toBe(false);
- expect(findAdvancedFilters().exists()).toBe(true);
- });
-
- it('shows infinite scroll with content', () => {
- expect(getInfiniteScrollAttr('fetched-items')).toBe(mockTrace.length);
- });
-
- it('populates logs trace', () => {
- const trace = findLogTrace();
- expect(trace.text().split('\n').length).toBe(mockTrace.length);
- expect(trace.text().split('\n')).toEqual(mockTrace);
- });
-
- it('populates footer', () => {
- const footer = findLogFooter().text();
-
- expect(footer).toContain(`${mockLogsResult.length} results`);
- });
-
- describe('when user clicks', () => {
- it('environment name, trace is refreshed', () => {
- const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
- const index = 1; // any env
-
- expect(dispatch).not.toHaveBeenCalledWith(`${module}/showEnvironment`, expect.anything());
-
- items.at(index).vm.$emit('click');
-
- expect(dispatch).toHaveBeenCalledWith(
- `${module}/showEnvironment`,
- mockEnvironments[index].name,
- );
- });
-
- it('refresh button, trace is refreshed', () => {
- expect(dispatch).not.toHaveBeenCalledWith(`${module}/refreshPodLogs`, undefined);
-
- findLogControlButtons().vm.$emit('refresh');
-
- expect(dispatch).toHaveBeenCalledWith(`${module}/refreshPodLogs`, undefined);
- });
- });
- });
-
- describe('listeners', () => {
- beforeEach(() => {
- initWrapper();
- });
-
- it('attaches listeners in components', () => {
- expect(findInfiniteScroll().vm.$listeners).toEqual({
- topReached: expect.any(Function),
- scroll: expect.any(Function),
- });
- });
-
- it('`topReached` when not loading', () => {
- expect(store.dispatch).not.toHaveBeenCalledWith(`${module}/fetchMoreLogsPrepend`, undefined);
-
- findInfiniteScroll().vm.$emit('topReached');
-
- expect(store.dispatch).toHaveBeenCalledWith(`${module}/fetchMoreLogsPrepend`, undefined);
- });
-
- it('`topReached` does not fetch more logs when already loading', () => {
- state.logs.isLoading = true;
- findInfiniteScroll().vm.$emit('topReached');
-
- expect(store.dispatch).not.toHaveBeenCalledWith(`${module}/fetchMoreLogsPrepend`, undefined);
- });
-
- it('`topReached` fetches more logs', () => {
- state.logs.isLoading = true;
- findInfiniteScroll().vm.$emit('topReached');
-
- expect(store.dispatch).not.toHaveBeenCalledWith(`${module}/fetchMoreLogsPrepend`, undefined);
- });
-
- it('`scroll` on a scrollable target results in enabled scroll buttons', async () => {
- const target = { scrollTop: 10, clientHeight: 10, scrollHeight: 21 };
-
- state.logs.isLoading = true;
- findInfiniteScroll().vm.$emit('scroll', { target });
-
- await nextTick();
- expect(findLogControlButtons().props('scrollDownButtonDisabled')).toEqual(false);
- });
-
- it('`scroll` on a non-scrollable target results in disabled scroll buttons', async () => {
- const target = { scrollTop: 10, clientHeight: 10, scrollHeight: 20 };
-
- state.logs.isLoading = true;
- findInfiniteScroll().vm.$emit('scroll', { target });
-
- await nextTick();
- expect(findLogControlButtons().props('scrollDownButtonDisabled')).toEqual(true);
- });
-
- it('`scroll` on no target results in disabled scroll buttons', async () => {
- state.logs.isLoading = true;
- findInfiniteScroll().vm.$emit('scroll', { target: undefined });
-
- await nextTick();
- expect(findLogControlButtons().props('scrollDownButtonDisabled')).toEqual(true);
- });
- });
-});
diff --git a/spec/frontend/logs/components/log_advanced_filters_spec.js b/spec/frontend/logs/components/log_advanced_filters_spec.js
deleted file mode 100644
index 4e4052eb4d8..00000000000
--- a/spec/frontend/logs/components/log_advanced_filters_spec.js
+++ /dev/null
@@ -1,175 +0,0 @@
-import { GlFilteredSearch } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { convertToFixedRange } from '~/lib/utils/datetime_range';
-import LogAdvancedFilters from '~/logs/components/log_advanced_filters.vue';
-import { TOKEN_TYPE_POD_NAME } from '~/logs/constants';
-import { createStore } from '~/logs/stores';
-import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
-import { defaultTimeRange } from '~/vue_shared/constants';
-import { mockPods, mockSearch } from '../mock_data';
-
-const module = 'environmentLogs';
-
-describe('LogAdvancedFilters', () => {
- let store;
- let dispatch;
- let wrapper;
- let state;
-
- const findFilteredSearch = () => wrapper.find(GlFilteredSearch);
- const findTimeRangePicker = () => wrapper.find({ ref: 'dateTimePicker' });
- const getSearchToken = (type) =>
- findFilteredSearch()
- .props('availableTokens')
- .filter((token) => token.type === type)[0];
-
- const mockStateLoading = () => {
- state.timeRange.selected = defaultTimeRange;
- state.timeRange.current = convertToFixedRange(defaultTimeRange);
- state.pods.options = [];
- state.pods.current = null;
- state.logs.isLoading = true;
- };
-
- const mockStateWithData = () => {
- state.timeRange.selected = defaultTimeRange;
- state.timeRange.current = convertToFixedRange(defaultTimeRange);
- state.pods.options = mockPods;
- state.pods.current = null;
- state.logs.isLoading = false;
- };
-
- const initWrapper = (propsData = {}) => {
- wrapper = shallowMount(LogAdvancedFilters, {
- propsData: {
- ...propsData,
- },
- store,
- });
- };
-
- beforeEach(() => {
- store = createStore();
- state = store.state.environmentLogs;
-
- jest.spyOn(store, 'dispatch').mockResolvedValue();
-
- dispatch = store.dispatch;
- });
-
- afterEach(() => {
- store.dispatch.mockReset();
-
- if (wrapper) {
- wrapper.destroy();
- }
- });
-
- it('displays UI elements', () => {
- initWrapper();
-
- expect(findFilteredSearch().exists()).toBe(true);
- expect(findTimeRangePicker().exists()).toBe(true);
- });
-
- it('displays search tokens', () => {
- initWrapper();
-
- expect(getSearchToken(TOKEN_TYPE_POD_NAME)).toMatchObject({
- title: 'Pod name',
- unique: true,
- operators: OPERATOR_IS_ONLY,
- });
- });
-
- describe('disabled state', () => {
- beforeEach(() => {
- mockStateLoading();
- initWrapper({
- disabled: true,
- });
- });
-
- it('displays disabled filters', () => {
- expect(findFilteredSearch().attributes('disabled')).toBeTruthy();
- expect(findTimeRangePicker().attributes('disabled')).toBeTruthy();
- });
- });
-
- describe('when the state is loading', () => {
- beforeEach(() => {
- mockStateLoading();
- initWrapper();
- });
-
- it('displays a disabled search', () => {
- expect(findFilteredSearch().attributes('disabled')).toBeTruthy();
- });
-
- it('displays an enabled date filter', () => {
- expect(findTimeRangePicker().attributes('disabled')).toBeFalsy();
- });
-
- it('displays no pod options and a loading state when no pods are available', () => {
- expect(getSearchToken(TOKEN_TYPE_POD_NAME).options).toBe(null);
- expect(getSearchToken(TOKEN_TYPE_POD_NAME).loading).toBe(true);
- });
- });
-
- describe('when the state has data', () => {
- beforeEach(() => {
- mockStateWithData();
- initWrapper();
- });
-
- it('displays a single token for pods', () => {
- initWrapper();
-
- const tokens = findFilteredSearch().props('availableTokens');
-
- expect(tokens).toHaveLength(1);
- expect(tokens[0].type).toBe(TOKEN_TYPE_POD_NAME);
- });
-
- it('displays enabled filters', () => {
- expect(findFilteredSearch().attributes('disabled')).toBeFalsy();
- expect(findTimeRangePicker().attributes('disabled')).toBeFalsy();
- });
-
- it('displays options in the pods token', () => {
- const { options } = getSearchToken(TOKEN_TYPE_POD_NAME);
-
- expect(options).toHaveLength(mockPods.length);
- });
-
- it('displays options in date time picker', () => {
- const options = findTimeRangePicker().props('options');
-
- expect(options).toEqual(expect.any(Array));
- expect(options.length).toBeGreaterThan(0);
- });
-
- describe('when the user interacts', () => {
- it('clicks on the search button with no filters, showFilteredLogs is dispatched with null', () => {
- findFilteredSearch().vm.$emit('submit', null);
-
- expect(dispatch).toHaveBeenCalledWith(`${module}/showFilteredLogs`, null);
- });
-
- it('clicks on the search button with filters, showFilteredLogs is dispatched with the filters', () => {
- findFilteredSearch().vm.$emit('submit', [mockSearch]);
-
- expect(dispatch).toHaveBeenCalledWith(`${module}/showFilteredLogs`, [mockSearch]);
- });
-
- it('selects a new time range', () => {
- expect(findTimeRangePicker().attributes('disabled')).toBeFalsy();
-
- const mockRange = { start: 'START_DATE', end: 'END_DATE' };
- findTimeRangePicker().vm.$emit('input', mockRange);
-
- expect(dispatch).toHaveBeenCalledWith(`${module}/setTimeRange`, mockRange);
- });
- });
- });
-});
diff --git a/spec/frontend/logs/components/log_control_buttons_spec.js b/spec/frontend/logs/components/log_control_buttons_spec.js
deleted file mode 100644
index e249272b87d..00000000000
--- a/spec/frontend/logs/components/log_control_buttons_spec.js
+++ /dev/null
@@ -1,88 +0,0 @@
-import { GlButton } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import LogControlButtons from '~/logs/components/log_control_buttons.vue';
-
-describe('LogControlButtons', () => {
- let wrapper;
-
- const findScrollToTop = () => wrapper.find('.js-scroll-to-top');
- const findScrollToBottom = () => wrapper.find('.js-scroll-to-bottom');
- const findRefreshBtn = () => wrapper.find('.js-refresh-log');
-
- const initWrapper = (opts) => {
- wrapper = shallowMount(LogControlButtons, {
- listeners: {
- scrollUp: () => {},
- scrollDown: () => {},
- },
- ...opts,
- });
- };
-
- afterEach(() => {
- if (wrapper) {
- wrapper.destroy();
- }
- });
-
- it('displays UI elements', () => {
- initWrapper();
-
- expect(findScrollToTop().is(GlButton)).toBe(true);
- expect(findScrollToBottom().is(GlButton)).toBe(true);
- expect(findRefreshBtn().is(GlButton)).toBe(true);
- });
-
- it('emits a `refresh` event when the `refresh` button is clicked', async () => {
- initWrapper();
-
- // An `undefined` value means no event was emitted
- expect(wrapper.emitted('refresh')).toBe(undefined);
-
- findRefreshBtn().vm.$emit('click');
-
- await nextTick();
- expect(wrapper.emitted('refresh')).toHaveLength(1);
- });
-
- describe('when scrolling actions are enabled', () => {
- beforeEach(async () => {
- // mock scrolled to the middle of a long page
- initWrapper();
- await nextTick();
- });
-
- it('click on "scroll to top" scrolls up', () => {
- expect(findScrollToTop().attributes('disabled')).toBeUndefined();
-
- findScrollToTop().vm.$emit('click');
-
- expect(wrapper.emitted('scrollUp')).toHaveLength(1);
- });
-
- it('click on "scroll to bottom" scrolls down', () => {
- expect(findScrollToBottom().attributes('disabled')).toBeUndefined();
-
- findScrollToBottom().vm.$emit('click');
-
- expect(wrapper.emitted('scrollDown')).toHaveLength(1);
- });
- });
-
- describe('when scrolling actions are disabled', () => {
- beforeEach(async () => {
- initWrapper({ listeners: {} });
- await nextTick();
- });
-
- it('buttons are not rendered', async () => {
- await nextTick();
- expect(findScrollToTop().exists()).toBe(false);
- expect(findScrollToBottom().exists()).toBe(false);
- // This should be enabled when gitlab-ui contains:
- // https://gitlab.com/gitlab-org/gitlab-ui/-/merge_requests/1149
- // expect(findScrollToBottom().is('[disabled]')).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/logs/components/log_simple_filters_spec.js b/spec/frontend/logs/components/log_simple_filters_spec.js
deleted file mode 100644
index 04ad2e03542..00000000000
--- a/spec/frontend/logs/components/log_simple_filters_spec.js
+++ /dev/null
@@ -1,134 +0,0 @@
-import { GlDropdownItem } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import LogSimpleFilters from '~/logs/components/log_simple_filters.vue';
-import { createStore } from '~/logs/stores';
-import { mockPods, mockPodName } from '../mock_data';
-
-const module = 'environmentLogs';
-
-describe('LogSimpleFilters', () => {
- let store;
- let dispatch;
- let wrapper;
- let state;
-
- const findPodsDropdown = () => wrapper.find({ ref: 'podsDropdown' });
- const findPodsNoPodsText = () => wrapper.find({ ref: 'noPodsMsg' });
- const findPodsDropdownItems = () =>
- findPodsDropdown()
- .findAll(GlDropdownItem)
- .filter((item) => !('disabled' in item.attributes()));
-
- const mockPodsLoading = () => {
- state.pods.options = [];
- state.pods.current = null;
- };
-
- const mockPodsLoaded = () => {
- state.pods.options = mockPods;
- state.pods.current = mockPodName;
- };
-
- const initWrapper = (propsData = {}) => {
- wrapper = shallowMount(LogSimpleFilters, {
- propsData: {
- ...propsData,
- },
- store,
- });
- };
-
- beforeEach(() => {
- store = createStore();
- state = store.state.environmentLogs;
-
- jest.spyOn(store, 'dispatch').mockResolvedValue();
-
- dispatch = store.dispatch;
- });
-
- afterEach(() => {
- store.dispatch.mockReset();
-
- if (wrapper) {
- wrapper.destroy();
- }
- });
-
- it('displays UI elements', () => {
- initWrapper();
-
- expect(findPodsDropdown().exists()).toBe(true);
- });
-
- describe('disabled state', () => {
- beforeEach(() => {
- mockPodsLoading();
- initWrapper({
- disabled: true,
- });
- });
-
- it('displays a disabled pods dropdown', () => {
- expect(findPodsDropdown().props('text')).toBe('No pod selected');
- expect(findPodsDropdown().attributes('disabled')).toBeTruthy();
- });
- });
-
- describe('loading state', () => {
- beforeEach(() => {
- mockPodsLoading();
- initWrapper();
- });
-
- it('displays an enabled pods dropdown', () => {
- expect(findPodsDropdown().attributes('disabled')).toBeFalsy();
- expect(findPodsDropdown().props('text')).toBe('No pod selected');
- });
-
- it('displays an empty pods dropdown', () => {
- expect(findPodsNoPodsText().exists()).toBe(true);
- expect(findPodsDropdownItems()).toHaveLength(0);
- });
- });
-
- describe('pods available state', () => {
- beforeEach(() => {
- mockPodsLoaded();
- initWrapper();
- });
-
- it('displays an enabled pods dropdown', () => {
- expect(findPodsDropdown().attributes('disabled')).toBeFalsy();
- expect(findPodsDropdown().props('text')).toBe(mockPods[0]);
- });
-
- it('displays a pods dropdown with items', () => {
- expect(findPodsNoPodsText().exists()).toBe(false);
- expect(findPodsDropdownItems()).toHaveLength(mockPods.length);
- });
-
- it('dropdown has one pod selected', () => {
- const items = findPodsDropdownItems();
- mockPods.forEach((pod, i) => {
- const item = items.at(i);
- if (item.text() !== mockPodName) {
- expect(item.find(GlDropdownItem).attributes('ischecked')).toBeFalsy();
- } else {
- expect(item.find(GlDropdownItem).attributes('ischecked')).toBeTruthy();
- }
- });
- });
-
- it('when the user clicks on a pod, showPodLogs is dispatched', () => {
- const items = findPodsDropdownItems();
- const index = 2; // any pod
-
- expect(dispatch).not.toHaveBeenCalledWith(`${module}/showPodLogs`, expect.anything());
-
- items.at(index).vm.$emit('click');
-
- expect(dispatch).toHaveBeenCalledWith(`${module}/showPodLogs`, mockPods[index]);
- });
- });
-});
diff --git a/spec/frontend/logs/components/tokens/token_with_loading_state_spec.js b/spec/frontend/logs/components/tokens/token_with_loading_state_spec.js
deleted file mode 100644
index f667a590a36..00000000000
--- a/spec/frontend/logs/components/tokens/token_with_loading_state_spec.js
+++ /dev/null
@@ -1,71 +0,0 @@
-import { GlFilteredSearchToken, GlLoadingIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-
-import TokenWithLoadingState from '~/logs/components/tokens/token_with_loading_state.vue';
-
-describe('TokenWithLoadingState', () => {
- let wrapper;
-
- const findFilteredSearchToken = () => wrapper.find(GlFilteredSearchToken);
- const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
-
- const initWrapper = (props = {}, options) => {
- wrapper = shallowMount(TokenWithLoadingState, {
- propsData: {
- cursorPosition: 'start',
- ...props,
- },
- ...options,
- });
- };
-
- beforeEach(() => {});
-
- it('passes entire config correctly', () => {
- const config = {
- icon: 'pod',
- type: 'pod',
- title: 'Pod name',
- unique: true,
- };
-
- initWrapper({ config });
-
- expect(findFilteredSearchToken().props('config')).toEqual(config);
- });
-
- describe('suggestions are replaced', () => {
- let mockNoOptsText;
- let config;
- let stubs;
-
- beforeEach(() => {
- mockNoOptsText = 'No suggestions available';
- config = {
- loading: false,
- noOptionsText: mockNoOptsText,
- };
- stubs = {
- GlFilteredSearchToken: {
- template: `<div><slot name="suggestions"></slot></div>`,
- },
- };
- });
-
- it('renders a loading icon', () => {
- config.loading = true;
-
- initWrapper({ config }, { stubs });
-
- expect(findLoadingIcon().exists()).toBe(true);
- expect(wrapper.text()).toBe('');
- });
-
- it('renders an empty results message', () => {
- initWrapper({ config }, { stubs });
-
- expect(findLoadingIcon().exists()).toBe(false);
- expect(wrapper.text()).toBe(mockNoOptsText);
- });
- });
-});
diff --git a/spec/frontend/logs/mock_data.js b/spec/frontend/logs/mock_data.js
deleted file mode 100644
index 14c8f7a2ba2..00000000000
--- a/spec/frontend/logs/mock_data.js
+++ /dev/null
@@ -1,71 +0,0 @@
-const mockProjectPath = 'root/autodevops-deploy';
-
-export const mockEnvName = 'production';
-export const mockEnvironmentsEndpoint = `${mockProjectPath}/environments.json`;
-export const mockEnvId = '99';
-export const mockDocumentationPath = '/documentation.md';
-export const mockLogsEndpoint = '/dummy_logs_path.json';
-export const mockCursor = 'MOCK_CURSOR';
-export const mockNextCursor = 'MOCK_NEXT_CURSOR';
-
-const makeMockEnvironment = (id, name, advancedQuerying) => ({
- id,
- project_path: mockProjectPath,
- name,
- logs_api_path: mockLogsEndpoint,
- enable_advanced_logs_querying: advancedQuerying,
-});
-
-export const mockEnvironment = makeMockEnvironment(mockEnvId, mockEnvName, true);
-export const mockEnvironments = [
- mockEnvironment,
- makeMockEnvironment(101, 'staging', false),
- makeMockEnvironment(102, 'review/a-feature', false),
-];
-
-export const mockPodName = 'production-764c58d697-aaaaa';
-export const mockPods = [
- mockPodName,
- 'production-764c58d697-bbbbb',
- 'production-764c58d697-ccccc',
- 'production-764c58d697-ddddd',
-];
-
-export const mockLogsResult = [
- {
- timestamp: '2019-12-13T13:43:18.2760123Z',
- message: 'log line 1',
- pod: 'foo',
- },
- {
- timestamp: '2019-12-13T13:43:18.2760123Z',
- message: 'log line A',
- pod: 'bar',
- },
- {
- timestamp: '2019-12-13T13:43:26.8420123Z',
- message: 'log line 2',
- pod: 'foo',
- },
- {
- timestamp: '2019-12-13T13:43:26.8420123Z',
- message: 'log line B',
- pod: 'bar',
- },
-];
-
-export const mockTrace = [
- 'Dec 13 13:43:18.276 | foo | log line 1',
- 'Dec 13 13:43:18.276 | bar | log line A',
- 'Dec 13 13:43:26.842 | foo | log line 2',
- 'Dec 13 13:43:26.842 | bar | log line B',
-];
-
-export const mockResponse = {
- pod_name: mockPodName,
- pods: mockPods,
- logs: mockLogsResult,
- cursor: mockNextCursor,
-};
-
-export const mockSearch = 'foo +bar';
diff --git a/spec/frontend/logs/stores/actions_spec.js b/spec/frontend/logs/stores/actions_spec.js
deleted file mode 100644
index 46ef1500a20..00000000000
--- a/spec/frontend/logs/stores/actions_spec.js
+++ /dev/null
@@ -1,521 +0,0 @@
-import MockAdapter from 'axios-mock-adapter';
-import testAction from 'helpers/vuex_action_helper';
-import axios from '~/lib/utils/axios_utils';
-import { convertToFixedRange } from '~/lib/utils/datetime_range';
-import { TOKEN_TYPE_POD_NAME } from '~/logs/constants';
-import {
- setInitData,
- showFilteredLogs,
- showPodLogs,
- fetchEnvironments,
- fetchLogs,
- fetchMoreLogsPrepend,
-} from '~/logs/stores/actions';
-import * as types from '~/logs/stores/mutation_types';
-import logsPageState from '~/logs/stores/state';
-import Tracking from '~/tracking';
-
-import { defaultTimeRange } from '~/vue_shared/constants';
-
-import {
- mockPodName,
- mockEnvironmentsEndpoint,
- mockEnvironments,
- mockPods,
- mockLogsResult,
- mockEnvName,
- mockSearch,
- mockLogsEndpoint,
- mockResponse,
- mockCursor,
- mockNextCursor,
-} from '../mock_data';
-
-jest.mock('~/lib/utils/datetime_range');
-jest.mock('~/logs/utils');
-
-const mockDefaultRange = {
- start: '2020-01-10T18:00:00.000Z',
- end: '2020-01-10T19:00:00.000Z',
-};
-const mockFixedRange = {
- start: '2020-01-09T18:06:20.000Z',
- end: '2020-01-09T18:36:20.000Z',
-};
-const mockRollingRange = {
- duration: 120,
-};
-const mockRollingRangeAsFixed = {
- start: '2020-01-10T18:00:00.000Z',
- end: '2020-01-10T17:58:00.000Z',
-};
-
-describe('Logs Store actions', () => {
- let state;
- let mock;
-
- const latestGetParams = () => mock.history.get[mock.history.get.length - 1].params;
-
- convertToFixedRange.mockImplementation((range) => {
- if (range === defaultTimeRange) {
- return { ...mockDefaultRange };
- }
- if (range === mockFixedRange) {
- return { ...mockFixedRange };
- }
- if (range === mockRollingRange) {
- return { ...mockRollingRangeAsFixed };
- }
- throw new Error('Invalid time range');
- });
-
- beforeEach(() => {
- state = logsPageState();
- });
-
- describe('setInitData', () => {
- it('should commit the time range, environment, and pod name mutations', () =>
- testAction(
- setInitData,
- { timeRange: mockFixedRange, environmentName: mockEnvName, podName: mockPodName },
- state,
- [
- { type: types.SET_TIME_RANGE, payload: mockFixedRange },
- { type: types.SET_PROJECT_ENVIRONMENT, payload: mockEnvName },
- { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
- ],
- ));
- });
-
- describe('showFilteredLogs', () => {
- it('empty search should filter with defaults', () =>
- testAction(
- showFilteredLogs,
- undefined,
- state,
- [
- { type: types.SET_CURRENT_POD_NAME, payload: null },
- { type: types.SET_SEARCH, payload: '' },
- ],
- [{ type: 'fetchLogs', payload: 'used_search_bar' }],
- ));
-
- it('text search should filter with a search term', () =>
- testAction(
- showFilteredLogs,
- [mockSearch],
- state,
- [
- { type: types.SET_CURRENT_POD_NAME, payload: null },
- { type: types.SET_SEARCH, payload: mockSearch },
- ],
- [{ type: 'fetchLogs', payload: 'used_search_bar' }],
- ));
-
- it('pod search should filter with a pod selection', () =>
- testAction(
- showFilteredLogs,
- [{ type: TOKEN_TYPE_POD_NAME, value: { data: mockPodName, operator: '=' } }],
- state,
- [
- { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
- { type: types.SET_SEARCH, payload: '' },
- ],
- [{ type: 'fetchLogs', payload: 'used_search_bar' }],
- ));
-
- it('pod search should filter with a pod selection and a search term', () =>
- testAction(
- showFilteredLogs,
- [{ type: TOKEN_TYPE_POD_NAME, value: { data: mockPodName, operator: '=' } }, mockSearch],
- state,
- [
- { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
- { type: types.SET_SEARCH, payload: mockSearch },
- ],
- [{ type: 'fetchLogs', payload: 'used_search_bar' }],
- ));
-
- it('text search should filter with two search terms', () =>
- testAction(
- showFilteredLogs,
- ['term1', 'term2'],
- state,
- [
- { type: types.SET_CURRENT_POD_NAME, payload: null },
- { type: types.SET_SEARCH, payload: `term1 term2` },
- ],
- [{ type: 'fetchLogs', payload: 'used_search_bar' }],
- ));
-
- it('pod search should filter with a pod selection and search terms before and after it', () =>
- testAction(
- showFilteredLogs,
- [
- 'term1',
- { type: TOKEN_TYPE_POD_NAME, value: { data: mockPodName, operator: '=' } },
- 'term2',
- ],
- state,
- [
- { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
- { type: types.SET_SEARCH, payload: `term1 term2` },
- ],
- [{ type: 'fetchLogs', payload: 'used_search_bar' }],
- ));
- });
-
- describe('showPodLogs', () => {
- it('should commit pod name', () =>
- testAction(
- showPodLogs,
- mockPodName,
- state,
- [{ type: types.SET_CURRENT_POD_NAME, payload: mockPodName }],
- [{ type: 'fetchLogs', payload: 'pod_log_changed' }],
- ));
- });
-
- describe('fetchEnvironments', () => {
- beforeEach(() => {
- mock = new MockAdapter(axios);
- });
-
- it('should commit RECEIVE_ENVIRONMENTS_DATA_SUCCESS mutation on correct data', () => {
- mock.onGet(mockEnvironmentsEndpoint).replyOnce(200, mockEnvironments);
- return testAction(
- fetchEnvironments,
- mockEnvironmentsEndpoint,
- state,
- [
- { type: types.REQUEST_ENVIRONMENTS_DATA },
- { type: types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS, payload: mockEnvironments },
- ],
- [{ type: 'fetchLogs', payload: 'environment_selected' }],
- );
- });
-
- it('should commit RECEIVE_ENVIRONMENTS_DATA_ERROR when the request fails', () => {
- mock.onGet(mockEnvironmentsEndpoint).replyOnce(500);
- return testAction(
- fetchEnvironments,
- mockEnvironmentsEndpoint,
- state,
- [
- { type: types.REQUEST_ENVIRONMENTS_DATA },
- { type: types.RECEIVE_ENVIRONMENTS_DATA_ERROR },
- ],
- [],
- );
- });
- });
-
- describe('when the backend responds successfully', () => {
- let expectedMutations;
- let expectedActions;
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
- mock.onGet(mockLogsEndpoint).reply(200, mockResponse);
- mock.onGet(mockLogsEndpoint).replyOnce(202); // mock reactive cache
-
- state.environments.options = mockEnvironments;
- state.environments.current = mockEnvName;
- });
-
- afterEach(() => {
- mock.reset();
- });
-
- describe('fetchLogs', () => {
- beforeEach(() => {
- expectedMutations = [
- { type: types.REQUEST_LOGS_DATA },
- {
- type: types.RECEIVE_LOGS_DATA_SUCCESS,
- payload: { logs: mockLogsResult, cursor: mockNextCursor },
- },
- { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
- { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
- ];
-
- expectedActions = [];
- });
-
- it('should commit logs and pod data when a pod name is defined', () => {
- state.pods.current = mockPodName;
- state.timeRange.current = mockFixedRange;
-
- return testAction(fetchLogs, null, state, expectedMutations, expectedActions, () => {
- expect(latestGetParams()).toMatchObject({
- pod_name: mockPodName,
- });
- });
- });
-
- it('should commit logs and pod data when a pod name is defined and a non-default date range is set', () => {
- state.pods.current = mockPodName;
- state.timeRange.current = mockFixedRange;
- state.logs.cursor = mockCursor;
-
- return testAction(fetchLogs, null, state, expectedMutations, expectedActions, () => {
- expect(latestGetParams()).toEqual({
- pod_name: mockPodName,
- start_time: mockFixedRange.start,
- end_time: mockFixedRange.end,
- cursor: mockCursor,
- });
- });
- });
-
- it('should commit logs and pod data when a pod name and search are set and the date range is invalid', () => {
- state.pods.current = mockPodName;
- state.search = mockSearch;
- state.timeRange.current = 'INVALID_TIME_RANGE';
-
- expectedMutations.splice(1, 0, {
- type: types.SHOW_TIME_RANGE_INVALID_WARNING,
- });
-
- return testAction(fetchLogs, null, state, expectedMutations, expectedActions, () => {
- expect(latestGetParams()).toEqual({
- pod_name: mockPodName,
- search: mockSearch,
- });
- });
- });
-
- it('should commit logs and pod data when no pod name is defined', () => {
- state.timeRange.current = defaultTimeRange;
-
- return testAction(fetchLogs, null, state, expectedMutations, expectedActions, () => {
- expect(latestGetParams()).toEqual({
- start_time: expect.any(String),
- end_time: expect.any(String),
- });
- });
- });
- });
-
- describe('fetchMoreLogsPrepend', () => {
- beforeEach(() => {
- expectedMutations = [
- { type: types.REQUEST_LOGS_DATA_PREPEND },
- {
- type: types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS,
- payload: { logs: mockLogsResult, cursor: mockNextCursor },
- },
- ];
-
- expectedActions = [];
- });
-
- it('should commit logs and pod data when a pod name is defined', () => {
- state.pods.current = mockPodName;
- state.timeRange.current = mockFixedRange;
-
- expectedActions = [];
-
- return testAction(
- fetchMoreLogsPrepend,
- null,
- state,
- expectedMutations,
- expectedActions,
- () => {
- expect(latestGetParams()).toMatchObject({
- pod_name: mockPodName,
- });
- },
- );
- });
-
- it('should commit logs and pod data when a pod name is defined and a non-default date range is set', () => {
- state.pods.current = mockPodName;
- state.timeRange.current = mockFixedRange;
- state.logs.cursor = mockCursor;
-
- return testAction(
- fetchMoreLogsPrepend,
- null,
- state,
- expectedMutations,
- expectedActions,
- () => {
- expect(latestGetParams()).toEqual({
- pod_name: mockPodName,
- start_time: mockFixedRange.start,
- end_time: mockFixedRange.end,
- cursor: mockCursor,
- });
- },
- );
- });
-
- it('should commit logs and pod data when a pod name and search are set and the date range is invalid', () => {
- state.pods.current = mockPodName;
- state.search = mockSearch;
- state.timeRange.current = 'INVALID_TIME_RANGE';
-
- expectedMutations.splice(1, 0, {
- type: types.SHOW_TIME_RANGE_INVALID_WARNING,
- });
-
- return testAction(
- fetchMoreLogsPrepend,
- null,
- state,
- expectedMutations,
- expectedActions,
- () => {
- expect(latestGetParams()).toEqual({
- pod_name: mockPodName,
- search: mockSearch,
- });
- },
- );
- });
-
- it('should commit logs and pod data when no pod name is defined', () => {
- state.timeRange.current = defaultTimeRange;
-
- return testAction(
- fetchMoreLogsPrepend,
- null,
- state,
- expectedMutations,
- expectedActions,
- () => {
- expect(latestGetParams()).toEqual({
- start_time: expect.any(String),
- end_time: expect.any(String),
- });
- },
- );
- });
-
- it('should not commit logs or pod data when it has reached the end', () => {
- state.logs.isComplete = true;
- state.logs.cursor = null;
-
- return testAction(
- fetchMoreLogsPrepend,
- null,
- state,
- [], // no mutations done
- [], // no actions dispatched
- () => {
- expect(mock.history.get).toHaveLength(0);
- },
- );
- });
- });
- });
-
- describe('when the backend responds with an error', () => {
- beforeEach(() => {
- mock = new MockAdapter(axios);
- mock.onGet(mockLogsEndpoint).reply(500);
- });
-
- afterEach(() => {
- mock.reset();
- });
-
- it('fetchLogs should commit logs and pod errors', () => {
- state.environments.options = mockEnvironments;
- state.environments.current = mockEnvName;
- state.timeRange.current = defaultTimeRange;
-
- return testAction(
- fetchLogs,
- null,
- state,
- [
- { type: types.REQUEST_LOGS_DATA },
- { type: types.RECEIVE_PODS_DATA_ERROR },
- { type: types.RECEIVE_LOGS_DATA_ERROR },
- ],
- [],
- () => {
- expect(mock.history.get[0].url).toBe(mockLogsEndpoint);
- },
- );
- });
-
- it('fetchMoreLogsPrepend should commit logs and pod errors', () => {
- state.environments.options = mockEnvironments;
- state.environments.current = mockEnvName;
- state.timeRange.current = defaultTimeRange;
-
- return testAction(
- fetchMoreLogsPrepend,
- null,
- state,
- [
- { type: types.REQUEST_LOGS_DATA_PREPEND },
- { type: types.RECEIVE_LOGS_DATA_PREPEND_ERROR },
- ],
- [],
- () => {
- expect(mock.history.get[0].url).toBe(mockLogsEndpoint);
- },
- );
- });
- });
-});
-
-describe('Tracking user interaction', () => {
- let commit;
- let dispatch;
- let state;
- let mock;
-
- beforeEach(() => {
- jest.spyOn(Tracking, 'event');
- commit = jest.fn();
- dispatch = jest.fn();
- state = logsPageState();
- state.environments.options = mockEnvironments;
- state.environments.current = mockEnvName;
-
- mock = new MockAdapter(axios);
- });
-
- afterEach(() => {
- mock.reset();
- });
-
- describe('Logs with data', () => {
- beforeEach(() => {
- mock.onGet(mockLogsEndpoint).reply(200, mockResponse);
- mock.onGet(mockLogsEndpoint).replyOnce(202); // mock reactive cache
- });
-
- it('tracks fetched logs with data', () => {
- return fetchLogs({ state, commit, dispatch }, 'environment_selected').then(() => {
- expect(Tracking.event).toHaveBeenCalledWith(document.body.dataset.page, 'logs_view', {
- label: 'environment_selected',
- property: 'count',
- value: 1,
- });
- });
- });
- });
-
- describe('Logs without data', () => {
- beforeEach(() => {
- mock.onGet(mockLogsEndpoint).reply(200, {
- ...mockResponse,
- logs: [],
- });
- mock.onGet(mockLogsEndpoint).replyOnce(202); // mock reactive cache
- });
-
- it('does not track empty log responses', () => {
- return fetchLogs({ state, commit, dispatch }).then(() => {
- expect(Tracking.event).not.toHaveBeenCalled();
- });
- });
- });
-});
diff --git a/spec/frontend/logs/stores/getters_spec.js b/spec/frontend/logs/stores/getters_spec.js
deleted file mode 100644
index 9d213d8c01f..00000000000
--- a/spec/frontend/logs/stores/getters_spec.js
+++ /dev/null
@@ -1,75 +0,0 @@
-import { trace, showAdvancedFilters } from '~/logs/stores/getters';
-import logsPageState from '~/logs/stores/state';
-
-import { mockLogsResult, mockTrace, mockEnvName, mockEnvironments } from '../mock_data';
-
-describe('Logs Store getters', () => {
- let state;
-
- beforeEach(() => {
- state = logsPageState();
- });
-
- describe('trace', () => {
- describe('when state is initialized', () => {
- it('returns an empty string', () => {
- expect(trace(state)).toEqual('');
- });
- });
-
- describe('when state logs are empty', () => {
- beforeEach(() => {
- state.logs.lines = [];
- });
-
- it('returns an empty string', () => {
- expect(trace(state)).toEqual('');
- });
- });
-
- describe('when state logs are set', () => {
- beforeEach(() => {
- state.logs.lines = mockLogsResult;
- });
-
- it('returns the log lines joined as a trace', () => {
- expect(trace(state)).toEqual(mockTrace.join('\n'));
- });
- });
- });
-
- describe('showAdvancedFilters', () => {
- describe('when no environments are set', () => {
- beforeEach(() => {
- state.environments.current = mockEnvName;
- state.environments.options = [];
- });
-
- it('returns false', () => {
- expect(showAdvancedFilters(state)).toBe(false);
- });
- });
-
- describe('when the environment supports filters', () => {
- beforeEach(() => {
- state.environments.current = mockEnvName;
- state.environments.options = mockEnvironments;
- });
-
- it('returns true', () => {
- expect(showAdvancedFilters(state)).toBe(true);
- });
- });
-
- describe('when the environment does not support filters', () => {
- beforeEach(() => {
- state.environments.options = mockEnvironments;
- state.environments.current = mockEnvironments[1].name;
- });
-
- it('returns false', () => {
- expect(showAdvancedFilters(state)).toBe(false);
- });
- });
- });
-});
diff --git a/spec/frontend/logs/stores/mutations_spec.js b/spec/frontend/logs/stores/mutations_spec.js
deleted file mode 100644
index 988197a8350..00000000000
--- a/spec/frontend/logs/stores/mutations_spec.js
+++ /dev/null
@@ -1,257 +0,0 @@
-import * as types from '~/logs/stores/mutation_types';
-import mutations from '~/logs/stores/mutations';
-
-import logsPageState from '~/logs/stores/state';
-import {
- mockEnvName,
- mockEnvironments,
- mockPods,
- mockPodName,
- mockLogsResult,
- mockSearch,
- mockCursor,
- mockNextCursor,
-} from '../mock_data';
-
-describe('Logs Store Mutations', () => {
- let state;
-
- beforeEach(() => {
- state = logsPageState();
- });
-
- it('ensures mutation types are correctly named', () => {
- Object.keys(types).forEach((k) => {
- expect(k).toEqual(types[k]);
- });
- });
-
- describe('SET_PROJECT_ENVIRONMENT', () => {
- it('sets the environment', () => {
- mutations[types.SET_PROJECT_ENVIRONMENT](state, mockEnvName);
- expect(state.environments.current).toEqual(mockEnvName);
- });
- });
-
- describe('SET_SEARCH', () => {
- it('sets the search', () => {
- mutations[types.SET_SEARCH](state, mockSearch);
- expect(state.search).toEqual(mockSearch);
- });
- });
-
- describe('REQUEST_ENVIRONMENTS_DATA', () => {
- it('inits data', () => {
- mutations[types.REQUEST_ENVIRONMENTS_DATA](state);
- expect(state.environments.options).toEqual([]);
- expect(state.environments.isLoading).toEqual(true);
- });
- });
-
- describe('RECEIVE_ENVIRONMENTS_DATA_SUCCESS', () => {
- it('receives environments data and stores it as options', () => {
- expect(state.environments.options).toEqual([]);
-
- mutations[types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS](state, mockEnvironments);
-
- expect(state.environments.options).toEqual(mockEnvironments);
- expect(state.environments.isLoading).toEqual(false);
- });
- });
-
- describe('RECEIVE_ENVIRONMENTS_DATA_ERROR', () => {
- it('captures an error loading environments', () => {
- mutations[types.RECEIVE_ENVIRONMENTS_DATA_ERROR](state);
-
- expect(state.environments).toEqual({
- options: [],
- isLoading: false,
- current: null,
- fetchError: true,
- });
- });
- });
-
- describe('REQUEST_LOGS_DATA', () => {
- it('starts loading for logs', () => {
- mutations[types.REQUEST_LOGS_DATA](state);
-
- expect(state.timeRange.current).toEqual({
- start: expect.any(String),
- end: expect.any(String),
- });
-
- expect(state.logs).toEqual({
- lines: [],
- cursor: null,
- fetchError: false,
- isLoading: true,
- isComplete: false,
- });
- });
- });
-
- describe('RECEIVE_LOGS_DATA_SUCCESS', () => {
- it('receives logs lines and cursor', () => {
- mutations[types.RECEIVE_LOGS_DATA_SUCCESS](state, {
- logs: mockLogsResult,
- cursor: mockCursor,
- });
-
- expect(state.logs).toEqual({
- lines: mockLogsResult,
- isLoading: false,
- cursor: mockCursor,
- isComplete: false,
- fetchError: false,
- });
- });
-
- it('receives logs lines and a null cursor to indicate the end', () => {
- mutations[types.RECEIVE_LOGS_DATA_SUCCESS](state, {
- logs: mockLogsResult,
- cursor: null,
- });
-
- expect(state.logs).toEqual({
- lines: mockLogsResult,
- isLoading: false,
- cursor: null,
- isComplete: true,
- fetchError: false,
- });
- });
- });
-
- describe('RECEIVE_LOGS_DATA_ERROR', () => {
- it('receives log data error and stops loading', () => {
- mutations[types.RECEIVE_LOGS_DATA_ERROR](state);
-
- expect(state.logs).toEqual({
- lines: [],
- isLoading: false,
- cursor: null,
- isComplete: false,
- fetchError: true,
- });
- });
- });
-
- describe('REQUEST_LOGS_DATA_PREPEND', () => {
- it('starts loading while more logs are requested', () => {
- mutations[types.REQUEST_LOGS_DATA_PREPEND](state);
-
- expect(state.logs.isLoading).toBe(true);
- });
- });
-
- describe('RECEIVE_LOGS_DATA_PREPEND_SUCCESS', () => {
- it('receives logs lines and cursor', () => {
- mutations[types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS](state, {
- logs: mockLogsResult,
- cursor: mockCursor,
- });
-
- expect(state.logs).toEqual({
- lines: mockLogsResult,
- isLoading: false,
- cursor: mockCursor,
- isComplete: false,
- fetchError: false,
- });
- });
-
- it('receives additional logs lines and a new cursor', () => {
- mutations[types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS](state, {
- logs: mockLogsResult,
- cursor: mockCursor,
- });
-
- mutations[types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS](state, {
- logs: mockLogsResult,
- cursor: mockNextCursor,
- });
-
- expect(state.logs).toEqual({
- lines: [...mockLogsResult, ...mockLogsResult],
- isLoading: false,
- cursor: mockNextCursor,
- isComplete: false,
- fetchError: false,
- });
- });
-
- it('receives logs lines and a null cursor to indicate it is complete', () => {
- mutations[types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS](state, {
- logs: mockLogsResult,
- cursor: null,
- });
-
- expect(state.logs).toEqual({
- lines: mockLogsResult,
- isLoading: false,
- cursor: null,
- isComplete: true,
- fetchError: false,
- });
- });
- });
-
- describe('RECEIVE_LOGS_DATA_PREPEND_ERROR', () => {
- it('receives a log data error and stops loading', () => {
- mutations[types.RECEIVE_LOGS_DATA_PREPEND_ERROR](state);
-
- expect(state.logs.isLoading).toBe(false);
- expect(state.logs.fetchError).toBe(true);
- });
- });
-
- describe('SET_CURRENT_POD_NAME', () => {
- it('sets the current pod name', () => {
- mutations[types.SET_CURRENT_POD_NAME](state, mockPodName);
-
- expect(state.pods.current).toEqual(mockPodName);
- });
- });
-
- describe('SET_TIME_RANGE', () => {
- it('has a default time range set', () => {
- expect(state.timeRange.selected).toEqual(expect.any(Object));
- expect(state.timeRange.current).toEqual(expect.any(Object));
- });
-
- it('sets a time range', () => {
- const mockRange = {
- start: '2020-01-10T18:00:00.000Z',
- end: '2020-01-10T10:00:00.000Z',
- };
- mutations[types.SET_TIME_RANGE](state, mockRange);
-
- expect(state.timeRange.selected).toEqual(mockRange);
- expect(state.timeRange.current).toEqual(mockRange);
- });
- });
-
- describe('RECEIVE_PODS_DATA_SUCCESS', () => {
- it('receives pods data and stores it as options', () => {
- mutations[types.RECEIVE_PODS_DATA_SUCCESS](state, mockPods);
-
- expect(state.pods).toEqual(
- expect.objectContaining({
- options: mockPods,
- }),
- );
- });
- });
- describe('RECEIVE_PODS_DATA_ERROR', () => {
- it('captures an error loading pods', () => {
- mutations[types.RECEIVE_PODS_DATA_ERROR](state);
-
- expect(state.pods).toEqual(
- expect.objectContaining({
- options: [],
- }),
- );
- });
- });
-});
diff --git a/spec/frontend/merge_request_tabs_spec.js b/spec/frontend/merge_request_tabs_spec.js
index f0f051cbc8b..2001bb5f95e 100644
--- a/spec/frontend/merge_request_tabs_spec.js
+++ b/spec/frontend/merge_request_tabs_spec.js
@@ -2,6 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import initMrPage from 'helpers/init_vue_mr_page_helper';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import axios from '~/lib/utils/axios_utils';
import MergeRequestTabs from '~/merge_request_tabs';
import '~/lib/utils/common_utils';
@@ -24,6 +25,8 @@ describe('MergeRequestTabs', () => {
};
beforeEach(() => {
+ stubPerformanceWebAPI();
+
initMrPage();
testContext.class = new MergeRequestTabs({ stubLocation });
@@ -331,6 +334,8 @@ describe('MergeRequestTabs', () => {
${'diffs'} | ${true} | ${'hides'}
${'commits'} | ${true} | ${'hides'}
`('it $hidesText expand button on $tab tab', ({ tab, hides }) => {
+ window.gon = { features: { movedMrSidebar: true } };
+
const expandButton = document.createElement('div');
expandButton.classList.add('js-expand-sidebar');
@@ -344,16 +349,16 @@ describe('MergeRequestTabs', () => {
testContext.class = new MergeRequestTabs({ stubLocation });
testContext.class.tabShown(tab, 'foobar');
- expect(testContext.class.expandSidebar.classList.contains('gl-display-none!')).toBe(hides);
+ testContext.class.expandSidebar.forEach((el) => {
+ expect(el.classList.contains('gl-display-none!')).toBe(hides);
+ });
+
+ window.gon = {};
});
describe('when switching tabs', () => {
const SCROLL_TOP = 100;
- beforeAll(() => {
- jest.useFakeTimers();
- });
-
beforeEach(() => {
jest.spyOn(window, 'scrollTo').mockImplementation(() => {});
testContext.class.mergeRequestTabs = document.createElement('div');
@@ -362,10 +367,6 @@ describe('MergeRequestTabs', () => {
testContext.class.scrollPositions = { newTab: SCROLL_TOP };
});
- afterAll(() => {
- jest.useRealTimers();
- });
-
it('scrolls to the stored position, if one is stored', () => {
testContext.class.tabShown('newTab');
diff --git a/spec/frontend/milestones/components/delete_milestone_modal_spec.js b/spec/frontend/milestones/components/delete_milestone_modal_spec.js
index b9ba0833c4f..6692a3b9347 100644
--- a/spec/frontend/milestones/components/delete_milestone_modal_spec.js
+++ b/spec/frontend/milestones/components/delete_milestone_modal_spec.js
@@ -1,44 +1,59 @@
-import Vue from 'vue';
+import { GlSprintf, GlModal } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
import { TEST_HOST } from 'helpers/test_constants';
-import mountComponent from 'helpers/vue_mount_component_helper';
import axios from '~/lib/utils/axios_utils';
-import { redirectTo } from '~/lib/utils/url_utility';
-import deleteMilestoneModal from '~/milestones/components/delete_milestone_modal.vue';
+import DeleteMilestoneModal from '~/milestones/components/delete_milestone_modal.vue';
import eventHub from '~/milestones/event_hub';
+import { redirectTo } from '~/lib/utils/url_utility';
+import { createAlert } from '~/flash';
-jest.mock('~/lib/utils/url_utility', () => ({
- ...jest.requireActual('~/lib/utils/url_utility'),
- redirectTo: jest.fn(),
-}));
+jest.mock('~/lib/utils/url_utility');
+jest.mock('~/flash');
-describe('delete_milestone_modal.vue', () => {
- const Component = Vue.extend(deleteMilestoneModal);
- const props = {
+describe('Delete milestone modal', () => {
+ let wrapper;
+ const mockProps = {
issueCount: 1,
mergeRequestCount: 2,
milestoneId: 3,
milestoneTitle: 'my milestone title',
milestoneUrl: `${TEST_HOST}/delete_milestone_modal.vue/milestone`,
};
- let vm;
+
+ const findModal = () => wrapper.findComponent(GlModal);
+
+ const createComponent = (props) => {
+ wrapper = shallowMount(DeleteMilestoneModal, {
+ propsData: {
+ ...mockProps,
+ ...props,
+ },
+ stubs: {
+ GlSprintf,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
describe('onSubmit', () => {
beforeEach(() => {
- vm = mountComponent(Component, props);
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
});
it('deletes milestone and redirects to overview page', async () => {
const responseURL = `${TEST_HOST}/delete_milestone_modal.vue/milestoneOverview`;
jest.spyOn(axios, 'delete').mockImplementation((url) => {
- expect(url).toBe(props.milestoneUrl);
+ expect(url).toBe(mockProps.milestoneUrl);
expect(eventHub.$emit).toHaveBeenCalledWith(
'deleteMilestoneModal.requestStarted',
- props.milestoneUrl,
+ mockProps.milestoneUrl,
);
eventHub.$emit.mockReset();
return Promise.resolve({
@@ -47,55 +62,71 @@ describe('delete_milestone_modal.vue', () => {
},
});
});
-
- await vm.onSubmit();
+ await findModal().vm.$emit('primary');
expect(redirectTo).toHaveBeenCalledWith(responseURL);
expect(eventHub.$emit).toHaveBeenCalledWith('deleteMilestoneModal.requestFinished', {
- milestoneUrl: props.milestoneUrl,
+ milestoneUrl: mockProps.milestoneUrl,
successful: true,
});
});
- it('displays error if deleting milestone failed', async () => {
- const dummyError = new Error('deleting milestone failed');
- dummyError.response = { status: 418 };
- jest.spyOn(axios, 'delete').mockImplementation((url) => {
- expect(url).toBe(props.milestoneUrl);
- expect(eventHub.$emit).toHaveBeenCalledWith(
- 'deleteMilestoneModal.requestStarted',
- props.milestoneUrl,
- );
- eventHub.$emit.mockReset();
- return Promise.reject(dummyError);
- });
+ it.each`
+ statusCode | alertMessage
+ ${418} | ${`Failed to delete milestone ${mockProps.milestoneTitle}`}
+ ${404} | ${`Milestone ${mockProps.milestoneTitle} was not found`}
+ `(
+ 'displays error if deleting milestone failed with code $statusCode',
+ async ({ statusCode, alertMessage }) => {
+ const dummyError = new Error('deleting milestone failed');
+ dummyError.response = { status: statusCode };
+ jest.spyOn(axios, 'delete').mockImplementation((url) => {
+ expect(url).toBe(mockProps.milestoneUrl);
+ expect(eventHub.$emit).toHaveBeenCalledWith(
+ 'deleteMilestoneModal.requestStarted',
+ mockProps.milestoneUrl,
+ );
+ eventHub.$emit.mockReset();
+ return Promise.reject(dummyError);
+ });
- await expect(vm.onSubmit()).rejects.toEqual(dummyError);
- expect(redirectTo).not.toHaveBeenCalled();
- expect(eventHub.$emit).toHaveBeenCalledWith('deleteMilestoneModal.requestFinished', {
- milestoneUrl: props.milestoneUrl,
- successful: false,
- });
- });
+ await expect(wrapper.vm.onSubmit()).rejects.toEqual(dummyError);
+ expect(createAlert).toHaveBeenCalledWith({
+ message: alertMessage,
+ });
+ expect(redirectTo).not.toHaveBeenCalled();
+ expect(eventHub.$emit).toHaveBeenCalledWith('deleteMilestoneModal.requestFinished', {
+ milestoneUrl: mockProps.milestoneUrl,
+ successful: false,
+ });
+ },
+ );
});
- describe('text', () => {
- it('contains the issue and milestone count', () => {
- vm = mountComponent(Component, props);
- const value = vm.text;
+ describe('Modal title and description', () => {
+ const emptyDescription = `You’re about to permanently delete the milestone ${mockProps.milestoneTitle}. This milestone is not currently used in any issues or merge requests.`;
+ const description = `You’re about to permanently delete the milestone ${mockProps.milestoneTitle} and remove it from 1 issue and 2 merge requests. Once deleted, it cannot be undone or recovered.`;
+ const title = `Delete milestone ${mockProps.milestoneTitle}?`;
- expect(value).toContain('remove it from 1 issue and 2 merge requests');
+ it('renders proper title', () => {
+ const value = findModal().props('title');
+ expect(value).toBe(title);
});
- it('contains neither issue nor milestone count', () => {
- vm = mountComponent(Component, {
- ...props,
- issueCount: 0,
- mergeRequestCount: 0,
- });
-
- const value = vm.text;
+ it.each`
+ statement | descriptionText | issueCount | mergeRequestCount
+ ${'1 issue and 2 merge requests'} | ${description} | ${1} | ${2}
+ ${'no issues and merge requests'} | ${emptyDescription} | ${0} | ${0}
+ `(
+ 'renders proper description when the milestone contains $statement',
+ ({ issueCount, mergeRequestCount, descriptionText }) => {
+ createComponent({
+ issueCount,
+ mergeRequestCount,
+ });
- expect(value).toContain('is not currently used');
- });
+ const value = findModal().text();
+ expect(value).toBe(descriptionText);
+ },
+ );
});
});
diff --git a/spec/frontend/milestones/components/milestone_combobox_spec.js b/spec/frontend/milestones/components/milestone_combobox_spec.js
index afd85fb78ce..a8e3d13dca0 100644
--- a/spec/frontend/milestones/components/milestone_combobox_spec.js
+++ b/spec/frontend/milestones/components/milestone_combobox_spec.js
@@ -154,9 +154,9 @@ describe('Milestone combobox component', () => {
};
describe('initialization behavior', () => {
- beforeEach(createComponent);
-
it('initializes the dropdown with milestones when mounted', () => {
+ createComponent();
+
return waitForRequests().then(() => {
expect(projectMilestonesApiCallSpy).toHaveBeenCalledTimes(1);
expect(groupMilestonesApiCallSpy).toHaveBeenCalledTimes(1);
@@ -164,6 +164,8 @@ describe('Milestone combobox component', () => {
});
it('shows a spinner while network requests are in progress', () => {
+ createComponent();
+
expect(findLoadingIcon().exists()).toBe(true);
return waitForRequests().then(() => {
@@ -172,6 +174,8 @@ describe('Milestone combobox component', () => {
});
it('shows additional links', () => {
+ createComponent();
+
const links = wrapper.findAll('[data-testid="milestone-combobox-extra-links"]');
links.wrappers.forEach((item, idx) => {
expect(item.text()).toBe(extraLinks[idx].text);
diff --git a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
index a9f37f90561..14f04d9b767 100644
--- a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
@@ -35,7 +35,7 @@ exports[`Dashboard template matches the default snapshot 1`] = `
class="prometheus-graphs-header d-sm-flex flex-sm-wrap pt-2 pr-1 pb-0 pl-2 border-bottom bg-gray-light"
>
<div
- class="mb-2 mr-2 d-flex d-sm-block"
+ class="gl-mb-3 gl-mr-3 gl-display-flex gl-sm-display-block"
>
<dashboards-dropdown-stub
class="flex-grow-1"
diff --git a/spec/frontend/monitoring/components/dashboard_panel_spec.js b/spec/frontend/monitoring/components/dashboard_panel_spec.js
index 1f9eb03b5d4..7c54a4742ac 100644
--- a/spec/frontend/monitoring/components/dashboard_panel_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_panel_spec.js
@@ -5,7 +5,6 @@ import Vuex from 'vuex';
import { nextTick } from 'vue';
import { setTestTimeout } from 'helpers/timeout';
import axios from '~/lib/utils/axios_utils';
-import invalidUrl from '~/lib/utils/invalid_url';
import MonitorAnomalyChart from '~/monitoring/components/charts/anomaly.vue';
import MonitorBarChart from '~/monitoring/components/charts/bar.vue';
@@ -27,13 +26,7 @@ import {
heatmapGraphData,
barGraphData,
} from '../graph_data';
-import {
- mockLogsHref,
- mockLogsPath,
- mockNamespace,
- mockNamespacedData,
- mockTimeRange,
-} from '../mock_data';
+import { mockNamespace, mockNamespacedData, mockTimeRange } from '../mock_data';
const mocks = {
$toast: {
@@ -65,7 +58,6 @@ describe('Dashboard Panel', () => {
},
store,
mocks,
- provide: { glFeatures: { monitorLogging: true } },
...options,
});
};
@@ -335,86 +327,6 @@ describe('Dashboard Panel', () => {
});
});
- describe('View Logs dropdown item', () => {
- const findViewLogsLink = () => wrapper.find({ ref: 'viewLogsLink' });
-
- beforeEach(async () => {
- createWrapper();
- await nextTick();
- });
-
- it('is not present by default', async () => {
- await nextTick();
- expect(findViewLogsLink().exists()).toBe(false);
- });
-
- it('is not present if a time range is not set', async () => {
- state.logsPath = mockLogsPath;
- state.timeRange = null;
-
- await nextTick();
- expect(findViewLogsLink().exists()).toBe(false);
- });
-
- it('is not present if the logs path is default', async () => {
- state.logsPath = invalidUrl;
- state.timeRange = mockTimeRange;
-
- await nextTick();
- expect(findViewLogsLink().exists()).toBe(false);
- });
-
- it('is not present if the logs path is not set', async () => {
- state.logsPath = null;
- state.timeRange = mockTimeRange;
-
- await nextTick();
- expect(findViewLogsLink().exists()).toBe(false);
- });
-
- it('is present when a logs path and a time range are present', async () => {
- state.logsPath = mockLogsPath;
- state.timeRange = mockTimeRange;
-
- await nextTick();
- expect(findViewLogsLink().attributes('href')).toMatch(mockLogsHref);
- });
-
- describe(':monitor_logging feature flag', () => {
- it.each`
- flagState | logsState | expected
- ${true} | ${'shows'} | ${true}
- ${false} | ${'hides'} | ${false}
- `('$logsState logs when flag state is $flagState', async ({ flagState, expected }) => {
- createWrapper({}, { provide: { glFeatures: { monitorLogging: flagState } } });
- state.logsPath = mockLogsPath;
- state.timeRange = mockTimeRange;
- await nextTick();
-
- expect(findViewLogsLink().exists()).toBe(expected);
- });
- });
-
- it('is overridden when a datazoom event is received', async () => {
- state.logsPath = mockLogsPath;
- state.timeRange = mockTimeRange;
-
- const zoomedTimeRange = {
- start: '2020-01-01T00:00:00.000Z',
- end: '2020-01-01T01:00:00.000Z',
- };
-
- findTimeChart().vm.$emit('datazoom', zoomedTimeRange);
-
- await nextTick();
- const start = encodeURIComponent(zoomedTimeRange.start);
- const end = encodeURIComponent(zoomedTimeRange.end);
- expect(findViewLogsLink().attributes('href')).toMatch(
- `${mockLogsPath}?start=${start}&end=${end}`,
- );
- });
- });
-
describe('when clipboard data is available', () => {
const clipboardText = 'A value to copy.';
@@ -507,14 +419,6 @@ describe('Dashboard Panel', () => {
createWrapper({ namespace: mockNamespace });
});
- it('handles namespaced time range and logs path state', async () => {
- store.state[mockNamespace].timeRange = mockTimeRange;
- store.state[mockNamespace].logsPath = mockLogsPath;
-
- await nextTick();
- expect(wrapper.find({ ref: 'viewLogsLink' }).attributes().href).toBe(mockLogsHref);
- });
-
it('handles namespaced deployment data state', async () => {
store.state[mockNamespace].deploymentData = mockDeploymentData;
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index 6c5972e1140..90171cfc65e 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -75,6 +75,7 @@ describe('Dashboard', () => {
if (store.dispatch.mockReset) {
store.dispatch.mockReset();
}
+ wrapper.destroy();
});
describe('request information to the server', () => {
@@ -569,28 +570,37 @@ describe('Dashboard', () => {
const findDraggablePanels = () => wrapper.findAll('.js-draggable-panel');
const findRearrangeButton = () => wrapper.find('.js-rearrange-button');
- beforeEach(async () => {
+ const setup = async () => {
// call original dispatch
store.dispatch.mockRestore();
createShallowWrapper({ hasMetrics: true });
setupStoreWithData(store);
await nextTick();
- });
+ };
+
+ it('wraps vuedraggable', async () => {
+ await setup();
- it('wraps vuedraggable', () => {
expect(findDraggablePanels().exists()).toBe(true);
expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount);
});
- it('is disabled by default', () => {
+ it('is disabled by default', async () => {
+ await setup();
+
expect(findRearrangeButton().exists()).toBe(false);
expect(findEnabledDraggables().length).toBe(0);
});
describe('when rearrange is enabled', () => {
beforeEach(async () => {
- wrapper.setProps({ rearrangePanelsAvailable: true });
+ // call original dispatch
+ store.dispatch.mockRestore();
+
+ createShallowWrapper({ hasMetrics: true, rearrangePanelsAvailable: true });
+ setupStoreWithData(store);
+
await nextTick();
});
@@ -602,17 +612,18 @@ describe('Dashboard', () => {
const findFirstDraggableRemoveButton = () =>
findDraggablePanels().at(0).find('.js-draggable-remove');
- beforeEach(async () => {
+ it('enables draggables', async () => {
findRearrangeButton().vm.$emit('click');
await nextTick();
- });
- it('it enables draggables', () => {
expect(findRearrangeButton().attributes('pressed')).toBeTruthy();
expect(findEnabledDraggables().wrappers).toEqual(findDraggables().wrappers);
});
it('metrics can be swapped', async () => {
+ findRearrangeButton().vm.$emit('click');
+ await nextTick();
+
const firstDraggable = findDraggables().at(0);
const mockMetrics = [...metricsDashboardViewModel.panelGroups[0].panels];
@@ -624,6 +635,7 @@ describe('Dashboard', () => {
firstDraggable.vm.$emit('input', mockMetrics);
await nextTick();
+
const { panels } = wrapper.vm.dashboard.panelGroups[0];
expect(panels[1].title).toEqual(firstTitle);
@@ -631,18 +643,23 @@ describe('Dashboard', () => {
});
it('shows a remove button, which removes a panel', async () => {
+ findRearrangeButton().vm.$emit('click');
+ await nextTick();
+
expect(findFirstDraggableRemoveButton().find('a').exists()).toBe(true);
expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount);
- findFirstDraggableRemoveButton().trigger('click');
+ await findFirstDraggableRemoveButton().trigger('click');
- await nextTick();
expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount - 1);
});
it('it disables draggables when clicked again', async () => {
findRearrangeButton().vm.$emit('click');
await nextTick();
+
+ findRearrangeButton().vm.$emit('click');
+ await nextTick();
expect(findRearrangeButton().attributes('pressed')).toBeFalsy();
expect(findEnabledDraggables().length).toBe(0);
});
diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index ae1a4e16b30..49e8ab9ebd4 100644
--- a/spec/frontend/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -180,11 +180,6 @@ describe('Monitoring mutations', () => {
});
it('should not remove previously set properties', () => {
- const defaultLogsPath = stateCopy.logsPath;
-
- mutations[types.SET_INITIAL_STATE](stateCopy, {
- logsPath: defaultLogsPath,
- });
mutations[types.SET_INITIAL_STATE](stateCopy, {
dashboardEndpoint: 'dashboard.json',
});
@@ -196,7 +191,6 @@ describe('Monitoring mutations', () => {
});
expect(stateCopy).toMatchObject({
- logsPath: defaultLogsPath,
dashboardEndpoint: 'dashboard.json',
projectPath: '/gitlab-org/gitlab-foss',
currentEnvironmentName: 'canary',
@@ -227,11 +221,6 @@ describe('Monitoring mutations', () => {
});
it('should not remove previously set properties', () => {
- const defaultLogsPath = stateCopy.logsPath;
-
- mutations[types.SET_ENDPOINTS](stateCopy, {
- logsPath: defaultLogsPath,
- });
mutations[types.SET_ENDPOINTS](stateCopy, {
dashboardEndpoint: 'dashboard.json',
});
@@ -240,7 +229,6 @@ describe('Monitoring mutations', () => {
});
expect(stateCopy).toMatchObject({
- logsPath: defaultLogsPath,
dashboardEndpoint: 'dashboard.json',
projectPath: '/gitlab-org/gitlab-foss',
});
diff --git a/spec/frontend/new_branch_spec.js b/spec/frontend/new_branch_spec.js
index e4f4b3fa5b5..5a09598059d 100644
--- a/spec/frontend/new_branch_spec.js
+++ b/spec/frontend/new_branch_spec.js
@@ -1,4 +1,3 @@
-import $ from 'jquery';
import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import NewBranchForm from '~/new_branch_form';
@@ -11,17 +10,19 @@ describe('Branch', () => {
describe('create a new branch', () => {
function fillNameWith(value) {
- $('.js-branch-name').val(value).trigger('blur');
+ document.querySelector('.js-branch-name').value = value;
+ const event = new CustomEvent('blur');
+ document.querySelector('.js-branch-name').dispatchEvent(event);
}
function expectToHaveError(error) {
- expect($('.js-branch-name-error span').text()).toEqual(error);
+ expect(document.querySelector('.js-branch-name-error').textContent).toEqual(error);
}
beforeEach(() => {
loadHTMLFixture('branches/new_branch.html');
- $('form').on('submit', (e) => e.preventDefault());
- testContext.form = new NewBranchForm($('.js-create-branch-form'), []);
+ document.querySelector('form').addEventListener('submit', (e) => e.preventDefault());
+ testContext.form = new NewBranchForm(document.querySelector('.js-create-branch-form'), []);
});
afterEach(() => {
@@ -171,34 +172,34 @@ describe('Branch', () => {
it('removes the error message when is a valid name', () => {
fillNameWith('foo?bar');
- expect($('.js-branch-name-error span').length).toEqual(1);
+ expect(document.querySelector('.js-branch-name-error').textContent).not.toEqual('');
fillNameWith('foobar');
- expect($('.js-branch-name-error span').length).toEqual(0);
+ expect(document.querySelector('.js-branch-name-error').textContent).toEqual('');
});
it('can have dashes anywhere', () => {
fillNameWith('-foo-bar-zoo-');
- expect($('.js-branch-name-error span').length).toEqual(0);
+ expect(document.querySelector('.js-branch-name-error').textContent).toEqual('');
});
it('can have underscores anywhere', () => {
fillNameWith('_foo_bar_zoo_');
- expect($('.js-branch-name-error span').length).toEqual(0);
+ expect(document.querySelector('.js-branch-name-error').textContent).toEqual('');
});
it('can have numbers anywhere', () => {
fillNameWith('1foo2bar3zoo4');
- expect($('.js-branch-name-error span').length).toEqual(0);
+ expect(document.querySelector('.js-branch-name-error').textContent).toEqual('');
});
it('can be only letters', () => {
fillNameWith('foo');
- expect($('.js-branch-name-error span').length).toEqual(0);
+ expect(document.querySelector('.js-branch-name-error').textContent).toEqual('');
});
});
});
diff --git a/spec/frontend/notebook/cells/code_spec.js b/spec/frontend/notebook/cells/code_spec.js
index 9a2db061278..10762a1c3a2 100644
--- a/spec/frontend/notebook/cells/code_spec.js
+++ b/spec/frontend/notebook/cells/code_spec.js
@@ -1,89 +1,73 @@
-import Vue, { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
import fixture from 'test_fixtures/blob/notebook/basic.json';
-import CodeComponent from '~/notebook/cells/code.vue';
-
-const Component = Vue.extend(CodeComponent);
+import Code from '~/notebook/cells/code.vue';
describe('Code component', () => {
- let vm;
-
+ let wrapper;
let json;
+ const mountComponent = (cell) => mount(Code, { propsData: { cell } });
+
beforeEach(() => {
// Clone fixture as it could be modified by tests
json = JSON.parse(JSON.stringify(fixture));
});
- const setupComponent = (cell) => {
- const comp = new Component({
- propsData: {
- cell,
- },
- });
- comp.$mount();
- return comp;
- };
+ afterEach(() => {
+ wrapper.destroy();
+ });
describe('without output', () => {
beforeEach(() => {
- vm = setupComponent(json.cells[0]);
-
- return nextTick();
+ wrapper = mountComponent(json.cells[0]);
});
it('does not render output prompt', () => {
- expect(vm.$el.querySelectorAll('.prompt').length).toBe(1);
+ expect(wrapper.findAll('.prompt')).toHaveLength(1);
});
});
describe('with output', () => {
beforeEach(() => {
- vm = setupComponent(json.cells[2]);
-
- return nextTick();
+ wrapper = mountComponent(json.cells[2]);
});
it('does not render output prompt', () => {
- expect(vm.$el.querySelectorAll('.prompt').length).toBe(2);
+ expect(wrapper.findAll('.prompt')).toHaveLength(2);
});
it('renders output cell', () => {
- expect(vm.$el.querySelector('.output')).toBeDefined();
+ expect(wrapper.find('.output').exists()).toBe(true);
});
});
describe('with string for output', () => {
// NBFormat Version 4.1 allows outputs.text to be a string
- beforeEach(async () => {
+ beforeEach(() => {
const cell = json.cells[2];
cell.outputs[0].text = cell.outputs[0].text.join('');
- vm = setupComponent(cell);
- await nextTick();
+ wrapper = mountComponent(cell);
});
it('does not render output prompt', () => {
- expect(vm.$el.querySelectorAll('.prompt').length).toBe(2);
+ expect(wrapper.findAll('.prompt')).toHaveLength(2);
});
it('renders output cell', () => {
- expect(vm.$el.querySelector('.output')).toBeDefined();
+ expect(wrapper.find('.output').exists()).toBe(true);
});
});
describe('with string for cell.source', () => {
- beforeEach(async () => {
+ beforeEach(() => {
const cell = json.cells[0];
cell.source = cell.source.join('');
-
- vm = setupComponent(cell);
- await nextTick();
+ wrapper = mountComponent(cell);
});
it('renders the same input as when cell.source is an array', () => {
- const expected = "console.log('test')";
-
- expect(vm.$el.querySelector('.input').innerText).toContain(expected);
+ expect(wrapper.find('.input').text()).toContain("console.log('test')");
});
});
});
diff --git a/spec/frontend/notebook/cells/markdown_spec.js b/spec/frontend/notebook/cells/markdown_spec.js
index de415b5bfe0..c757b55faf4 100644
--- a/spec/frontend/notebook/cells/markdown_spec.js
+++ b/spec/frontend/notebook/cells/markdown_spec.js
@@ -130,7 +130,7 @@ describe('Markdown component', () => {
expect(columns[0].innerHTML).toContain('<img src="data:image/jpeg;base64');
expect(columns[1].innerHTML).toContain('<img src="data:image/png;base64');
expect(columns[2].innerHTML).toContain('<img src="data:image/jpeg;base64');
- expect(columns[3].innerHTML).toContain('<img>');
+ expect(columns[3].innerHTML).toContain('<img src="attachment:bogus">');
expect(columns[4].innerHTML).toContain('<img src="https://www.google.com/');
});
});
diff --git a/spec/frontend/notebook/cells/output/index_spec.js b/spec/frontend/notebook/cells/output/index_spec.js
index 8e04e4c146c..4d1d03e5e34 100644
--- a/spec/frontend/notebook/cells/output/index_spec.js
+++ b/spec/frontend/notebook/cells/output/index_spec.js
@@ -1,36 +1,35 @@
-import Vue, { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
import json from 'test_fixtures/blob/notebook/basic.json';
-import CodeComponent from '~/notebook/cells/output/index.vue';
-
-const Component = Vue.extend(CodeComponent);
+import Output from '~/notebook/cells/output/index.vue';
describe('Output component', () => {
- let vm;
+ let wrapper;
const createComponent = (output) => {
- vm = new Component({
+ wrapper = mount(Output, {
propsData: {
outputs: [].concat(output),
count: 1,
},
});
- vm.$mount();
};
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
describe('text output', () => {
beforeEach(() => {
const textType = json.cells[2];
createComponent(textType.outputs[0]);
-
- return nextTick();
});
it('renders as plain text', () => {
- expect(vm.$el.querySelector('pre')).not.toBeNull();
+ expect(wrapper.find('pre').exists()).toBe(true);
});
it('renders prompt', () => {
- expect(vm.$el.querySelector('.prompt span')).not.toBeNull();
+ expect(wrapper.find('.prompt span').exists()).toBe(true);
});
});
@@ -38,12 +37,10 @@ describe('Output component', () => {
beforeEach(() => {
const imageType = json.cells[3];
createComponent(imageType.outputs[0]);
-
- return nextTick();
});
it('renders as an image', () => {
- expect(vm.$el.querySelector('img')).not.toBeNull();
+ expect(wrapper.find('img').exists()).toBe(true);
});
});
@@ -52,16 +49,15 @@ describe('Output component', () => {
const htmlType = json.cells[4];
createComponent(htmlType.outputs[0]);
- expect(vm.$el.querySelector('p')).not.toBeNull();
- expect(vm.$el.querySelectorAll('p')).toHaveLength(1);
- expect(vm.$el.textContent.trim()).toContain('test');
+ expect(wrapper.findAll('p')).toHaveLength(1);
+ expect(wrapper.text()).toContain('test');
});
it('renders multiple raw HTML outputs', () => {
const htmlType = json.cells[4];
createComponent([htmlType.outputs[0], htmlType.outputs[0]]);
- expect(vm.$el.querySelectorAll('p')).toHaveLength(2);
+ expect(wrapper.findAll('p')).toHaveLength(2);
});
});
@@ -77,7 +73,7 @@ describe('Output component', () => {
};
createComponent(output);
- expect(vm.$el.querySelector('.MathJax')).not.toBeNull();
+ expect(wrapper.find('.MathJax').exists()).toBe(true);
});
});
@@ -85,12 +81,10 @@ describe('Output component', () => {
beforeEach(() => {
const svgType = json.cells[5];
createComponent(svgType.outputs[0]);
-
- return nextTick();
});
it('renders as an svg', () => {
- expect(vm.$el.querySelector('svg')).not.toBeNull();
+ expect(wrapper.find('svg').exists()).toBe(true);
});
});
@@ -98,27 +92,23 @@ describe('Output component', () => {
beforeEach(() => {
const unknownType = json.cells[6];
createComponent(unknownType.outputs[0]);
-
- return nextTick();
});
it('renders as plain text', () => {
- expect(vm.$el.querySelector('pre')).not.toBeNull();
- expect(vm.$el.textContent.trim()).toContain('testing');
+ expect(wrapper.find('pre').exists()).toBe(true);
+ expect(wrapper.text()).toContain('testing');
});
- it('renders promot', () => {
- expect(vm.$el.querySelector('.prompt span')).not.toBeNull();
+ it('renders prompt', () => {
+ expect(wrapper.find('.prompt span').exists()).toBe(true);
});
- it("renders as plain text when doesn't recognise other types", async () => {
+ it("renders as plain text when doesn't recognise other types", () => {
const unknownType = json.cells[7];
createComponent(unknownType.outputs[0]);
- await nextTick();
-
- expect(vm.$el.querySelector('pre')).not.toBeNull();
- expect(vm.$el.textContent.trim()).toContain('testing');
+ expect(wrapper.find('pre').exists()).toBe(true);
+ expect(wrapper.text()).toContain('testing');
});
});
});
diff --git a/spec/frontend/notebook/cells/prompt_spec.js b/spec/frontend/notebook/cells/prompt_spec.js
index 89b2d7b2b90..0cda0c5bc2b 100644
--- a/spec/frontend/notebook/cells/prompt_spec.js
+++ b/spec/frontend/notebook/cells/prompt_spec.js
@@ -1,52 +1,40 @@
-import Vue, { nextTick } from 'vue';
-import PromptComponent from '~/notebook/cells/prompt.vue';
-
-const Component = Vue.extend(PromptComponent);
+import { shallowMount } from '@vue/test-utils';
+import Prompt from '~/notebook/cells/prompt.vue';
describe('Prompt component', () => {
- let vm;
+ let wrapper;
+
+ const mountComponent = ({ type }) => shallowMount(Prompt, { propsData: { type, count: 1 } });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
describe('input', () => {
beforeEach(() => {
- vm = new Component({
- propsData: {
- type: 'In',
- count: 1,
- },
- });
- vm.$mount();
-
- return nextTick();
+ wrapper = mountComponent({ type: 'In' });
});
it('renders in label', () => {
- expect(vm.$el.textContent.trim()).toContain('In');
+ expect(wrapper.text()).toContain('In');
});
it('renders count', () => {
- expect(vm.$el.textContent.trim()).toContain('1');
+ expect(wrapper.text()).toContain('1');
});
});
describe('output', () => {
beforeEach(() => {
- vm = new Component({
- propsData: {
- type: 'Out',
- count: 1,
- },
- });
- vm.$mount();
-
- return nextTick();
+ wrapper = mountComponent({ type: 'Out' });
});
it('renders in label', () => {
- expect(vm.$el.textContent.trim()).toContain('Out');
+ expect(wrapper.text()).toContain('Out');
});
it('renders count', () => {
- expect(vm.$el.textContent.trim()).toContain('1');
+ expect(wrapper.text()).toContain('1');
});
});
});
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index 116016ecae2..463787c148b 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -550,98 +550,74 @@ describe('issue_comment_form component', () => {
});
describe('confidential notes checkbox', () => {
- describe('when confidentialNotes feature flag is `false`', () => {
- const features = { confidentialNotes: false };
+ it('should render checkbox as unchecked by default', () => {
+ mountComponent({
+ mountFunction: mount,
+ initialData: { note: 'confidential note' },
+ noteableData: { ...notableDataMockCanUpdateIssuable },
+ });
- it('should not render checkbox', () => {
+ const checkbox = findConfidentialNoteCheckbox();
+ expect(checkbox.exists()).toBe(true);
+ expect(checkbox.element.checked).toBe(false);
+ });
+
+ it.each`
+ noteableType | rendered | message
+ ${'Issue'} | ${true} | ${'render'}
+ ${'Epic'} | ${true} | ${'render'}
+ ${'MergeRequest'} | ${false} | ${'not render'}
+ `(
+ 'should $message checkbox when noteableType is $noteableType',
+ ({ noteableType, rendered }) => {
mountComponent({
mountFunction: mount,
- initialData: { note: 'confidential note' },
- noteableData: { ...notableDataMockCanUpdateIssuable },
- features,
+ noteableType,
+ initialData: { note: 'internal note' },
+ noteableData: { ...notableDataMockCanUpdateIssuable, noteableType },
});
- const checkbox = findConfidentialNoteCheckbox();
- expect(checkbox.exists()).toBe(false);
- });
- });
-
- describe('when confidentialNotes feature flag is `true`', () => {
- const features = { confidentialNotes: true };
+ expect(findConfidentialNoteCheckbox().exists()).toBe(rendered);
+ },
+ );
- it('should render checkbox as unchecked by default', () => {
+ describe.each`
+ shouldCheckboxBeChecked
+ ${true}
+ ${false}
+ `('when checkbox value is `$shouldCheckboxBeChecked`', ({ shouldCheckboxBeChecked }) => {
+ it(`sets \`confidential\` to \`${shouldCheckboxBeChecked}\``, async () => {
mountComponent({
mountFunction: mount,
initialData: { note: 'confidential note' },
noteableData: { ...notableDataMockCanUpdateIssuable },
- features,
});
- const checkbox = findConfidentialNoteCheckbox();
- expect(checkbox.exists()).toBe(true);
- expect(checkbox.element.checked).toBe(false);
- });
+ jest.spyOn(wrapper.vm, 'saveNote').mockResolvedValue({});
- it.each`
- noteableType | rendered | message
- ${'Issue'} | ${true} | ${'render'}
- ${'Epic'} | ${true} | ${'render'}
- ${'MergeRequest'} | ${false} | ${'not render'}
- `(
- 'should $message checkbox when noteableType is $noteableType',
- ({ noteableType, rendered }) => {
- mountComponent({
- mountFunction: mount,
- noteableType,
- initialData: { note: 'internal note' },
- noteableData: { ...notableDataMockCanUpdateIssuable, noteableType },
- features,
- });
-
- expect(findConfidentialNoteCheckbox().exists()).toBe(rendered);
- },
- );
-
- describe.each`
- shouldCheckboxBeChecked
- ${true}
- ${false}
- `('when checkbox value is `$shouldCheckboxBeChecked`', ({ shouldCheckboxBeChecked }) => {
- it(`sets \`confidential\` to \`${shouldCheckboxBeChecked}\``, async () => {
- mountComponent({
- mountFunction: mount,
- initialData: { note: 'confidential note' },
- noteableData: { ...notableDataMockCanUpdateIssuable },
- features,
- });
-
- jest.spyOn(wrapper.vm, 'saveNote').mockResolvedValue({});
-
- const checkbox = findConfidentialNoteCheckbox();
+ const checkbox = findConfidentialNoteCheckbox();
- // check checkbox
- checkbox.element.checked = shouldCheckboxBeChecked;
- checkbox.trigger('change');
- await nextTick();
+ // check checkbox
+ checkbox.element.checked = shouldCheckboxBeChecked;
+ checkbox.trigger('change');
+ await nextTick();
- // submit comment
- findCommentButton().trigger('click');
+ // submit comment
+ findCommentButton().trigger('click');
- const [providedData] = wrapper.vm.saveNote.mock.calls[0];
- expect(providedData.data.note.confidential).toBe(shouldCheckboxBeChecked);
- });
+ const [providedData] = wrapper.vm.saveNote.mock.calls[0];
+ expect(providedData.data.note.confidential).toBe(shouldCheckboxBeChecked);
});
+ });
- describe('when user cannot update issuable', () => {
- it('should not render checkbox', () => {
- mountComponent({
- mountFunction: mount,
- noteableData: { ...notableDataMockCannotUpdateIssuable },
- features,
- });
-
- expect(findConfidentialNoteCheckbox().exists()).toBe(false);
+ describe('when user cannot update issuable', () => {
+ it('should not render checkbox', () => {
+ mountComponent({
+ mountFunction: mount,
+ noteableData: { ...notableDataMockCannotUpdateIssuable },
});
+
+ expect(findConfidentialNoteCheckbox().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/notes/components/note_signed_out_widget_spec.js b/spec/frontend/notes/components/note_signed_out_widget_spec.js
index e217a2caa73..84f20e4ad58 100644
--- a/spec/frontend/notes/components/note_signed_out_widget_spec.js
+++ b/spec/frontend/notes/components/note_signed_out_widget_spec.js
@@ -1,41 +1,30 @@
-import Vue from 'vue';
-import noteSignedOut from '~/notes/components/note_signed_out_widget.vue';
+import { shallowMount } from '@vue/test-utils';
+import NoteSignedOutWidget from '~/notes/components/note_signed_out_widget.vue';
import createStore from '~/notes/stores';
import { notesDataMock } from '../mock_data';
-describe('note_signed_out_widget component', () => {
- let store;
- let vm;
+describe('NoteSignedOutWidget component', () => {
+ let wrapper;
beforeEach(() => {
- const Component = Vue.extend(noteSignedOut);
- store = createStore();
+ const store = createStore();
store.dispatch('setNotesData', notesDataMock);
-
- vm = new Component({
- store,
- }).$mount();
+ wrapper = shallowMount(NoteSignedOutWidget, { store });
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- it('should render sign in link provided in the store', () => {
- expect(vm.$el.querySelector(`a[href="${notesDataMock.newSessionPath}"]`).textContent).toEqual(
- 'sign in',
- );
+ it('renders sign in link provided in the store', () => {
+ expect(wrapper.find(`a[href="${notesDataMock.newSessionPath}"]`).text()).toBe('sign in');
});
- it('should render register link provided in the store', () => {
- expect(vm.$el.querySelector(`a[href="${notesDataMock.registerPath}"]`).textContent).toEqual(
- 'register',
- );
+ it('renders register link provided in the store', () => {
+ expect(wrapper.find(`a[href="${notesDataMock.registerPath}"]`).text()).toBe('register');
});
- it('should render information text', () => {
- expect(vm.$el.textContent.replace(/\s+/g, ' ').trim()).toEqual(
- 'Please register or sign in to reply',
- );
+ it('renders information text', () => {
+ expect(wrapper.text()).toContain('Please register or sign in to reply');
});
});
diff --git a/spec/frontend/notes/components/noteable_discussion_spec.js b/spec/frontend/notes/components/noteable_discussion_spec.js
index ddfa77117ca..603db56a098 100644
--- a/spec/frontend/notes/components/noteable_discussion_spec.js
+++ b/spec/frontend/notes/components/noteable_discussion_spec.js
@@ -2,7 +2,7 @@ import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import discussionWithTwoUnresolvedNotes from 'test_fixtures/merge_requests/resolved_diff_discussion.json';
import { trimText } from 'helpers/text_helper';
-import mockDiffFile from 'jest/diffs/mock_data/diff_file';
+import { getDiffFileMock } from 'jest/diffs/mock_data/diff_file';
import DiscussionNotes from '~/notes/components/discussion_notes.vue';
import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue';
import ResolveWithIssueButton from '~/notes/components/discussion_resolve_with_issue_button.vue';
@@ -45,7 +45,7 @@ describe('noteable_discussion component', () => {
it('should render thread header', async () => {
const discussion = { ...discussionMock };
- discussion.diff_file = mockDiffFile;
+ discussion.diff_file = getDiffFileMock();
discussion.diff_discussion = true;
discussion.expanded = false;
@@ -57,7 +57,7 @@ describe('noteable_discussion component', () => {
it('should hide actions when diff refs do not exist', async () => {
const discussion = { ...discussionMock };
- discussion.diff_file = { ...mockDiffFile, diff_refs: null };
+ discussion.diff_file = { ...getDiffFileMock(), diff_refs: null };
discussion.diff_discussion = true;
discussion.expanded = false;
diff --git a/spec/frontend/notes/components/noteable_note_spec.js b/spec/frontend/notes/components/noteable_note_spec.js
index 385edc59eb6..3350609bb90 100644
--- a/spec/frontend/notes/components/noteable_note_spec.js
+++ b/spec/frontend/notes/components/noteable_note_spec.js
@@ -1,20 +1,15 @@
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
-
+import { GlAvatar } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
-
import DiffsModule from '~/diffs/store/modules';
-
import NoteActions from '~/notes/components/note_actions.vue';
import NoteBody from '~/notes/components/note_body.vue';
import NoteHeader from '~/notes/components/note_header.vue';
import issueNote from '~/notes/components/noteable_note.vue';
import NotesModule from '~/notes/stores/modules';
import { NOTEABLE_TYPE_MAPPING } from '~/notes/constants';
-
-import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
-
import { noteableDataMock, notesDataMock, note } from '../mock_data';
Vue.use(Vuex);
@@ -205,19 +200,21 @@ describe('issue_note', () => {
await nextTick();
- expect(wrapper.findComponent(UserAvatarLink).props('imgSize')).toBe(24);
+ const avatar = wrapper.findComponent(GlAvatar);
+ const avatarProps = avatar.props();
+ expect(avatarProps.size).toBe(24);
});
});
- it('should render user information', () => {
+ it('should render user avatar', () => {
const { author } = note;
- const avatar = wrapper.findComponent(UserAvatarLink);
+ const avatar = wrapper.findComponent(GlAvatar);
const avatarProps = avatar.props();
- expect(avatarProps.linkHref).toBe(author.path);
- expect(avatarProps.imgSrc).toBe(author.avatar_url);
- expect(avatarProps.imgAlt).toBe(author.name);
- expect(avatarProps.imgSize).toBe(40);
+ expect(avatarProps.src).toBe(author.avatar_url);
+ expect(avatarProps.entityName).toBe(author.username);
+ expect(avatarProps.alt).toBe(author.name);
+ expect(avatarProps.size).toEqual({ default: 24, md: 32 });
});
it('should render note header content', () => {
diff --git a/spec/frontend/notes/components/notes_app_spec.js b/spec/frontend/notes/components/notes_app_spec.js
index f4eb69e0d49..36a68118fa7 100644
--- a/spec/frontend/notes/components/notes_app_spec.js
+++ b/spec/frontend/notes/components/notes_app_spec.js
@@ -44,22 +44,6 @@ describe('note_app', () => {
.wrappers.map((node) => (node.is(CommentForm) ? TYPE_COMMENT_FORM : TYPE_NOTES_LIST));
};
- /**
- * waits for fetchNotes() to complete
- */
- const waitForDiscussionsRequest = () =>
- new Promise((resolve) => {
- const { vm } = wrapper.find(NotesApp);
- const unwatch = vm.$watch('isFetching', (isFetching) => {
- if (isFetching) {
- return;
- }
-
- unwatch();
- resolve();
- });
- });
-
beforeEach(() => {
$('body').attr('data-page', 'projects:merge_requests:show');
@@ -95,7 +79,7 @@ describe('note_app', () => {
axiosMock.onAny().reply(200, []);
wrapper = mountComponent();
- return waitForDiscussionsRequest();
+ return waitForPromises();
});
afterEach(() => {
@@ -129,7 +113,7 @@ describe('note_app', () => {
axiosMock.onAny().reply(mockData.getIndividualNoteResponse);
wrapper = mountComponent();
- return waitForDiscussionsRequest();
+ return waitForPromises();
});
afterEach(() => {
@@ -172,7 +156,7 @@ describe('note_app', () => {
axiosMock.onAny().reply(mockData.getIndividualNoteResponse);
store.state.commentsDisabled = true;
wrapper = mountComponent();
- return waitForDiscussionsRequest();
+ return waitForPromises();
});
afterEach(() => {
@@ -197,7 +181,7 @@ describe('note_app', () => {
store.state.isTimelineEnabled = true;
wrapper = mountComponent();
- return waitForDiscussionsRequest();
+ return waitForPromises();
});
afterEach(() => {
@@ -210,15 +194,13 @@ describe('note_app', () => {
});
describe('while fetching data', () => {
- beforeEach(() => {
+ beforeEach(async () => {
setHTMLFixture('<div class="js-discussions-count"></div>');
- axiosMock.onAny().reply(200, []);
wrapper = mountComponent();
});
afterEach(() => {
- waitForDiscussionsRequest();
- resetHTMLFixture();
+ return waitForPromises().then(() => resetHTMLFixture());
});
it('renders skeleton notes', () => {
@@ -242,7 +224,7 @@ describe('note_app', () => {
beforeEach(() => {
axiosMock.onAny().reply(mockData.getIndividualNoteResponse);
wrapper = mountComponent();
- return waitForDiscussionsRequest().then(() => {
+ return waitForPromises().then(() => {
wrapper.find('.js-note-edit').trigger('click');
});
});
@@ -264,7 +246,7 @@ describe('note_app', () => {
beforeEach(() => {
axiosMock.onAny().reply(mockData.getDiscussionNoteResponse);
wrapper = mountComponent();
- return waitForDiscussionsRequest().then(() => {
+ return waitForPromises().then(() => {
wrapper.find('.js-note-edit').trigger('click');
});
});
@@ -287,7 +269,7 @@ describe('note_app', () => {
beforeEach(() => {
axiosMock.onAny().reply(mockData.getIndividualNoteResponse);
wrapper = mountComponent();
- return waitForDiscussionsRequest();
+ return waitForPromises();
});
it('should render markdown docs url', () => {
@@ -309,7 +291,7 @@ describe('note_app', () => {
beforeEach(() => {
axiosMock.onAny().reply(mockData.getIndividualNoteResponse);
wrapper = mountComponent();
- return waitForDiscussionsRequest();
+ return waitForPromises();
});
it('should render markdown docs url', async () => {
@@ -337,7 +319,7 @@ describe('note_app', () => {
beforeEach(() => {
axiosMock.onAny().reply(200, []);
wrapper = mountComponent();
- return waitForDiscussionsRequest();
+ return waitForPromises();
});
it('dispatches toggleAward after toggleAward event', () => {
@@ -373,7 +355,7 @@ describe('note_app', () => {
beforeEach(() => {
axiosMock.onAny().reply(mockData.getIndividualNoteResponse);
wrapper = mountComponent();
- return waitForDiscussionsRequest();
+ return waitForPromises();
});
it('should listen hashchange event', () => {
@@ -471,7 +453,7 @@ describe('note_app', () => {
wrapper = shallowMount(NotesApp, { propsData, store: createStore() });
await waitForPromises();
- expect(axiosMock.history.get[0].params).toBeUndefined();
+ expect(axiosMock.history.get[0].params).toEqual({ per_page: 20 });
});
});
@@ -496,14 +478,14 @@ describe('note_app', () => {
wrapper = mountWithNotesFilter(undefined);
await waitForPromises();
- expect(axiosMock.history.get[0].params).toBeUndefined();
+ expect(axiosMock.history.get[0].params).toEqual({ per_page: 20 });
});
it('does not include extra query params when filter is already set to default', async () => {
wrapper = mountWithNotesFilter(constants.DISCUSSION_FILTERS_DEFAULT_VALUE);
await waitForPromises();
- expect(axiosMock.history.get[0].params).toBeUndefined();
+ expect(axiosMock.history.get[0].params).toEqual({ per_page: 20 });
});
it('includes extra query params when filter is not set to default', async () => {
@@ -512,6 +494,7 @@ describe('note_app', () => {
expect(axiosMock.history.get[0].params).toEqual({
notes_filter: constants.DISCUSSION_FILTERS_DEFAULT_VALUE,
+ per_page: 20,
persist_filter: false,
});
});
diff --git a/spec/frontend/notes/components/toggle_replies_widget_spec.js b/spec/frontend/notes/components/toggle_replies_widget_spec.js
index 409e1bc3951..8c3696e88b7 100644
--- a/spec/frontend/notes/components/toggle_replies_widget_spec.js
+++ b/spec/frontend/notes/components/toggle_replies_widget_spec.js
@@ -1,13 +1,14 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import toggleRepliesWidget from '~/notes/components/toggle_replies_widget.vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import ToggleRepliesWidget from '~/notes/components/toggle_replies_widget.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
import { note } from '../mock_data';
-const deepCloneObject = (obj) => JSON.parse(JSON.stringify(obj));
-
describe('toggle replies widget for notes', () => {
- let vm;
- let ToggleRepliesWidget;
+ let wrapper;
+
+ const deepCloneObject = (obj) => JSON.parse(JSON.stringify(obj));
+
const noteFromOtherUser = deepCloneObject(note);
noteFromOtherUser.author.username = 'fatihacet';
@@ -17,62 +18,62 @@ describe('toggle replies widget for notes', () => {
const replies = [note, note, note, noteFromOtherUser, noteFromAnotherUser];
- beforeEach(() => {
- ToggleRepliesWidget = Vue.extend(toggleRepliesWidget);
- });
+ const findCollapseToggleButton = () =>
+ wrapper.findByRole('button', { text: ToggleRepliesWidget.i18n.collapseReplies });
+ const findExpandToggleButton = () =>
+ wrapper.findByRole('button', { text: ToggleRepliesWidget.i18n.expandReplies });
+ const findRepliesButton = () => wrapper.findByRole('button', { text: '5 replies' });
+ const findTimeAgoTooltip = () => wrapper.findComponent(TimeAgoTooltip);
+ const findUserAvatarLink = () => wrapper.findAllComponents(UserAvatarLink);
+ const findUserLink = () => wrapper.findByRole('link', { text: noteFromAnotherUser.author.name });
+
+ const mountComponent = ({ collapsed = false }) =>
+ mountExtended(ToggleRepliesWidget, { propsData: { replies, collapsed } });
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
describe('collapsed state', () => {
beforeEach(() => {
- vm = mountComponent(ToggleRepliesWidget, {
- replies,
- collapsed: true,
- });
+ wrapper = mountComponent({ collapsed: true });
});
- it('should render the collapsed', () => {
- const vmTextContent = vm.$el.textContent.replace(/\s\s+/g, ' ');
-
- expect(vm.$el.classList.contains('collapsed')).toEqual(true);
- expect(vm.$el.querySelectorAll('.user-avatar-link').length).toEqual(3);
- expect(vm.$el.querySelector('time')).not.toBeNull();
- expect(vmTextContent).toContain('5 replies');
- expect(vmTextContent).toContain(`Last reply by ${noteFromAnotherUser.author.name}`);
+ it('renders collapsed state elements', () => {
+ expect(findExpandToggleButton().exists()).toBe(true);
+ expect(findUserAvatarLink()).toHaveLength(3);
+ expect(findRepliesButton().exists()).toBe(true);
+ expect(wrapper.text()).toContain('Last reply by');
+ expect(findUserLink().exists()).toBe(true);
+ expect(findTimeAgoTooltip().exists()).toBe(true);
});
- it('should emit toggle event when the replies text clicked', () => {
- const spy = jest.spyOn(vm, '$emit');
+ it('emits "toggle" event when expand toggle button is clicked', () => {
+ findExpandToggleButton().trigger('click');
+
+ expect(wrapper.emitted('toggle')).toEqual([[]]);
+ });
- vm.$el.querySelector('.js-replies-text').click();
+ it('emits "toggle" event when replies button is clicked', () => {
+ findRepliesButton().trigger('click');
- expect(spy).toHaveBeenCalledWith('toggle');
+ expect(wrapper.emitted('toggle')).toEqual([[]]);
});
});
describe('expanded state', () => {
beforeEach(() => {
- vm = mountComponent(ToggleRepliesWidget, {
- replies,
- collapsed: false,
- });
+ wrapper = mountComponent({ collapsed: false });
});
- it('should render expanded state', () => {
- const vmTextContent = vm.$el.textContent.replace(/\s\s+/g, ' ');
-
- expect(vm.$el.querySelector('.collapse-replies-btn')).not.toBeNull();
- expect(vmTextContent).toContain('Collapse replies');
+ it('renders expanded state elements', () => {
+ expect(findCollapseToggleButton().exists()).toBe(true);
});
- it('should emit toggle event when the collapse replies text called', () => {
- const spy = jest.spyOn(vm, '$emit');
-
- vm.$el.querySelector('.js-collapse-replies').click();
+ it('emits "toggle" event when collapse toggle button is clicked', () => {
+ findCollapseToggleButton().trigger('click');
- expect(spy).toHaveBeenCalledWith('toggle');
+ expect(wrapper.emitted('toggle')).toEqual([[]]);
});
});
});
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index 38f29ac2559..02b27eca196 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -15,6 +15,7 @@ import * as utils from '~/notes/stores/utils';
import updateIssueLockMutation from '~/sidebar/components/lock/mutations/update_issue_lock.mutation.graphql';
import updateMergeRequestLockMutation from '~/sidebar/components/lock/mutations/update_merge_request_lock.mutation.graphql';
import mrWidgetEventHub from '~/vue_merge_request_widget/event_hub';
+import waitForPromises from 'helpers/wait_for_promises';
import { resetStore } from '../helpers';
import {
discussionMock,
@@ -254,9 +255,7 @@ describe('Actions Notes Store', () => {
jest.advanceTimersByTime(time);
}
- return new Promise((resolve) => {
- requestAnimationFrame(resolve);
- });
+ return waitForPromises();
};
const advanceXMoreIntervals = async (number) => {
const timeoutLength = pollInterval * number;
@@ -365,7 +364,6 @@ describe('Actions Notes Store', () => {
});
it('hides the error display if it exists on success', async () => {
- jest.mock();
failureMock();
await startPolling();
@@ -668,7 +666,6 @@ describe('Actions Notes Store', () => {
describe('updateOrCreateNotes', () => {
it('Prevents `fetchDiscussions` being called multiple times within time limit', () => {
- jest.useFakeTimers();
const note = { id: 1234, type: notesConstants.DIFF_NOTE };
const getters = { notesById: {} };
state = { discussions: [note], notesData: { discussionsPath: '' } };
@@ -1351,7 +1348,7 @@ describe('Actions Notes Store', () => {
return testAction(
actions.fetchDiscussions,
{},
- null,
+ { noteableType: notesConstants.MERGE_REQUEST_NOTEABLE_TYPE },
[
{ type: mutationTypes.ADD_OR_UPDATE_DISCUSSIONS, payload: { discussion } },
{ type: mutationTypes.SET_FETCHING_DISCUSSIONS, payload: false },
@@ -1360,13 +1357,11 @@ describe('Actions Notes Store', () => {
);
});
- it('dispatches `fetchDiscussionsBatch` action if `paginatedIssueDiscussions` feature flag is enabled', () => {
- window.gon = { features: { paginatedIssueDiscussions: true } };
-
+ it('dispatches `fetchDiscussionsBatch` action if noteable is an Issue', () => {
return testAction(
actions.fetchDiscussions,
{ path: 'test-path', filter: 'test-filter', persistFilter: 'test-persist-filter' },
- null,
+ { noteableType: notesConstants.ISSUE_NOTEABLE_TYPE },
[],
[
{
@@ -1389,7 +1384,7 @@ describe('Actions Notes Store', () => {
return testAction(
actions.fetchDiscussions,
{ path: 'test-path', filter: 'test-filter', persistFilter: 'test-persist-filter' },
- null,
+ { noteableType: notesConstants.MERGE_REQUEST_NOTEABLE_TYPE },
[],
[
{
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js
index ca666e38291..9982286c625 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js
@@ -18,7 +18,6 @@ import {
CLEANUP_SCHEDULED_TOOLTIP,
CLEANUP_ONGOING_TOOLTIP,
CLEANUP_UNFINISHED_TOOLTIP,
- ROOT_IMAGE_TEXT,
ROOT_IMAGE_TOOLTIP,
} from '~/packages_and_registries/container_registry/explorer/constants';
import getContainerRepositoryMetadata from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_metadata.query.graphql';
@@ -35,6 +34,7 @@ describe('Details Header', () => {
canDelete: true,
project: {
visibility: 'public',
+ path: 'path',
containerExpirationPolicy: {
enabled: false,
},
@@ -98,8 +98,8 @@ describe('Details Header', () => {
return waitForPromises();
});
- it('root image ', () => {
- expect(findTitle().text()).toBe(ROOT_IMAGE_TEXT);
+ it('root image shows project path name', () => {
+ expect(findTitle().text()).toBe('path');
});
it('has an icon', () => {
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status_spec.js
index 0581a40b6a2..a5b2b1d7cf8 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status_spec.js
@@ -109,5 +109,17 @@ describe('cleanup_status', () => {
expect(findPopover().findComponent(GlLink).exists()).toBe(true);
expect(findPopover().findComponent(GlLink).attributes('href')).toBe(cleanupPolicyHelpPage);
});
+
+ it('id matches popover target attribute', () => {
+ mountComponent({
+ status: UNFINISHED_STATUS,
+ next_run_at: '2063-04-08T01:44:03Z',
+ });
+
+ const id = findExtraInfoIcon().attributes('id');
+
+ expect(id).toMatch(/status-info-[0-9]+/);
+ expect(findPopover().props('target')).toEqual(id);
+ });
});
});
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
index 979e1500d7d..d12933526bc 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
@@ -1,6 +1,7 @@
-import { GlIcon, GlSprintf, GlSkeletonLoader } from '@gitlab/ui';
+import { GlIcon, GlSprintf, GlSkeletonLoader, GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { mockTracking } from 'helpers/tracking_helper';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import DeleteButton from '~/packages_and_registries/container_registry/explorer/components/delete_button.vue';
import CleanupStatus from '~/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status.vue';
@@ -12,7 +13,6 @@ import {
IMAGE_DELETE_SCHEDULED_STATUS,
IMAGE_MIGRATING_STATE,
SCHEDULED_STATUS,
- ROOT_IMAGE_TEXT,
COPY_IMAGE_PATH_TITLE,
} from '~/packages_and_registries/container_registry/explorer/constants';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
@@ -31,13 +31,15 @@ describe('Image List Row', () => {
const findCleanupStatus = () => wrapper.findComponent(CleanupStatus);
const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
const findListItemComponent = () => wrapper.findComponent(ListItem);
+ const findShowFullPathButton = () => wrapper.findComponent(GlButton);
- const mountComponent = (props) => {
+ const mountComponent = (props, features = {}) => {
wrapper = shallowMount(Component, {
stubs: {
RouterLink,
GlSprintf,
ListItem,
+ GlButton,
},
propsData: {
item,
@@ -45,6 +47,9 @@ describe('Image List Row', () => {
},
provide: {
config: {},
+ glFeatures: {
+ ...features,
+ },
},
directives: {
GlTooltip: createMockDirective(),
@@ -96,10 +101,10 @@ describe('Image List Row', () => {
});
});
- it(`when the image has no name appends ${ROOT_IMAGE_TEXT} to the path`, () => {
+ it('when the image has no name lists the path', () => {
mountComponent({ item: { ...item, name: '' } });
- expect(findDetailsLink().text()).toBe(`${item.path}/ ${ROOT_IMAGE_TEXT}`);
+ expect(findDetailsLink().text()).toBe(item.path);
});
it('contains a clipboard button', () => {
@@ -144,6 +149,35 @@ describe('Image List Row', () => {
expect(findClipboardButton().attributes('disabled')).toBe('true');
});
});
+
+ describe('when containerRegistryShowShortenedPath feature enabled', () => {
+ let trackingSpy;
+
+ beforeEach(() => {
+ mountComponent({}, { containerRegistryShowShortenedPath: true });
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
+
+ it('renders shortened name of image', () => {
+ expect(findShowFullPathButton().exists()).toBe(true);
+ expect(findDetailsLink().text()).toBe('gitlab-test/rails-12009');
+ });
+
+ it('clicking on shortened name of image hides the button & shows full path', async () => {
+ const btn = findShowFullPathButton();
+ const mockFocusFn = jest.fn();
+ wrapper.vm.$refs.imageName.$el.focus = mockFocusFn;
+
+ await btn.trigger('click');
+
+ expect(findShowFullPathButton().exists()).toBe(false);
+ expect(findDetailsLink().text()).toBe(item.path);
+ expect(mockFocusFn).toHaveBeenCalled();
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_show_full_path', {
+ label: 'registry_image_list',
+ });
+ });
+ });
});
describe('delete button', () => {
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js b/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js
index 7e6f88fe5bc..f9739509ef9 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js
@@ -11,6 +11,10 @@ export const imagesListResponse = [
createdAt: '2020-11-03T13:29:21Z',
expirationPolicyStartedAt: null,
expirationPolicyCleanupStatus: 'UNSCHEDULED',
+ project: {
+ id: 'gid://gitlab/Project/22',
+ path: 'gitlab-test',
+ },
},
{
__typename: 'ContainerRepository',
@@ -24,6 +28,10 @@ export const imagesListResponse = [
createdAt: '2020-09-21T06:57:43Z',
expirationPolicyStartedAt: null,
expirationPolicyCleanupStatus: 'UNSCHEDULED',
+ project: {
+ id: 'gid://gitlab/Project/22',
+ path: 'gitlab-test',
+ },
},
];
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js
index 59ca47bee50..1d161888a4d 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js
@@ -20,7 +20,6 @@ import {
ALERT_DANGER_IMAGE,
ALERT_DANGER_IMPORTING,
MISSING_OR_DELETED_IMAGE_BREADCRUMB,
- ROOT_IMAGE_TEXT,
MISSING_OR_DELETED_IMAGE_TITLE,
MISSING_OR_DELETED_IMAGE_MESSAGE,
} from '~/packages_and_registries/container_registry/explorer/constants';
@@ -482,7 +481,7 @@ describe('Details Page', () => {
expect(breadCrumbState.updateName).toHaveBeenCalledWith(MISSING_OR_DELETED_IMAGE_BREADCRUMB);
});
- it(`when the image has no name set the breadcrumb to ${ROOT_IMAGE_TEXT}`, async () => {
+ it(`when the image has no name set the breadcrumb to project name`, async () => {
mountComponent({
resolver: jest
.fn()
@@ -491,7 +490,7 @@ describe('Details Page', () => {
await waitForApolloRequestRender();
- expect(breadCrumbState.updateName).toHaveBeenCalledWith(ROOT_IMAGE_TEXT);
+ expect(breadCrumbState.updateName).toHaveBeenCalledWith('gitlab-test');
});
});
diff --git a/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js b/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js
index fe4a2c06f1c..f2901148e17 100644
--- a/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js
+++ b/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js
@@ -38,6 +38,8 @@ const dummyGon = {
let originalGon;
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${dummyGrouptId}/dependency_proxy/cache`;
+Vue.use(VueApollo);
+
describe('DependencyProxyApp', () => {
let wrapper;
let apolloProvider;
@@ -51,8 +53,6 @@ describe('DependencyProxyApp', () => {
};
function createComponent({ provide = provideDefaults } = {}) {
- Vue.use(VueApollo);
-
const requestHandlers = [[getDependencyProxyDetailsQuery, resolver]];
apolloProvider = createMockApollo(requestHandlers);
@@ -103,19 +103,21 @@ describe('DependencyProxyApp', () => {
describe('when the dependency proxy is available', () => {
describe('when is loading', () => {
- beforeEach(() => {
+ it('renders the skeleton loader', () => {
createComponent();
- });
- it('renders the skeleton loader', () => {
expect(findSkeletonLoader().exists()).toBe(true);
});
it('does not render a form group with label', () => {
+ createComponent();
+
expect(findFormGroup().exists()).toBe(false);
});
it('does not show the main section', () => {
+ createComponent();
+
expect(findMainArea().exists()).toBe(false);
});
});
@@ -215,23 +217,26 @@ describe('DependencyProxyApp', () => {
});
describe('triggering page event on list', () => {
- beforeEach(async () => {
+ it('re-renders the skeleton loader', async () => {
findManifestList().vm.$emit('next-page');
-
await nextTick();
- });
- it('re-renders the skeleton loader', () => {
expect(findSkeletonLoader().exists()).toBe(true);
});
- it('renders form group with label', () => {
+ it('renders form group with label', async () => {
+ findManifestList().vm.$emit('next-page');
+ await nextTick();
+
expect(findFormGroup().attributes('label')).toEqual(
expect.stringMatching(DependencyProxyApp.i18n.proxyImagePrefix),
);
});
- it('does not show the main section', () => {
+ it('does not show the main section', async () => {
+ findManifestList().vm.$emit('next-page');
+ await nextTick();
+
expect(findMainArea().exists()).toBe(false);
});
});
diff --git a/spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js b/spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js
index e60989b0949..9d4c7f4737b 100644
--- a/spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js
+++ b/spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js
@@ -6,13 +6,15 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import component from '~/packages_and_registries/settings/group/components/dependency_proxy_settings.vue';
-import { DEPENDENCY_PROXY_HEADER } from '~/packages_and_registries/settings/group/constants';
+import {
+ DEPENDENCY_PROXY_HEADER,
+ DEPENDENCY_PROXY_DESCRIPTION,
+} from '~/packages_and_registries/settings/group/constants';
import updateDependencyProxySettings from '~/packages_and_registries/settings/group/graphql/mutations/update_dependency_proxy_settings.mutation.graphql';
import updateDependencyProxyImageTtlGroupPolicy from '~/packages_and_registries/settings/group/graphql/mutations/update_dependency_proxy_image_ttl_group_policy.mutation.graphql';
import getGroupPackagesSettingsQuery from '~/packages_and_registries/settings/group/graphql/queries/get_group_packages_settings.query.graphql';
import SettingsBlock from '~/vue_shared/components/settings/settings_block.vue';
-import SettingsTitles from '~/packages_and_registries/settings/group/components/settings_titles.vue';
import {
updateGroupDependencyProxySettingsOptimisticResponse,
updateDependencyProxyImageTtlGroupPolicyOptimisticResponse,
@@ -36,7 +38,6 @@ describe('DependencyProxySettings', () => {
let updateTtlPoliciesMutationResolver;
const defaultProvide = {
- defaultExpanded: false,
groupPath: 'foo_group_path',
groupDependencyProxyPath: 'group_dependency_proxy_path',
};
@@ -86,7 +87,6 @@ describe('DependencyProxySettings', () => {
});
const findSettingsBlock = () => wrapper.findComponent(SettingsBlock);
- const findSettingsTitles = () => wrapper.findComponent(SettingsTitles);
const findEnableProxyToggle = () => wrapper.findByTestId('dependency-proxy-setting-toggle');
const findEnableTtlPoliciesToggle = () =>
wrapper.findByTestId('dependency-proxy-ttl-policies-toggle');
@@ -108,16 +108,11 @@ describe('DependencyProxySettings', () => {
expect(findSettingsBlock().exists()).toBe(true);
});
- it('passes the correct props to settings block', () => {
- mountComponent();
-
- expect(findSettingsBlock().props('defaultExpanded')).toBe(false);
- });
-
- it('has the correct header text', () => {
+ it('has the correct header text and description', () => {
mountComponent();
expect(wrapper.text()).toContain(DEPENDENCY_PROXY_HEADER);
+ expect(wrapper.text()).toContain(DEPENDENCY_PROXY_DESCRIPTION);
});
describe('enable toggle', () => {
@@ -158,14 +153,6 @@ describe('DependencyProxySettings', () => {
});
describe('storage settings', () => {
- it('the component has the settings title', () => {
- mountComponent();
-
- expect(findSettingsTitles().props()).toMatchObject({
- title: component.i18n.storageSettingsTitle,
- });
- });
-
describe('enable proxy ttl policies', () => {
it('exists', () => {
mountComponent();
diff --git a/spec/frontend/packages_and_registries/settings/group/components/duplicates_settings_spec.js b/spec/frontend/packages_and_registries/settings/group/components/duplicates_settings_spec.js
index 79c2f811c08..3eecdeb5b1f 100644
--- a/spec/frontend/packages_and_registries/settings/group/components/duplicates_settings_spec.js
+++ b/spec/frontend/packages_and_registries/settings/group/components/duplicates_settings_spec.js
@@ -4,8 +4,6 @@ import component from '~/packages_and_registries/settings/group/components/dupli
import {
DUPLICATES_TOGGLE_LABEL,
- DUPLICATES_ALLOWED_ENABLED,
- DUPLICATES_ALLOWED_DISABLED,
DUPLICATES_SETTING_EXCEPTION_TITLE,
DUPLICATES_SETTINGS_EXCEPTION_LEGEND,
} from '~/packages_and_registries/settings/group/constants';
@@ -36,7 +34,6 @@ describe('Duplicates Settings', () => {
});
const findToggle = () => wrapper.findComponent(GlToggle);
- const findToggleLabel = () => wrapper.find('[data-testid="toggle-label"');
const findInputGroup = () => wrapper.findComponent(GlFormGroup);
const findInput = () => wrapper.findComponent(GlFormInput);
@@ -47,7 +44,7 @@ describe('Duplicates Settings', () => {
expect(findToggle().exists()).toBe(true);
expect(findToggle().props()).toMatchObject({
label: DUPLICATES_TOGGLE_LABEL,
- value: defaultProps.duplicatesAllowed,
+ value: !defaultProps.duplicatesAllowed,
});
});
@@ -57,18 +54,11 @@ describe('Duplicates Settings', () => {
findToggle().vm.$emit('change', false);
expect(wrapper.emitted('update')).toStrictEqual([
- [{ [defaultProps.modelNames.allowed]: false }],
+ [{ [defaultProps.modelNames.allowed]: true }],
]);
});
describe('when the duplicates are disabled', () => {
- it('the toggle has the disabled message', () => {
- mountComponent();
-
- expect(findToggleLabel().exists()).toBe(true);
- expect(findToggleLabel().text()).toMatchInterpolatedText(DUPLICATES_ALLOWED_DISABLED);
- });
-
it('shows a form group with an input field', () => {
mountComponent();
@@ -130,13 +120,6 @@ describe('Duplicates Settings', () => {
});
describe('when the duplicates are enabled', () => {
- it('has the correct toggle label', () => {
- mountComponent({ ...defaultProps, duplicatesAllowed: true });
-
- expect(findToggleLabel().exists()).toBe(true);
- expect(findToggleLabel().text()).toMatchInterpolatedText(DUPLICATES_ALLOWED_ENABLED);
- });
-
it('hides the form input group', () => {
mountComponent({ ...defaultProps, duplicatesAllowed: true });
diff --git a/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js b/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
index 635195ff0a4..31fc3ad419c 100644
--- a/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
+++ b/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
@@ -26,7 +26,6 @@ describe('Group Settings App', () => {
let show;
const defaultProvide = {
- defaultExpanded: false,
groupPath: 'foo_group_path',
};
diff --git a/spec/frontend/packages_and_registries/settings/group/components/package_settings_spec.js b/spec/frontend/packages_and_registries/settings/group/components/package_settings_spec.js
index d92d42e7834..274930ce668 100644
--- a/spec/frontend/packages_and_registries/settings/group/components/package_settings_spec.js
+++ b/spec/frontend/packages_and_registries/settings/group/components/package_settings_spec.js
@@ -1,4 +1,3 @@
-import { GlSprintf, GlLink } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -11,7 +10,6 @@ import MavenSettings from '~/packages_and_registries/settings/group/components/m
import {
PACKAGE_SETTINGS_HEADER,
PACKAGE_SETTINGS_DESCRIPTION,
- PACKAGES_DOCS_PATH,
} from '~/packages_and_registries/settings/group/constants';
import updateNamespacePackageSettings from '~/packages_and_registries/settings/group/graphql/mutations/update_group_packages_settings.mutation.graphql';
@@ -33,7 +31,6 @@ describe('Packages Settings', () => {
let apolloProvider;
const defaultProvide = {
- defaultExpanded: false,
groupPath: 'foo_group_path',
};
@@ -53,7 +50,6 @@ describe('Packages Settings', () => {
packageSettings: packageSettings(),
},
stubs: {
- GlSprintf,
SettingsBlock,
MavenSettings,
GenericSettings,
@@ -67,7 +63,6 @@ describe('Packages Settings', () => {
const findSettingsBlock = () => wrapper.findComponent(SettingsBlock);
const findDescription = () => wrapper.findByTestId('description');
- const findLink = () => wrapper.findComponent(GlLink);
const findMavenSettings = () => wrapper.findComponent(MavenSettings);
const findMavenDuplicatedSettings = () => findMavenSettings().findComponent(DuplicatesSettings);
const findGenericSettings = () => wrapper.findComponent(GenericSettings);
@@ -97,12 +92,6 @@ describe('Packages Settings', () => {
expect(findSettingsBlock().exists()).toBe(true);
});
- it('passes the correct props to settings block', () => {
- mountComponent();
-
- expect(findSettingsBlock().props('defaultExpanded')).toBe(false);
- });
-
it('has the correct header text', () => {
mountComponent();
@@ -115,16 +104,6 @@ describe('Packages Settings', () => {
expect(findDescription().text()).toMatchInterpolatedText(PACKAGE_SETTINGS_DESCRIPTION);
});
- it('has the correct link', () => {
- mountComponent();
-
- expect(findLink().attributes()).toMatchObject({
- href: PACKAGES_DOCS_PATH,
- target: '_blank',
- });
- expect(findLink().text()).toBe('Learn more.');
- });
-
describe('maven settings', () => {
it('exists', () => {
mountComponent();
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap
index faa313118f3..108d9478788 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap
@@ -4,6 +4,7 @@ exports[`Container Expiration Policy Settings Form Cadence matches snapshot 1`]
<expiration-dropdown-stub
class="gl-mr-7 gl-mb-0!"
data-testid="cadence-dropdown"
+ description=""
formoptions="[object Object],[object Object],[object Object],[object Object],[object Object]"
label="Run cleanup:"
name="cadence"
@@ -22,6 +23,7 @@ exports[`Container Expiration Policy Settings Form Enable matches snapshot 1`] =
exports[`Container Expiration Policy Settings Form Keep N matches snapshot 1`] = `
<expiration-dropdown-stub
data-testid="keep-n-dropdown"
+ description=""
formoptions="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
label="Keep the most recent:"
name="keep-n"
@@ -44,6 +46,7 @@ exports[`Container Expiration Policy Settings Form Keep Regex matches snapshot 1
exports[`Container Expiration Policy Settings Form OlderThan matches snapshot 1`] = `
<expiration-dropdown-stub
data-testid="older-than-dropdown"
+ description=""
formoptions="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
label="Remove tags older than:"
name="older-than"
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js
index aa3506771fa..d83c717da6a 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js
@@ -43,11 +43,6 @@ describe('Container expiration policy project settings', () => {
GlSprintf,
SettingsBlock,
},
- mocks: {
- $toast: {
- show: jest.fn(),
- },
- },
provide,
...config,
});
@@ -98,7 +93,7 @@ describe('Container expiration policy project settings', () => {
await waitForPromises();
expect(findFormComponent().exists()).toBe(true);
- expect(findSettingsBlock().props('collapsible')).toBe(false);
+ expect(findSettingsBlock().exists()).toBe(true);
});
describe('the form is disabled', () => {
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/expiration_dropdown_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/expiration_dropdown_spec.js
index 5c9ade7f785..8b99ac6b06c 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/expiration_dropdown_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/expiration_dropdown_spec.js
@@ -16,6 +16,7 @@ describe('ExpirationDropdown', () => {
const findFormSelect = () => wrapper.find(GlFormSelect);
const findFormGroup = () => wrapper.find(GlFormGroup);
+ const findDescription = () => wrapper.find('[data-testid="description"]');
const findOptions = () => wrapper.findAll('[data-testid="option"]');
const mountComponent = (props) => {
@@ -47,6 +48,14 @@ describe('ExpirationDropdown', () => {
expect(findOptions()).toHaveLength(defaultProps.formOptions.length);
});
+
+ it('renders the description if passed', () => {
+ mountComponent({
+ description: 'test description',
+ });
+
+ expect(findDescription().html()).toContain('test description');
+ });
});
describe('model', () => {
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/packages_cleanup_policy_form_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/packages_cleanup_policy_form_spec.js
new file mode 100644
index 00000000000..86f45d78bae
--- /dev/null
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/packages_cleanup_policy_form_spec.js
@@ -0,0 +1,267 @@
+import VueApollo from 'vue-apollo';
+import Vue from 'vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { GlLoadingIcon } from 'jest/packages_and_registries/shared/stubs';
+import component from '~/packages_and_registries/settings/project/components/packages_cleanup_policy_form.vue';
+import {
+ UPDATE_SETTINGS_ERROR_MESSAGE,
+ UPDATE_SETTINGS_SUCCESS_MESSAGE,
+ KEEP_N_DUPLICATED_PACKAGE_FILES_LABEL,
+ KEEP_N_DUPLICATED_PACKAGE_FILES_DESCRIPTION,
+} from '~/packages_and_registries/settings/project/constants';
+import updatePackagesCleanupPolicyMutation from '~/packages_and_registries/settings/project/graphql/mutations/update_packages_cleanup_policy.mutation.graphql';
+import Tracking from '~/tracking';
+import { packagesCleanupPolicyPayload, packagesCleanupPolicyMutationPayload } from '../mock_data';
+
+Vue.use(VueApollo);
+
+describe('Packages Cleanup Policy Settings Form', () => {
+ let wrapper;
+ let fakeApollo;
+
+ const defaultProvidedValues = {
+ projectPath: 'path',
+ };
+
+ const {
+ data: {
+ project: { packagesCleanupPolicy },
+ },
+ } = packagesCleanupPolicyPayload();
+
+ const defaultProps = {
+ value: { ...packagesCleanupPolicy },
+ };
+
+ const trackingPayload = {
+ label: 'packages_cleanup_policies',
+ };
+
+ const findForm = () => wrapper.find({ ref: 'form-element' });
+ const findSaveButton = () => wrapper.findByTestId('save-button');
+ const findKeepNDuplicatedPackageFilesDropdown = () =>
+ wrapper.findByTestId('keep-n-duplicated-package-files-dropdown');
+
+ const submitForm = async () => {
+ findForm().trigger('submit');
+ return waitForPromises();
+ };
+
+ const mountComponent = ({
+ props = defaultProps,
+ data,
+ config,
+ provide = defaultProvidedValues,
+ } = {}) => {
+ wrapper = shallowMountExtended(component, {
+ stubs: {
+ GlLoadingIcon,
+ },
+ propsData: { ...props },
+ provide,
+ data() {
+ return {
+ ...data,
+ };
+ },
+ mocks: {
+ $toast: {
+ show: jest.fn(),
+ },
+ },
+ ...config,
+ });
+ };
+
+ const mountComponentWithApollo = ({
+ provide = defaultProvidedValues,
+ mutationResolver,
+ queryPayload = packagesCleanupPolicyPayload(),
+ } = {}) => {
+ const requestHandlers = [[updatePackagesCleanupPolicyMutation, mutationResolver]];
+
+ fakeApollo = createMockApollo(requestHandlers);
+
+ const {
+ data: {
+ project: { packagesCleanupPolicy: value },
+ },
+ } = queryPayload;
+
+ mountComponent({
+ provide,
+ props: {
+ ...defaultProps,
+ value,
+ },
+ config: {
+ apolloProvider: fakeApollo,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event');
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ fakeApollo = null;
+ });
+
+ describe('keepNDuplicatedPackageFiles', () => {
+ it('renders dropdown', () => {
+ mountComponent();
+
+ const element = findKeepNDuplicatedPackageFilesDropdown();
+
+ expect(element.exists()).toBe(true);
+ expect(element.props('label')).toMatchInterpolatedText(KEEP_N_DUPLICATED_PACKAGE_FILES_LABEL);
+ expect(element.props('description')).toEqual(KEEP_N_DUPLICATED_PACKAGE_FILES_DESCRIPTION);
+ });
+
+ it('input event triggers a model update', () => {
+ mountComponent();
+
+ findKeepNDuplicatedPackageFilesDropdown().vm.$emit('input', 'foo');
+ expect(wrapper.emitted('input')[0][0]).toMatchObject({
+ keepNDuplicatedPackageFiles: 'foo',
+ });
+ });
+
+ it('shows the default option when none are selected', () => {
+ mountComponent({ props: { value: {} } });
+ expect(findKeepNDuplicatedPackageFilesDropdown().props('value')).toEqual('ALL_PACKAGE_FILES');
+ });
+
+ it.each`
+ isLoading | mutationLoading
+ ${true} | ${false}
+ ${true} | ${true}
+ ${false} | ${true}
+ `(
+ 'is disabled when isLoading is $isLoading and mutationLoading is $mutationLoading',
+ ({ isLoading, mutationLoading }) => {
+ mountComponent({
+ props: { isLoading, value: {} },
+ data: { mutationLoading },
+ });
+ expect(findKeepNDuplicatedPackageFilesDropdown().props('disabled')).toEqual(true);
+ },
+ );
+
+ it('has the correct formOptions', () => {
+ mountComponent();
+ expect(findKeepNDuplicatedPackageFilesDropdown().props('formOptions')).toEqual(
+ wrapper.vm.$options.formOptions.keepNDuplicatedPackageFiles,
+ );
+ });
+ });
+
+ describe('form', () => {
+ describe('actions', () => {
+ describe('submit button', () => {
+ it('has type submit', () => {
+ mountComponent();
+
+ expect(findSaveButton().attributes('type')).toBe('submit');
+ });
+
+ it.each`
+ isLoading | mutationLoading | disabled
+ ${true} | ${true} | ${true}
+ ${true} | ${false} | ${true}
+ ${false} | ${true} | ${true}
+ ${false} | ${false} | ${false}
+ `(
+ 'when isLoading is $isLoading and mutationLoading is $mutationLoading, disabled is $disabled',
+ ({ isLoading, mutationLoading, disabled }) => {
+ mountComponent({
+ props: { ...defaultProps, isLoading },
+ data: { mutationLoading },
+ });
+
+ expect(findSaveButton().props('disabled')).toBe(disabled);
+ expect(findKeepNDuplicatedPackageFilesDropdown().props('disabled')).toBe(disabled);
+ },
+ );
+
+ it.each`
+ isLoading | mutationLoading | showLoading
+ ${true} | ${true} | ${true}
+ ${true} | ${false} | ${true}
+ ${false} | ${true} | ${true}
+ ${false} | ${false} | ${false}
+ `(
+ 'when isLoading is $isLoading and mutationLoading is $mutationLoading, the loading icon shown is $showLoading',
+ ({ isLoading, mutationLoading, showLoading }) => {
+ mountComponent({
+ props: { ...defaultProps, isLoading },
+ data: { mutationLoading },
+ });
+
+ expect(findSaveButton().props('loading')).toBe(showLoading);
+ },
+ );
+ });
+ });
+
+ describe('form submit event', () => {
+ it('dispatches the correct apollo mutation', () => {
+ const mutationResolver = jest
+ .fn()
+ .mockResolvedValue(packagesCleanupPolicyMutationPayload());
+ mountComponentWithApollo({
+ mutationResolver,
+ });
+
+ findForm().trigger('submit');
+
+ expect(mutationResolver).toHaveBeenCalledWith({
+ input: {
+ keepNDuplicatedPackageFiles: 'ALL_PACKAGE_FILES',
+ projectPath: 'path',
+ },
+ });
+ });
+
+ it('tracks the submit event', () => {
+ mountComponentWithApollo({
+ mutationResolver: jest.fn().mockResolvedValue(packagesCleanupPolicyMutationPayload()),
+ });
+
+ findForm().trigger('submit');
+
+ expect(Tracking.event).toHaveBeenCalledWith(
+ undefined,
+ 'submit_packages_cleanup_form',
+ trackingPayload,
+ );
+ });
+
+ it('shows a success toast when submit succeeds', async () => {
+ mountComponentWithApollo({
+ mutationResolver: jest.fn().mockResolvedValue(packagesCleanupPolicyMutationPayload()),
+ });
+
+ await submitForm();
+
+ expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_SUCCESS_MESSAGE);
+ });
+
+ describe('when submit fails', () => {
+ it('shows an error', async () => {
+ mountComponentWithApollo({
+ mutationResolver: jest.fn().mockRejectedValue(packagesCleanupPolicyMutationPayload()),
+ });
+
+ await submitForm();
+
+ expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_ERROR_MESSAGE);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/packages_cleanup_policy_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/packages_cleanup_policy_spec.js
new file mode 100644
index 00000000000..6dfeeca6862
--- /dev/null
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/packages_cleanup_policy_spec.js
@@ -0,0 +1,81 @@
+import { GlAlert, GlSprintf } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import component from '~/packages_and_registries/settings/project/components/packages_cleanup_policy.vue';
+import PackagesCleanupPolicyForm from '~/packages_and_registries/settings/project/components/packages_cleanup_policy_form.vue';
+import { FETCH_SETTINGS_ERROR_MESSAGE } from '~/packages_and_registries/settings/project/constants';
+import packagesCleanupPolicyQuery from '~/packages_and_registries/settings/project/graphql/queries/get_packages_cleanup_policy.query.graphql';
+import SettingsBlock from '~/vue_shared/components/settings/settings_block.vue';
+
+import { packagesCleanupPolicyPayload, packagesCleanupPolicyData } from '../mock_data';
+
+Vue.use(VueApollo);
+
+describe('Packages cleanup policy project settings', () => {
+ let wrapper;
+ let fakeApollo;
+
+ const defaultProvidedValues = {
+ projectPath: 'path',
+ };
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findFormComponent = () => wrapper.findComponent(PackagesCleanupPolicyForm);
+ const findSettingsBlock = () => wrapper.findComponent(SettingsBlock);
+
+ const mountComponent = (provide = defaultProvidedValues, config) => {
+ wrapper = shallowMount(component, {
+ stubs: {
+ GlSprintf,
+ SettingsBlock,
+ },
+ provide,
+ ...config,
+ });
+ };
+
+ const mountComponentWithApollo = ({ provide = defaultProvidedValues, resolver } = {}) => {
+ const requestHandlers = [[packagesCleanupPolicyQuery, resolver]];
+
+ fakeApollo = createMockApollo(requestHandlers);
+ mountComponent(provide, {
+ apolloProvider: fakeApollo,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ fakeApollo = null;
+ });
+
+ it('renders the setting form', async () => {
+ mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(packagesCleanupPolicyPayload()),
+ });
+ await waitForPromises();
+
+ expect(findFormComponent().exists()).toBe(true);
+ expect(findFormComponent().props('value')).toEqual(packagesCleanupPolicyData);
+ expect(findSettingsBlock().exists()).toBe(true);
+ });
+
+ describe('fetchSettingsError', () => {
+ beforeEach(async () => {
+ mountComponentWithApollo({
+ resolver: jest.fn().mockRejectedValue(new Error('GraphQL error')),
+ });
+ await waitForPromises();
+ });
+
+ it('the form is hidden', () => {
+ expect(findFormComponent().exists()).toBe(false);
+ });
+
+ it('shows an alert', () => {
+ expect(findAlert().html()).toContain(FETCH_SETTINGS_ERROR_MESSAGE);
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js
index 337991dfae0..f576bc79eae 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js
@@ -1,19 +1,41 @@
import { shallowMount } from '@vue/test-utils';
import component from '~/packages_and_registries/settings/project/components/registry_settings_app.vue';
import ContainerExpirationPolicy from '~/packages_and_registries/settings/project/components/container_expiration_policy.vue';
+import PackagesCleanupPolicy from '~/packages_and_registries/settings/project/components/packages_cleanup_policy.vue';
describe('Registry Settings app', () => {
let wrapper;
+
const findContainerExpirationPolicy = () => wrapper.find(ContainerExpirationPolicy);
+ const findPackagesCleanupPolicy = () => wrapper.find(PackagesCleanupPolicy);
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
- it('renders container expiration policy component', () => {
- wrapper = shallowMount(component);
+ const mountComponent = (provide) => {
+ wrapper = shallowMount(component, {
+ provide,
+ });
+ };
- expect(findContainerExpirationPolicy().exists()).toBe(true);
- });
+ it.each`
+ showContainerRegistrySettings | showPackageRegistrySettings
+ ${true} | ${false}
+ ${true} | ${true}
+ ${false} | ${true}
+ ${false} | ${false}
+ `(
+ 'container expiration policy is $showContainerRegistrySettings and package cleanup policy is $showPackageRegistrySettings',
+ ({ showContainerRegistrySettings, showPackageRegistrySettings }) => {
+ mountComponent({
+ showContainerRegistrySettings,
+ showPackageRegistrySettings,
+ });
+
+ expect(findContainerExpirationPolicy().exists()).toBe(showContainerRegistrySettings);
+ expect(findPackagesCleanupPolicy().exists()).toBe(showPackageRegistrySettings);
+ },
+ );
});
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js b/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js
index 33406c98f4b..d4b6c66ddeb 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js
@@ -40,3 +40,33 @@ export const expirationPolicyMutationPayload = ({ override, errors = [] } = {})
},
},
});
+
+export const packagesCleanupPolicyData = {
+ keepNDuplicatedPackageFiles: 'ALL_PACKAGE_FILES',
+ nextRunAt: '2020-11-19T07:37:03.941Z',
+};
+
+export const packagesCleanupPolicyPayload = (override) => ({
+ data: {
+ project: {
+ id: '1',
+ packagesCleanupPolicy: {
+ __typename: 'PackagesCleanupPolicy',
+ ...packagesCleanupPolicyData,
+ ...override,
+ },
+ },
+ },
+});
+
+export const packagesCleanupPolicyMutationPayload = ({ override, errors = [] } = {}) => ({
+ data: {
+ updatePackagesCleanupPolicy: {
+ packagesCleanupPolicy: {
+ ...packagesCleanupPolicyData,
+ ...override,
+ },
+ errors,
+ },
+ },
+});
diff --git a/spec/frontend/packages_and_registries/shared/components/settings_block_spec.js b/spec/frontend/packages_and_registries/shared/components/settings_block_spec.js
new file mode 100644
index 00000000000..a4c1b989dac
--- /dev/null
+++ b/spec/frontend/packages_and_registries/shared/components/settings_block_spec.js
@@ -0,0 +1,43 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import SettingsBlock from '~/packages_and_registries/shared/components/settings_block.vue';
+
+describe('SettingsBlock', () => {
+ let wrapper;
+
+ const mountComponent = (propsData) => {
+ wrapper = shallowMountExtended(SettingsBlock, {
+ propsData,
+ slots: {
+ title: '<div data-testid="title-slot"></div>',
+ description: '<div data-testid="description-slot"></div>',
+ default: '<div data-testid="default-slot"></div>',
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findDefaultSlot = () => wrapper.findByTestId('default-slot');
+ const findTitleSlot = () => wrapper.findByTestId('title-slot');
+ const findDescriptionSlot = () => wrapper.findByTestId('description-slot');
+
+ it('has a default slot', () => {
+ mountComponent();
+
+ expect(findDefaultSlot().exists()).toBe(true);
+ });
+
+ it('has a title slot', () => {
+ mountComponent();
+
+ expect(findTitleSlot().exists()).toBe(true);
+ });
+
+ it('has a description slot', () => {
+ mountComponent();
+
+ expect(findDescriptionSlot().exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/pages/dashboard/todos/index/todos_spec.js b/spec/frontend/pages/dashboard/todos/index/todos_spec.js
index 3a9b59f291c..03aed7454e3 100644
--- a/spec/frontend/pages/dashboard/todos/index/todos_spec.js
+++ b/spec/frontend/pages/dashboard/todos/index/todos_spec.js
@@ -1,5 +1,4 @@
import MockAdapter from 'axios-mock-adapter';
-import $ from 'jquery';
import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import waitForPromises from 'helpers/wait_for_promises';
import '~/lib/utils/common_utils';
@@ -54,22 +53,28 @@ describe('Todos', () => {
let metakeyEvent;
beforeEach(() => {
- metakeyEvent = $.Event('click', { keyCode: 91, ctrlKey: true });
+ metakeyEvent = new MouseEvent('click', { ctrlKey: true });
windowOpenSpy = jest.spyOn(window, 'open').mockImplementation(() => {});
});
it('opens the todo url in another tab', () => {
const todoLink = todoItem.dataset.url;
- $('.todos-list .todo').trigger(metakeyEvent);
+ document.querySelectorAll('.todos-list .todo').forEach((el) => {
+ el.dispatchEvent(metakeyEvent);
+ });
expect(visitUrl).not.toHaveBeenCalled();
expect(windowOpenSpy).toHaveBeenCalledWith(todoLink, '_blank');
});
it('runs native functionality when avatar is clicked', () => {
- $('.todos-list a').on('click', (e) => e.preventDefault());
- $('.todos-list img').trigger(metakeyEvent);
+ document.querySelectorAll('.todos-list a').forEach((el) => {
+ el.addEventListener('click', (e) => e.preventDefault());
+ });
+ document.querySelectorAll('.todos-list img').forEach((el) => {
+ el.dispatchEvent(metakeyEvent);
+ });
expect(visitUrl).not.toHaveBeenCalled();
expect(windowOpenSpy).not.toHaveBeenCalled();
@@ -88,7 +93,7 @@ describe('Todos', () => {
.onDelete(path)
.replyOnce(200, { count: TEST_COUNT_BIG, done_count: TEST_DONE_COUNT_BIG });
onToggleSpy = jest.fn();
- $(document).on('todo:toggle', onToggleSpy);
+ document.addEventListener('todo:toggle', onToggleSpy);
// Act
el.click();
@@ -98,7 +103,13 @@ describe('Todos', () => {
});
it('dispatches todo:toggle', () => {
- expect(onToggleSpy).toHaveBeenCalledWith(expect.anything(), TEST_COUNT_BIG);
+ expect(onToggleSpy).toHaveBeenCalledWith(
+ expect.objectContaining({
+ detail: {
+ count: TEST_COUNT_BIG,
+ },
+ }),
+ );
});
it('updates pending text', () => {
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
index efbfd83a071..2a0fde45384 100644
--- a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
@@ -400,10 +400,6 @@ describe('ForkForm component', () => {
);
};
- beforeEach(() => {
- setupComponent();
- });
-
const selectedMockNamespaceIndex = 1;
const namespaceId = MOCK_NAMESPACES_RESPONSE[selectedMockNamespaceIndex].id;
@@ -425,10 +421,14 @@ describe('ForkForm component', () => {
it('does not make POST request', async () => {
jest.spyOn(axios, 'post');
+ setupComponent();
+
expect(axios.post).not.toHaveBeenCalled();
});
it('does not redirect the current page', async () => {
+ setupComponent();
+
await submitForm();
expect(urlUtility.redirectTo).not.toHaveBeenCalled();
@@ -452,13 +452,10 @@ describe('ForkForm component', () => {
});
describe('with valid form', () => {
- beforeEach(() => {
- fillForm();
- });
-
it('make POST request with project param', async () => {
jest.spyOn(axios, 'post');
+ setupComponent();
await submitForm();
const {
@@ -486,6 +483,7 @@ describe('ForkForm component', () => {
const webUrl = `new/fork-project`;
jest.spyOn(axios, 'post').mockResolvedValue({ data: { web_url: webUrl } });
+ setupComponent();
await submitForm();
expect(urlUtility.redirectTo).toHaveBeenCalledWith(webUrl);
@@ -496,6 +494,7 @@ describe('ForkForm component', () => {
jest.spyOn(axios, 'post').mockRejectedValue(dummyError);
+ setupComponent();
await submitForm();
expect(urlUtility.redirectTo).not.toHaveBeenCalled();
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_spec.js.snap b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_spec.js.snap
index 005b8968383..aab78c99190 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_spec.js.snap
+++ b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_spec.js.snap
@@ -85,8 +85,6 @@ exports[`Learn GitLab renders correctly 1`] = `
<div
class="gl-mb-4"
>
- <!---->
-
<div
class="flex align-items-center"
>
@@ -105,7 +103,8 @@ exports[`Learn GitLab renders correctly 1`] = `
</svg>
Invite your colleagues
-
+
+ <!---->
</span>
<!---->
@@ -114,8 +113,6 @@ exports[`Learn GitLab renders correctly 1`] = `
<div
class="gl-mb-4"
>
- <!---->
-
<div
class="flex align-items-center"
>
@@ -133,8 +130,9 @@ exports[`Learn GitLab renders correctly 1`] = `
/>
</svg>
- Create or import a repository
-
+ Create a repository
+
+ <!---->
</span>
<!---->
@@ -143,23 +141,23 @@ exports[`Learn GitLab renders correctly 1`] = `
<div
class="gl-mb-4"
>
- <!---->
-
<div
class="flex align-items-center"
>
- <a
- class="gl-link"
- data-testid="uncompleted-learn-gitlab-link"
- data-track-action="click_link"
- data-track-label="Set up CI/CD"
- href="http://example.com/"
- target="_self"
- >
-
- Set up CI/CD
-
- </a>
+ <div>
+ <a
+ class="gl-link"
+ data-testid="uncompleted-learn-gitlab-link"
+ data-track-action="click_link"
+ data-track-label="set_up_your_first_project_s_ci_cd"
+ href="http://example.com/"
+ target="_self"
+ >
+ Set up your first project's CI/CD
+ </a>
+
+ <!---->
+ </div>
<!---->
</div>
@@ -167,24 +165,24 @@ exports[`Learn GitLab renders correctly 1`] = `
<div
class="gl-mb-4"
>
- <!---->
-
<div
class="flex align-items-center"
>
- <a
- class="gl-link"
- data-testid="uncompleted-learn-gitlab-link"
- data-track-action="click_link"
- data-track-label="Start a free Ultimate trial"
- href="http://example.com/"
- rel="noopener noreferrer"
- target="_blank"
- >
-
- Start a free Ultimate trial
-
- </a>
+ <div>
+ <a
+ class="gl-link"
+ data-testid="uncompleted-learn-gitlab-link"
+ data-track-action="click_link"
+ data-track-label="start_a_free_trial_of_gitlab_ultimate"
+ href="http://example.com/"
+ rel="noopener noreferrer"
+ target="_blank"
+ >
+ Start a free trial of GitLab Ultimate
+ </a>
+
+ <!---->
+ </div>
<!---->
</div>
@@ -193,30 +191,30 @@ exports[`Learn GitLab renders correctly 1`] = `
class="gl-mb-4"
>
<div
- class="gl-font-style-italic gl-text-gray-500"
- data-testid="trial-only"
- >
-
- Trial only
-
- </div>
-
- <div
class="flex align-items-center"
>
- <a
- class="gl-link"
- data-testid="uncompleted-learn-gitlab-link"
- data-track-action="click_link"
- data-track-label="Add code owners"
- href="http://example.com/"
- rel="noopener noreferrer"
- target="_blank"
- >
-
- Add code owners
-
- </a>
+ <div>
+ <a
+ class="gl-link"
+ data-testid="uncompleted-learn-gitlab-link"
+ data-track-action="click_link"
+ data-track-label="add_code_owners"
+ href="http://example.com/"
+ rel="noopener noreferrer"
+ target="_blank"
+ >
+ Add code owners
+ </a>
+
+ <span
+ class="gl-font-style-italic gl-text-gray-500"
+ data-testid="trial-only"
+ >
+
+ - Included in trial
+
+ </span>
+ </div>
<!---->
</div>
@@ -225,30 +223,30 @@ exports[`Learn GitLab renders correctly 1`] = `
class="gl-mb-4"
>
<div
- class="gl-font-style-italic gl-text-gray-500"
- data-testid="trial-only"
- >
-
- Trial only
-
- </div>
-
- <div
class="flex align-items-center"
>
- <a
- class="gl-link"
- data-testid="uncompleted-learn-gitlab-link"
- data-track-action="click_link"
- data-track-label="Add merge request approval"
- href="http://example.com/"
- rel="noopener noreferrer"
- target="_blank"
- >
-
- Add merge request approval
-
- </a>
+ <div>
+ <a
+ class="gl-link"
+ data-testid="uncompleted-learn-gitlab-link"
+ data-track-action="click_link"
+ data-track-label="enable_require_merge_approvals"
+ href="http://example.com/"
+ rel="noopener noreferrer"
+ target="_blank"
+ >
+ Enable require merge approvals
+ </a>
+
+ <span
+ class="gl-font-style-italic gl-text-gray-500"
+ data-testid="trial-only"
+ >
+
+ - Included in trial
+
+ </span>
+ </div>
<!---->
</div>
@@ -290,23 +288,23 @@ exports[`Learn GitLab renders correctly 1`] = `
<div
class="gl-mb-4"
>
- <!---->
-
<div
class="flex align-items-center"
>
- <a
- class="gl-link"
- data-testid="uncompleted-learn-gitlab-link"
- data-track-action="click_link"
- data-track-label="Create an issue"
- href="http://example.com/"
- target="_self"
- >
-
- Create an issue
-
- </a>
+ <div>
+ <a
+ class="gl-link"
+ data-testid="uncompleted-learn-gitlab-link"
+ data-track-action="click_link"
+ data-track-label="create_an_issue"
+ href="http://example.com/"
+ target="_self"
+ >
+ Create an issue
+ </a>
+
+ <!---->
+ </div>
<!---->
</div>
@@ -314,23 +312,23 @@ exports[`Learn GitLab renders correctly 1`] = `
<div
class="gl-mb-4"
>
- <!---->
-
<div
class="flex align-items-center"
>
- <a
- class="gl-link"
- data-testid="uncompleted-learn-gitlab-link"
- data-track-action="click_link"
- data-track-label="Submit a merge request"
- href="http://example.com/"
- target="_self"
- >
-
- Submit a merge request
-
- </a>
+ <div>
+ <a
+ class="gl-link"
+ data-testid="uncompleted-learn-gitlab-link"
+ data-track-action="click_link"
+ data-track-label="submit_a_merge_request_mr"
+ href="http://example.com/"
+ target="_self"
+ >
+ Submit a merge request (MR)
+ </a>
+
+ <!---->
+ </div>
<!---->
</div>
@@ -372,24 +370,24 @@ exports[`Learn GitLab renders correctly 1`] = `
<div
class="gl-mb-4"
>
- <!---->
-
<div
class="flex align-items-center"
>
- <a
- class="gl-link"
- data-testid="uncompleted-learn-gitlab-link"
- data-track-action="click_link"
- data-track-label="Run a Security scan using CI/CD"
- href="https://docs.gitlab.com/ee/foobar/"
- rel="noopener noreferrer"
- target="_blank"
- >
-
- Run a Security scan using CI/CD
-
- </a>
+ <div>
+ <a
+ class="gl-link"
+ data-testid="uncompleted-learn-gitlab-link"
+ data-track-action="click_link"
+ data-track-label="run_a_security_scan_using_ci_cd"
+ href="https://docs.gitlab.com/ee/foobar/"
+ rel="noopener noreferrer"
+ target="_blank"
+ >
+ Run a Security scan using CI/CD
+ </a>
+
+ <!---->
+ </div>
<!---->
</div>
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_info_card_spec.js b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_info_card_spec.js
deleted file mode 100644
index ad4bc826a9d..00000000000
--- a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_info_card_spec.js
+++ /dev/null
@@ -1,57 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import LearnGitlabInfoCard from '~/pages/projects/learn_gitlab/components/learn_gitlab_info_card.vue';
-
-const defaultProps = {
- title: 'Create Repository',
- description: 'Some description',
- actionLabel: 'Create Repository now',
- url: 'https://example.com',
- completed: false,
- svg: 'https://example.com/illustration.svg',
-};
-
-describe('Learn GitLab Info Card', () => {
- let wrapper;
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- const createWrapper = (props = {}) => {
- wrapper = shallowMount(LearnGitlabInfoCard, {
- propsData: { ...defaultProps, ...props },
- });
- };
-
- it('renders no icon when not completed', () => {
- createWrapper({ completed: false });
-
- expect(wrapper.find('[data-testid="completed-icon"]').exists()).toBe(false);
- });
-
- it('renders the completion icon when completed', () => {
- createWrapper({ completed: true });
-
- expect(wrapper.find('[data-testid="completed-icon"]').exists()).toBe(true);
- });
-
- it('renders no trial only when it is not required', () => {
- createWrapper();
-
- expect(wrapper.find('[data-testid="trial-only"]').exists()).toBe(false);
- });
-
- it('renders trial only when trial is required', () => {
- createWrapper({ trialRequired: true });
-
- expect(wrapper.find('[data-testid="trial-only"]').exists()).toBe(true);
- });
-
- it('renders completion icon when completed a trial-only feature', () => {
- createWrapper({ trialRequired: true, completed: true });
-
- expect(wrapper.find('[data-testid="trial-only"]').exists()).toBe(false);
- expect(wrapper.find('[data-testid="completed-icon"]').exists()).toBe(true);
- });
-});
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js
index d9aff37f703..897cbf5eaa4 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js
+++ b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js
@@ -119,7 +119,7 @@ describe('Learn GitLab Section Link', () => {
findUncompletedLink().trigger('click');
expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_link', {
- label: 'Run a Security scan using CI/CD',
+ label: 'run_a_security_scan_using_ci_cd',
});
unmockTracking();
@@ -164,7 +164,7 @@ describe('Learn GitLab Section Link', () => {
triggerEvent(openInviteMembesrModalLink().element);
expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_link', {
- label: 'Invite your colleagues',
+ label: 'invite_your_colleagues',
property: 'Growth::Activation::Experiment::InviteForHelpContinuousOnboarding',
});
@@ -203,7 +203,7 @@ describe('Learn GitLab Section Link', () => {
videoTutorialLink().trigger('click');
expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_video_link', {
- label: 'Add code owners',
+ label: 'add_code_owners',
property: 'Growth::Conversion::Experiment::LearnGitLab',
context: {
data: {
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_trial_card_spec.js b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_trial_card_spec.js
new file mode 100644
index 00000000000..6ab57e31fed
--- /dev/null
+++ b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_trial_card_spec.js
@@ -0,0 +1,12 @@
+import { shallowMount } from '@vue/test-utils';
+import IncludedInTrialIndicator from '~/pages/projects/learn_gitlab/components/included_in_trial_indicator.vue';
+
+describe('Learn GitLab Trial Card', () => {
+ it('renders correctly', () => {
+ const wrapper = shallowMount(IncludedInTrialIndicator);
+
+ expect(wrapper.text()).toEqual('- Included in trial');
+
+ wrapper.destroy();
+ });
+});
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
index d5b4b3c22d8..99df5b58d90 100644
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
@@ -31,10 +31,10 @@ describe('Interval Pattern Input Component', () => {
wrapper.findAll('input[type="radio"]').wrappers.find((x) => x.element.checked);
const findIcon = () => wrapper.findComponent(GlIcon);
const findSelectedRadioKey = () => findSelectedRadio()?.attributes('data-testid');
- const selectEveryDayRadio = () => findEveryDayRadio().trigger('click');
- const selectEveryWeekRadio = () => findEveryWeekRadio().trigger('click');
- const selectEveryMonthRadio = () => findEveryMonthRadio().trigger('click');
- const selectCustomRadio = () => findCustomRadio().trigger('click');
+ const selectEveryDayRadio = () => findEveryDayRadio().setChecked(true);
+ const selectEveryWeekRadio = () => findEveryWeekRadio().setChecked(true);
+ const selectEveryMonthRadio = () => findEveryMonthRadio().setChecked(true);
+ const selectCustomRadio = () => findCustomRadio().setChecked(true);
const createWrapper = (props = {}, data = {}) => {
if (wrapper) {
diff --git a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
index 46f83ac89e5..85660d09baa 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
@@ -51,6 +51,7 @@ const defaultProps = {
requestCveAvailable: true,
confirmationPhrase: 'my-fake-project',
showVisibilityConfirmModal: false,
+ membersPagePath: '/my-fake-project/-/project_members',
};
const FEATURE_ACCESS_LEVEL_ANONYMOUS = 30;
@@ -59,7 +60,7 @@ describe('Settings Panel', () => {
let wrapper;
const mountComponent = (
- { currentSettings = {}, glFeatures = {}, ...customProps } = {},
+ { currentSettings = {}, glFeatures = {}, stubs = {}, ...customProps } = {},
mountFn = shallowMount,
) => {
const propsData = {
@@ -76,6 +77,7 @@ describe('Settings Panel', () => {
...glFeatures,
},
},
+ stubs,
});
};
@@ -176,7 +178,7 @@ describe('Settings Panel', () => {
);
it('should set the visibility level description based upon the selected visibility level', () => {
- wrapper = mountComponent();
+ wrapper = mountComponent({ stubs: { GlSprintf } });
findProjectVisibilityLevelInput().setValue(visibilityOptions.INTERNAL);
diff --git a/spec/frontend/pages/shared/wikis/components/wiki_content_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_content_spec.js
index 365bb878485..108f816fe01 100644
--- a/spec/frontend/pages/shared/wikis/components/wiki_content_spec.js
+++ b/spec/frontend/pages/shared/wikis/components/wiki_content_spec.js
@@ -7,8 +7,10 @@ import { renderGFM } from '~/pages/shared/wikis/render_gfm_facade';
import axios from '~/lib/utils/axios_utils';
import httpStatus from '~/lib/utils/http_status';
import waitForPromises from 'helpers/wait_for_promises';
+import { handleLocationHash } from '~/lib/utils/common_utils';
jest.mock('~/pages/shared/wikis/render_gfm_facade');
+jest.mock('~/lib/utils/common_utils');
describe('pages/shared/wikis/components/wiki_content', () => {
const PATH = '/test';
@@ -76,6 +78,12 @@ describe('pages/shared/wikis/components/wiki_content', () => {
expect(renderGFM).toHaveBeenCalledWith(wrapper.element);
});
+
+ it('handles hash after render', async () => {
+ await nextTick();
+
+ expect(handleLocationHash).toHaveBeenCalled();
+ });
});
describe('when loading content fails', () => {
diff --git a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
index d7f8dc3c98e..a5db10d106d 100644
--- a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
+++ b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
@@ -1,12 +1,14 @@
import { nextTick } from 'vue';
-import { GlAlert, GlButton } from '@gitlab/ui';
+import { GlAlert, GlButton, GlFormInput, GlFormGroup } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { mockTracking } from 'helpers/tracking_helper';
+import { stubComponent } from 'helpers/stub_component';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import ContentEditor from '~/content_editor/components/content_editor.vue';
+import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import WikiForm from '~/pages/shared/wikis/components/wiki_form.vue';
import {
CONTENT_EDITOR_LOADED_ACTION,
@@ -37,6 +39,7 @@ describe('WikiForm', () => {
const findMarkdownHelpLink = () => wrapper.findByTestId('wiki-markdown-help-link');
const findContentEditor = () => wrapper.findComponent(ContentEditor);
const findClassicEditor = () => wrapper.findComponent(MarkdownField);
+ const findLocalStorageSync = () => wrapper.find(LocalStorageSync);
const setFormat = (value) => {
const format = findFormat();
@@ -103,6 +106,9 @@ describe('WikiForm', () => {
MarkdownField,
GlAlert,
GlButton,
+ LocalStorageSync: stubComponent(LocalStorageSync),
+ GlFormInput,
+ GlFormGroup,
},
}),
);
@@ -128,7 +134,7 @@ describe('WikiForm', () => {
`(
'updates the commit message to $message when title is $title and persisted=$persisted',
async ({ title, message, persisted }) => {
- createWrapper({ persisted });
+ createWrapper({ persisted, mountFn: mount });
await findTitle().setValue(title);
@@ -137,7 +143,7 @@ describe('WikiForm', () => {
);
it('sets the commit message to "Update My page" when the page first loads when persisted', async () => {
- createWrapper({ persisted: true });
+ createWrapper({ persisted: true, mountFn: mount });
await nextTick();
@@ -157,7 +163,7 @@ describe('WikiForm', () => {
${'asciidoc'} | ${false} | ${'hides'}
${'org'} | ${false} | ${'hides'}
`('$action preview in the markdown field when format is $format', async ({ format, enabled }) => {
- createWrapper();
+ createWrapper({ mountFn: mount });
await setFormat(format);
@@ -254,7 +260,7 @@ describe('WikiForm', () => {
`(
"when title='$title', content='$content', then the button is $buttonState'",
async ({ title, content, disabledAttr }) => {
- createWrapper();
+ createWrapper({ mountFn: mount });
await findTitle().setValue(title);
await findContent().setValue(content);
@@ -291,7 +297,7 @@ describe('WikiForm', () => {
describe('toggle editing mode control', () => {
beforeEach(() => {
- createWrapper();
+ createWrapper({ mountFn: mount });
});
it.each`
@@ -330,6 +336,19 @@ describe('WikiForm', () => {
});
});
+ describe('markdown editor type persistence', () => {
+ it('loads content editor by default if it is persisted in local storage', async () => {
+ expect(findClassicEditor().exists()).toBe(true);
+ expect(findContentEditor().exists()).toBe(false);
+
+ // enable content editor
+ await findLocalStorageSync().vm.$emit('input', true);
+
+ expect(findContentEditor().exists()).toBe(true);
+ expect(findClassicEditor().exists()).toBe(false);
+ });
+ });
+
describe('when content editor is active', () => {
let mockContentEditor;
@@ -374,7 +393,7 @@ describe('WikiForm', () => {
});
describe('wiki content editor', () => {
- describe('clicking "use new editor": editor fails to load', () => {
+ describe('clicking "Edit rich text": editor fails to load', () => {
beforeEach(async () => {
createWrapper({ mountFn: mount });
mock.onPost(/preview-markdown/).reply(400);
@@ -401,7 +420,7 @@ describe('WikiForm', () => {
});
});
- describe('clicking "use new editor": editor loads successfully', () => {
+ describe('clicking "Edit rich text": editor loads successfully', () => {
beforeEach(async () => {
createWrapper({ persisted: true, mountFn: mount });
diff --git a/spec/frontend/pdf/index_spec.js b/spec/frontend/pdf/index_spec.js
index 2b0932493bb..98946412264 100644
--- a/spec/frontend/pdf/index_spec.js
+++ b/spec/frontend/pdf/index_spec.js
@@ -1,48 +1,33 @@
-import Vue from 'vue';
-
+import { shallowMount } from '@vue/test-utils';
import { FIXTURES_PATH } from 'spec/test_constants';
import PDFLab from '~/pdf/index.vue';
-jest.mock('pdfjs-dist/webpack', () => {
- return { default: jest.requireActual('pdfjs-dist/build/pdf') };
-});
-
-const pdf = `${FIXTURES_PATH}/blob/pdf/test.pdf`;
+describe('PDFLab component', () => {
+ let wrapper;
-const Component = Vue.extend(PDFLab);
+ const mountComponent = ({ pdf }) => shallowMount(PDFLab, { propsData: { pdf } });
-describe('PDF component', () => {
- let vm;
+ afterEach(() => {
+ wrapper.destroy();
+ });
describe('without PDF data', () => {
beforeEach(() => {
- vm = new Component({
- propsData: {
- pdf: '',
- },
- });
-
- vm.$mount();
+ wrapper = mountComponent({ pdf: '' });
});
it('does not render', () => {
- expect(vm.$el.tagName).toBeUndefined();
+ expect(wrapper.isVisible()).toBe(false);
});
});
describe('with PDF data', () => {
beforeEach(() => {
- vm = new Component({
- propsData: {
- pdf,
- },
- });
-
- vm.$mount();
+ wrapper = mountComponent({ pdf: `${FIXTURES_PATH}/blob/pdf/test.pdf` });
});
- it('renders pdf component', () => {
- expect(vm.$el.tagName).toBeDefined();
+ it('renders', () => {
+ expect(wrapper.isVisible()).toBe(true);
});
});
});
diff --git a/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js b/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js
index ae19ed9ab02..82ac390971d 100644
--- a/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js
+++ b/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js
@@ -152,4 +152,26 @@ describe('CI Lint Results', () => {
expect(findAfterScripts()).toHaveLength(filterEmptyScripts('afterScript').length);
});
});
+
+ describe('Hide Alert', () => {
+ it('hides alert on success if hide-alert prop is true', async () => {
+ await createComponent({ dryRun: true, hideAlert: true }, mount);
+
+ expect(findStatus().exists()).toBe(false);
+ });
+
+ it('hides alert on error if hide-alert prop is true', async () => {
+ await createComponent(
+ {
+ hideAlert: true,
+ isValid: false,
+ errors: mockErrors,
+ warnings: mockWarnings,
+ },
+ mount,
+ );
+
+ expect(findStatus().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
index 3ecf6472544..87a7f07f7d4 100644
--- a/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
+++ b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
@@ -1,6 +1,8 @@
-import { GlAlert, GlLoadingIcon, GlTabs } from '@gitlab/ui';
-import { shallowMount, mount } from '@vue/test-utils';
+import { GlAlert, GlBadge, GlLoadingIcon, GlTabs } from '@gitlab/ui';
+import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
import Vue, { nextTick } from 'vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
import setWindowLocation from 'helpers/set_window_location_helper';
import CiConfigMergedPreview from '~/pipeline_editor/components/editor/ci_config_merged_preview.vue';
import CiLint from '~/pipeline_editor/components/lint/ci_lint.vue';
@@ -15,9 +17,21 @@ import {
EDITOR_APP_STATUS_INVALID,
EDITOR_APP_STATUS_VALID,
TAB_QUERY_PARAM,
+ VALIDATE_TAB,
+ VALIDATE_TAB_BADGE_DISMISSED_KEY,
} from '~/pipeline_editor/constants';
import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
-import { mockLintResponse, mockLintResponseWithoutMerged, mockCiYml } from '../mock_data';
+import getBlobContent from '~/pipeline_editor/graphql/queries/blob_content.query.graphql';
+import {
+ mockBlobContentQueryResponse,
+ mockCiLintPath,
+ mockCiYml,
+ mockLintResponse,
+ mockLintResponseWithoutMerged,
+} from '../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
Vue.config.ignoredElements = ['gl-emoji'];
@@ -33,11 +47,13 @@ describe('Pipeline editor tabs component', () => {
provide = {},
appStatus = EDITOR_APP_STATUS_VALID,
mountFn = shallowMount,
+ options = {},
} = {}) => {
wrapper = mountFn(PipelineEditorTabs, {
propsData: {
ciConfigData: mockLintResponse,
ciFileContent: mockCiYml,
+ currentTab: CREATE_TAB,
isNewCiConfigFile: true,
showDrawer: false,
...props,
@@ -47,12 +63,34 @@ describe('Pipeline editor tabs component', () => {
appStatus,
};
},
- provide: { ...provide },
+ provide: {
+ ciLintPath: mockCiLintPath,
+ ...provide,
+ },
stubs: {
TextEditor: MockTextEditor,
EditorTab,
},
listeners,
+ ...options,
+ });
+ };
+
+ let mockBlobContentData;
+ let mockApollo;
+
+ const createComponentWithApollo = ({ props, provide = {}, mountFn = shallowMount } = {}) => {
+ const handlers = [[getBlobContent, mockBlobContentData]];
+ mockApollo = createMockApollo(handlers);
+
+ createComponent({
+ props,
+ provide,
+ mountFn,
+ options: {
+ localVue,
+ apolloProvider: mockApollo,
+ },
});
};
@@ -63,6 +101,7 @@ describe('Pipeline editor tabs component', () => {
const findVisualizationTab = () => wrapper.find('[data-testid="visualization-tab"]');
const findAlert = () => wrapper.findComponent(GlAlert);
+ const findBadge = () => wrapper.findComponent(GlBadge);
const findCiLint = () => wrapper.findComponent(CiLint);
const findCiValidate = () => wrapper.findComponent(CiValidate);
const findGlTabs = () => wrapper.findComponent(GlTabs);
@@ -72,6 +111,10 @@ describe('Pipeline editor tabs component', () => {
const findMergedPreview = () => wrapper.findComponent(CiConfigMergedPreview);
const findWalkthroughPopover = () => wrapper.findComponent(WalkthroughPopover);
+ beforeEach(() => {
+ mockBlobContentData = jest.fn();
+ });
+
afterEach(() => {
wrapper.destroy();
});
@@ -114,37 +157,73 @@ describe('Pipeline editor tabs component', () => {
describe('validate tab', () => {
describe('with simulatePipeline feature flag ON', () => {
- describe('while loading', () => {
+ describe('after loading', () => {
beforeEach(() => {
createComponent({
- appStatus: EDITOR_APP_STATUS_LOADING,
- provide: {
- glFeatures: {
- simulatePipeline: true,
- },
- },
+ provide: { glFeatures: { simulatePipeline: true } },
});
});
- it('displays a loading icon if the lint query is loading', () => {
- expect(findLoadingIcon().exists()).toBe(true);
- });
-
- it('does not display the validate component', () => {
- expect(findCiValidate().exists()).toBe(false);
+ it('displays the tab and the validate component', () => {
+ expect(findValidateTab().exists()).toBe(true);
+ expect(findCiValidate().exists()).toBe(true);
});
});
- describe('after loading', () => {
- beforeEach(() => {
- createComponent({
- provide: { glFeatures: { simulatePipeline: true } },
+ describe('NEW badge', () => {
+ describe('default', () => {
+ beforeEach(() => {
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
+ createComponentWithApollo({
+ mountFn: mount,
+ props: {
+ currentTab: VALIDATE_TAB,
+ },
+ provide: {
+ glFeatures: { simulatePipeline: true },
+ ciConfigPath: '/path/to/ci-config',
+ currentBranch: 'main',
+ projectFullPath: '/path/to/project',
+ simulatePipelineHelpPagePath: 'path/to/help/page',
+ validateTabIllustrationPath: 'path/to/svg',
+ },
+ });
+ });
+
+ it('renders badge by default', () => {
+ expect(findBadge().exists()).toBe(true);
+ expect(findBadge().text()).toBe(wrapper.vm.$options.i18n.new);
+ });
+
+ it('hides badge when moving away from the validate tab', async () => {
+ expect(findBadge().exists()).toBe(true);
+
+ await findEditorTab().vm.$emit('click');
+
+ expect(findBadge().exists()).toBe(false);
});
});
- it('displays the tab and the validate component', () => {
- expect(findValidateTab().exists()).toBe(true);
- expect(findCiValidate().exists()).toBe(true);
+ describe('if badge has been dismissed before', () => {
+ beforeEach(() => {
+ localStorage.setItem(VALIDATE_TAB_BADGE_DISMISSED_KEY, 'true');
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
+ createComponentWithApollo({
+ mountFn: mount,
+ provide: {
+ glFeatures: { simulatePipeline: true },
+ ciConfigPath: '/path/to/ci-config',
+ currentBranch: 'main',
+ projectFullPath: '/path/to/project',
+ simulatePipelineHelpPagePath: 'path/to/help/page',
+ validateTabIllustrationPath: 'path/to/svg',
+ },
+ });
+ });
+
+ it('does not render badge if it has been dismissed before', () => {
+ expect(findBadge().exists()).toBe(false);
+ });
});
});
});
@@ -181,7 +260,6 @@ describe('Pipeline editor tabs component', () => {
expect(findCiLint().exists()).toBe(false);
});
});
-
describe('after loading', () => {
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/pipeline_editor/components/popovers/validate_pipeline_popover_spec.js b/spec/frontend/pipeline_editor/components/popovers/validate_pipeline_popover_spec.js
new file mode 100644
index 00000000000..97f785a71bc
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/popovers/validate_pipeline_popover_spec.js
@@ -0,0 +1,43 @@
+import { GlLink, GlSprintf } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ValidatePopover from '~/pipeline_editor/components/popovers/validate_pipeline_popover.vue';
+import { VALIDATE_TAB_FEEDBACK_URL } from '~/pipeline_editor/constants';
+import { mockSimulatePipelineHelpPagePath } from '../../mock_data';
+
+describe('ValidatePopover component', () => {
+ let wrapper;
+
+ const createComponent = ({ stubs } = {}) => {
+ wrapper = shallowMountExtended(ValidatePopover, {
+ provide: {
+ simulatePipelineHelpPagePath: mockSimulatePipelineHelpPagePath,
+ },
+ stubs,
+ });
+ };
+
+ const findHelpLink = () => wrapper.findByTestId('help-link');
+ const findFeedbackLink = () => wrapper.findByTestId('feedback-link');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('template', () => {
+ beforeEach(async () => {
+ createComponent({
+ stubs: { GlLink, GlSprintf },
+ });
+ });
+
+ it('renders help link', () => {
+ expect(findHelpLink().exists()).toBe(true);
+ expect(findHelpLink().attributes('href')).toBe(mockSimulatePipelineHelpPagePath);
+ });
+
+ it('renders feedback link', () => {
+ expect(findFeedbackLink().exists()).toBe(true);
+ expect(findFeedbackLink().attributes('href')).toBe(VALIDATE_TAB_FEEDBACK_URL);
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js b/spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js
index 6206a0f6aed..3a40ce32a24 100644
--- a/spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js
+++ b/spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js
@@ -1,4 +1,4 @@
-import { GlAlert, GlTabs } from '@gitlab/ui';
+import { GlAlert, GlBadge, GlTabs } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import EditorTab from '~/pipeline_editor/components/ui/editor_tab.vue';
@@ -30,10 +30,10 @@ describe('~/pipeline_editor/components/ui/editor_tab.vue', () => {
},
template: `
<gl-tabs>
- <editor-tab :title-link-attributes="{ 'data-testid': 'tab1-btn' }" :lazy="true">
+ <editor-tab title="Tab 1" :title-link-attributes="{ 'data-testid': 'tab1-btn' }" :lazy="true">
<mock-child content="${mockContent1}"/>
</editor-tab>
- <editor-tab :title-link-attributes="{ 'data-testid': 'tab2-btn' }" :lazy="true">
+ <editor-tab title="Tab 2" :title-link-attributes="{ 'data-testid': 'tab2-btn' }" :lazy="true" badge-title="NEW">
<mock-child content="${mockContent2}"/>
</editor-tab>
</gl-tabs>
@@ -46,7 +46,10 @@ describe('~/pipeline_editor/components/ui/editor_tab.vue', () => {
const createWrapper = ({ props } = {}) => {
wrapper = mount(EditorTab, {
- propsData: props,
+ propsData: {
+ title: 'Tab 1',
+ ...props,
+ },
slots: {
default: MockSourceEditor,
},
@@ -55,6 +58,7 @@ describe('~/pipeline_editor/components/ui/editor_tab.vue', () => {
const findSlotComponent = () => wrapper.findComponent(MockSourceEditor);
const findAlert = () => wrapper.findComponent(GlAlert);
+ const findBadges = () => wrapper.findAll(GlBadge);
beforeEach(() => {
mockChildMounted = jest.fn();
@@ -182,4 +186,15 @@ describe('~/pipeline_editor/components/ui/editor_tab.vue', () => {
expect(mockChildMounted).toHaveBeenNthCalledWith(2, mockContent2);
});
});
+
+ describe('valid state', () => {
+ beforeEach(() => {
+ createMockedWrapper();
+ });
+
+ it('renders correct number of badges', async () => {
+ expect(findBadges()).toHaveLength(1);
+ expect(findBadges().at(0).text()).toBe('NEW');
+ });
+ });
});
diff --git a/spec/frontend/pipeline_editor/components/validate/ci_validate_spec.js b/spec/frontend/pipeline_editor/components/validate/ci_validate_spec.js
index 25972317593..f5f01b675b2 100644
--- a/spec/frontend/pipeline_editor/components/validate/ci_validate_spec.js
+++ b/spec/frontend/pipeline_editor/components/validate/ci_validate_spec.js
@@ -1,40 +1,279 @@
-import { GlButton, GlDropdown } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlAlert, GlDropdown, GlIcon, GlLoadingIcon, GlPopover } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import CiLintResults from '~/pipeline_editor/components/lint/ci_lint_results.vue';
import CiValidate, { i18n } from '~/pipeline_editor/components/validate/ci_validate.vue';
+import ValidatePipelinePopover from '~/pipeline_editor/components/popovers/validate_pipeline_popover.vue';
+import getBlobContent from '~/pipeline_editor/graphql/queries/blob_content.query.graphql';
+import lintCIMutation from '~/pipeline_editor/graphql/mutations/client/lint_ci.mutation.graphql';
+import {
+ mockBlobContentQueryResponse,
+ mockCiLintPath,
+ mockCiYml,
+ mockSimulatePipelineHelpPagePath,
+} from '../../mock_data';
+import { mockLintDataError, mockLintDataValid } from '../../../ci_lint/mock_data';
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
describe('Pipeline Editor Validate Tab', () => {
let wrapper;
+ let mockApollo;
+ let mockBlobContentData;
- const createComponent = ({ stubs } = {}) => {
- wrapper = shallowMount(CiValidate, {
+ const createComponent = ({
+ props,
+ stubs,
+ options,
+ isBlobLoading = false,
+ isSimulationLoading = false,
+ } = {}) => {
+ wrapper = shallowMountExtended(CiValidate, {
+ propsData: {
+ ciFileContent: mockCiYml,
+ ...props,
+ },
provide: {
+ ciConfigPath: '/path/to/ci-config',
+ ciLintPath: mockCiLintPath,
+ currentBranch: 'main',
+ projectFullPath: '/path/to/project',
validateTabIllustrationPath: '/path/to/img',
+ simulatePipelineHelpPagePath: mockSimulatePipelineHelpPagePath,
+ },
+ stubs,
+ mocks: {
+ $apollo: {
+ queries: {
+ initialBlobContent: {
+ loading: isBlobLoading,
+ },
+ },
+ mutations: {
+ lintCiMutation: {
+ loading: isSimulationLoading,
+ },
+ },
+ },
},
+ ...options,
+ });
+ };
+
+ const createComponentWithApollo = ({ props, stubs } = {}) => {
+ const handlers = [[getBlobContent, mockBlobContentData]];
+ mockApollo = createMockApollo(handlers);
+
+ createComponent({
+ props,
stubs,
+ options: {
+ localVue,
+ apolloProvider: mockApollo,
+ mocks: {},
+ },
});
};
- const findCta = () => wrapper.findComponent(GlButton);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findCancelBtn = () => wrapper.findByTestId('cancel-simulation');
+ const findContentChangeStatus = () => wrapper.findByTestId('content-status');
+ const findCta = () => wrapper.findByTestId('simulate-pipeline-button');
+ const findDisabledCtaTooltip = () => wrapper.findByTestId('cta-tooltip');
+ const findHelpIcon = () => wrapper.findComponent(GlIcon);
+ const findIllustration = () => wrapper.findByRole('img');
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findPipelineSource = () => wrapper.findComponent(GlDropdown);
+ const findPopover = () => wrapper.findComponent(GlPopover);
+ const findCiLintResults = () => wrapper.findComponent(CiLintResults);
+ const findResultsCta = () => wrapper.findByTestId('resimulate-pipeline-button');
+
+ beforeEach(() => {
+ mockBlobContentData = jest.fn();
+ });
afterEach(() => {
wrapper.destroy();
});
- describe('template', () => {
+ describe('while initial CI content is loading', () => {
beforeEach(() => {
- createComponent();
+ createComponent({ isBlobLoading: true });
+ });
+
+ it('renders disabled CTA with tooltip', () => {
+ expect(findCta().props('disabled')).toBe(true);
+ expect(findDisabledCtaTooltip().exists()).toBe(true);
+ });
+ });
+
+ describe('after initial CI content is loaded', () => {
+ beforeEach(async () => {
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
+ await createComponentWithApollo({ stubs: { GlPopover, ValidatePipelinePopover } });
});
it('renders disabled pipeline source dropdown', () => {
expect(findPipelineSource().exists()).toBe(true);
expect(findPipelineSource().attributes('text')).toBe(i18n.pipelineSourceDefault);
- expect(findPipelineSource().attributes('disabled')).toBe('true');
+ expect(findPipelineSource().props('disabled')).toBe(true);
});
- it('renders CTA', () => {
+ it('renders enabled CTA without tooltip', () => {
expect(findCta().exists()).toBe(true);
- expect(findCta().text()).toBe(i18n.cta);
+ expect(findCta().props('disabled')).toBe(false);
+ expect(findDisabledCtaTooltip().exists()).toBe(false);
+ });
+
+ it('popover is set to render when hovering over help icon', () => {
+ expect(findPopover().props('target')).toBe(findHelpIcon().attributes('id'));
+ expect(findPopover().props('triggers')).toBe('hover focus');
+ });
+ });
+
+ describe('simulating the pipeline', () => {
+ beforeEach(async () => {
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
+ await createComponentWithApollo();
+
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockLintDataValid);
+ });
+
+ it('renders loading state while simulation is ongoing', async () => {
+ findCta().vm.$emit('click');
+ await nextTick();
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(findCancelBtn().exists()).toBe(true);
+ expect(findCta().props('loading')).toBe(true);
+ });
+
+ it('calls mutation with the correct input', async () => {
+ await findCta().vm.$emit('click');
+
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(1);
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
+ mutation: lintCIMutation,
+ variables: {
+ dry_run: true,
+ content: mockCiYml,
+ endpoint: mockCiLintPath,
+ },
+ });
+ });
+
+ describe('when results are successful', () => {
+ beforeEach(async () => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockLintDataValid);
+ await findCta().vm.$emit('click');
+ });
+
+ it('renders success alert', () => {
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().attributes('variant')).toBe('success');
+ expect(findAlert().attributes('title')).toBe(i18n.successAlertTitle);
+ });
+
+ it('does not render content change status or CTA for results page', () => {
+ expect(findContentChangeStatus().exists()).toBe(false);
+ expect(findResultsCta().exists()).toBe(false);
+ });
+
+ it('renders CI lint results with correct props', () => {
+ expect(findCiLintResults().exists()).toBe(true);
+ expect(findCiLintResults().props()).toMatchObject({
+ dryRun: true,
+ hideAlert: true,
+ isValid: true,
+ jobs: mockLintDataValid.data.lintCI.jobs,
+ });
+ });
+ });
+
+ describe('when results have errors', () => {
+ beforeEach(async () => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockLintDataError);
+ await findCta().vm.$emit('click');
+ });
+
+ it('renders error alert', () => {
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().attributes('variant')).toBe('danger');
+ expect(findAlert().attributes('title')).toBe(i18n.errorAlertTitle);
+ });
+
+ it('renders CI lint results with correct props', () => {
+ expect(findCiLintResults().exists()).toBe(true);
+ expect(findCiLintResults().props()).toMatchObject({
+ dryRun: true,
+ hideAlert: true,
+ isValid: false,
+ errors: mockLintDataError.data.lintCI.errors,
+ warnings: mockLintDataError.data.lintCI.warnings,
+ });
+ });
+ });
+ });
+
+ describe('when CI content has changed after a simulation', () => {
+ beforeEach(async () => {
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
+ await createComponentWithApollo();
+
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockLintDataValid);
+ await findCta().vm.$emit('click');
+ });
+
+ it('renders content change status', async () => {
+ await wrapper.setProps({ ciFileContent: 'new yaml content' });
+
+ expect(findContentChangeStatus().exists()).toBe(true);
+ expect(findResultsCta().exists()).toBe(true);
+ });
+
+ it('calls mutation with new content', async () => {
+ await wrapper.setProps({ ciFileContent: 'new yaml content' });
+ await findResultsCta().vm.$emit('click');
+
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(2);
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
+ mutation: lintCIMutation,
+ variables: {
+ dry_run: true,
+ content: 'new yaml content',
+ endpoint: mockCiLintPath,
+ },
+ });
+ });
+ });
+
+ describe('canceling a simulation', () => {
+ beforeEach(async () => {
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
+ await createComponentWithApollo();
+ });
+
+ it('returns to init state', async () => {
+ // init state
+ expect(findIllustration().exists()).toBe(true);
+ expect(findCiLintResults().exists()).toBe(false);
+
+ // mutations should have successful results
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockLintDataValid);
+ findCta().vm.$emit('click');
+ await nextTick();
+
+ // cancel before simulation succeeds
+ expect(findCancelBtn().exists()).toBe(true);
+ await findCancelBtn().vm.$emit('click');
+
+ // should still render init state
+ expect(findIllustration().exists()).toBe(true);
+ expect(findCiLintResults().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/pipeline_editor/mock_data.js b/spec/frontend/pipeline_editor/mock_data.js
index 560b2820fae..2ea580b7b53 100644
--- a/spec/frontend/pipeline_editor/mock_data.js
+++ b/spec/frontend/pipeline_editor/mock_data.js
@@ -7,11 +7,13 @@ export const mockProjectFullPath = `${mockProjectNamespace}/${mockProjectPath}`;
export const mockDefaultBranch = 'main';
export const mockNewBranch = 'new-branch';
export const mockNewMergeRequestPath = '/-/merge_requests/new';
+export const mockCiLintPath = '/-/ci/lint';
export const mockCommitSha = 'aabbccdd';
export const mockCommitNextSha = 'eeffgghh';
export const mockIncludesHelpPagePath = '/-/includes/help';
export const mockLintHelpPagePath = '/-/lint-help';
export const mockLintUnavailableHelpPagePath = '/-/pipeline-editor/troubleshoot';
+export const mockSimulatePipelineHelpPagePath = '/-/simulate-pipeline-help';
export const mockYmlHelpPagePath = '/-/yml-help';
export const mockCommitMessage = 'My commit message';
diff --git a/spec/frontend/pipeline_wizard/components/widgets/checklist_spec.js b/spec/frontend/pipeline_wizard/components/widgets/checklist_spec.js
new file mode 100644
index 00000000000..43719595c5c
--- /dev/null
+++ b/spec/frontend/pipeline_wizard/components/widgets/checklist_spec.js
@@ -0,0 +1,110 @@
+import { GlFormCheckbox, GlFormCheckboxGroup } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ChecklistWidget from '~/pipeline_wizard/components/widgets/checklist.vue';
+
+describe('Pipeline Wizard - Checklist Widget', () => {
+ let wrapper;
+ const props = {
+ title: 'Foobar',
+ items: [
+ 'foo bar baz', // simple, text-only content
+ {
+ text: 'abc',
+ help: 'def',
+ },
+ ],
+ };
+
+ const getLastUpdateValidEvent = () => {
+ const eventArray = wrapper.emitted('update:valid');
+ return eventArray[eventArray.length - 1];
+ };
+ const findItem = (atIndex = 0) => wrapper.findAllComponents(GlFormCheckbox).at(atIndex);
+ const getGlFormCheckboxGroup = () => wrapper.getComponent(GlFormCheckboxGroup);
+
+  // Item ids *can* be passed inside props.items, but they are usually
+  // autogenerated by lodash.uniqueId() inside the component. To get the
+  // values that the component expects GlFormCheckboxGroup's `v-model` to
+  // emit, we read the value that is actually passed to each GlFormCheckbox.
+ const getAllItemIds = () => props.items.map((_, i) => findItem(i).attributes().value);
+
+ const createComponent = (mountFn = shallowMountExtended) => {
+ wrapper = mountFn(ChecklistWidget, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('creates the component', () => {
+ createComponent();
+ expect(wrapper.exists()).toBe(true);
+ });
+
+ it('displays the item', () => {
+ createComponent();
+ expect(findItem().exists()).toBe(true);
+ });
+
+ it("displays the item's text", () => {
+ createComponent();
+ expect(findItem().text()).toBe(props.items[0]);
+ });
+
+  it('displays an item with help text', () => {
+ createComponent();
+ const { text, help } = props.items[1];
+
+ const itemWrapper = findItem(1);
+ const itemText = itemWrapper.text();
+ // Unfortunately there is no wrapper.slots() accessor in vue_test_utils.
+ // To make sure the help text is being passed to the correct slot, we need to
+ // access the slot internally.
+ // This selector accesses the text of the first slot named "help" in itemWrapper
+ const helpText = itemWrapper.vm.$slots?.help[0]?.text?.trim();
+
+ expect(itemText).toBe(text);
+ expect(helpText).toBe(help);
+ });
+
+ it("emits a 'update:valid' event after all boxes have been checked", async () => {
+ createComponent();
+ // initially, `valid` should be false
+ expect(wrapper.emitted('update:valid')).toEqual([[false]]);
+ const values = getAllItemIds();
+ // this mocks checking all the boxes
+ getGlFormCheckboxGroup().vm.$emit('input', values);
+
+ await nextTick();
+
+ expect(wrapper.emitted('update:valid')).toEqual([[false], [true]]);
+ });
+
+  it('emits an invalid event after a box has been unchecked', async () => {
+ createComponent();
+ // initially, `valid` should be false
+ expect(wrapper.emitted('update:valid')).toEqual([[false]]);
+
+ // checking all the boxes first
+ const values = getAllItemIds();
+ getGlFormCheckboxGroup().vm.$emit('input', values);
+ await nextTick();
+
+    // ensure the assertion below doesn't pass only because `true` was
+    // never emitted in the first place
+ expect(getLastUpdateValidEvent()).toEqual([true]);
+
+ // Now we're unchecking the last box.
+ values.pop();
+ getGlFormCheckboxGroup().vm.$emit('input', values);
+ await nextTick();
+
+ expect(getLastUpdateValidEvent()).toEqual([false]);
+ });
+});
diff --git a/spec/frontend/pipeline_wizard/pipeline_wizard_spec.js b/spec/frontend/pipeline_wizard/pipeline_wizard_spec.js
index dd0304518a3..3f689ffdbc8 100644
--- a/spec/frontend/pipeline_wizard/pipeline_wizard_spec.js
+++ b/spec/frontend/pipeline_wizard/pipeline_wizard_spec.js
@@ -99,4 +99,12 @@ describe('PipelineWizard', () => {
parseDocument(template).get('description').toString(),
);
});
+
+ it('bubbles the done event upwards', () => {
+ createComponent();
+
+ wrapper.findComponent(PipelineWizardWrapper).vm.$emit('done');
+
+ expect(wrapper.emitted().done.length).toBe(1);
+ });
});
diff --git a/spec/frontend/pipeline_wizard/templates/pages_spec.js b/spec/frontend/pipeline_wizard/templates/pages_spec.js
new file mode 100644
index 00000000000..f89e8f05475
--- /dev/null
+++ b/spec/frontend/pipeline_wizard/templates/pages_spec.js
@@ -0,0 +1,89 @@
+import { Document, parseDocument } from 'yaml';
+import PagesWizardTemplate from '~/pipeline_wizard/templates/pages.yml';
+import { merge } from '~/lib/utils/yaml';
+
+const VAR_BUILD_IMAGE = '$BUILD_IMAGE';
+const VAR_INSTALLATION_STEPS = '$INSTALLATION_STEPS';
+const VAR_BUILD_STEPS = '$BUILD_STEPS';
+
+const getYaml = () => parseDocument(PagesWizardTemplate);
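+// Merges every step's `template` section into a single yaml Document so the
+// test below can assert against the fully assembled pipeline file.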
+const getFinalTemplate = () => {
+ const merged = new Document();
+ const yaml = getYaml();
+ yaml.toJS().steps.forEach((_, i) => {
+ merge(merged, yaml.getIn(['steps', i, 'template']));
+ });
+ return merged;
+};
+
+describe('Pages Template', () => {
+ it('is valid yaml', () => {
+ // Testing equality to an empty array (as opposed to just comparing
+ // errors.length) will cause jest to print the underlying error
+ expect(getYaml().errors).toEqual([]);
+ });
+
+ it('includes all `target`s in the respective `template`', () => {
+ const yaml = getYaml();
+ const actual = yaml.toJS().steps.map((x, i) => ({
+ inputs: x.inputs,
+ template: yaml.getIn(['steps', i, 'template']).toString(),
+ }));
+
+ expect(actual).toEqual([
+ {
+ inputs: [
+ expect.objectContaining({
+ label: 'Select your build image',
+ target: VAR_BUILD_IMAGE,
+ }),
+ expect.objectContaining({
+ widget: 'checklist',
+ title: 'Before we begin, please check:',
+ }),
+ ],
+ template: expect.stringContaining(VAR_BUILD_IMAGE),
+ },
+ {
+ inputs: [
+ expect.objectContaining({
+ label: 'Installation Steps',
+ target: VAR_INSTALLATION_STEPS,
+ }),
+ ],
+ template: expect.stringContaining(VAR_INSTALLATION_STEPS),
+ },
+ {
+ inputs: [
+ expect.objectContaining({
+ label: 'Build Steps',
+ target: VAR_BUILD_STEPS,
+ }),
+ ],
+ template: expect.stringContaining(VAR_BUILD_STEPS),
+ },
+ ]);
+ });
+
+ it('addresses all relevant instructions for a pages pipeline', () => {
+ const fullTemplate = getFinalTemplate();
+
+ expect(fullTemplate.toString()).toEqual(
+ `# The Docker image that will be used to build your app
+image: ${VAR_BUILD_IMAGE}
+# Functions that should be executed before the build script is run
+before_script: ${VAR_INSTALLATION_STEPS}
+pages:
+ script: ${VAR_BUILD_STEPS}
+ artifacts:
+ paths:
+ # The folder that contains the files to be exposed at the Page URL
+ - public
+ rules:
+ # This ensures that only pushes to the default branch will trigger
+ # a pages deploy
+ - if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH
+`,
+ );
+ });
+});
diff --git a/spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js b/spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js
index 3b5632a8a4e..bfbb5f934b9 100644
--- a/spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js
+++ b/spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js
@@ -49,15 +49,15 @@ describe('Failed Jobs App', () => {
});
describe('loading spinner', () => {
- beforeEach(() => {
+ it('displays loading spinner when fetching failed jobs', () => {
createComponent(resolverSpy);
- });
- it('displays loading spinner when fetching failed jobs', () => {
expect(findLoadingSpinner().exists()).toBe(true);
});
it('hides loading spinner after the failed jobs have been fetched', async () => {
+ createComponent(resolverSpy);
+
await waitForPromises();
expect(findLoadingSpinner().exists()).toBe(false);
diff --git a/spec/frontend/pipelines/components/jobs/jobs_app_spec.js b/spec/frontend/pipelines/components/jobs/jobs_app_spec.js
index 81e19a6c221..89b6f764b2f 100644
--- a/spec/frontend/pipelines/components/jobs/jobs_app_spec.js
+++ b/spec/frontend/pipelines/components/jobs/jobs_app_spec.js
@@ -50,20 +50,23 @@ describe('Jobs app', () => {
});
describe('loading spinner', () => {
- beforeEach(async () => {
+ const setup = async () => {
createComponent(resolverSpy);
await waitForPromises();
triggerInfiniteScroll();
- });
+ };
+
+ it('displays loading spinner when fetching more jobs', async () => {
+ await setup();
- it('displays loading spinner when fetching more jobs', () => {
expect(findLoadingSpinner().exists()).toBe(true);
expect(findSkeletonLoader().exists()).toBe(false);
});
it('hides loading spinner after jobs have been fetched', async () => {
+ await setup();
await waitForPromises();
expect(findLoadingSpinner().exists()).toBe(false);
diff --git a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
index 49d64c6eac0..3eaf06e0656 100644
--- a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
@@ -5,6 +5,7 @@ import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import waitForPromises from 'helpers/wait_for_promises';
import getPipelineDetails from 'shared_queries/pipelines/get_pipeline_details.query.graphql';
import getUserCallouts from '~/graphql_shared/queries/get_user_callouts.query.graphql';
@@ -29,10 +30,16 @@ import * as Api from '~/pipelines/components/graph_shared/api';
import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
import * as parsingUtils from '~/pipelines/components/parsing_utils';
import getPipelineHeaderData from '~/pipelines/graphql/queries/get_pipeline_header_data.query.graphql';
+import getPerformanceInsights from '~/pipelines/graphql/queries/get_performance_insights.query.graphql';
import * as sentryUtils from '~/pipelines/utils';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import { mockRunningPipelineHeaderData } from '../mock_data';
-import { mapCallouts, mockCalloutsResponse, mockPipelineResponse } from './mock_data';
+import {
+ mapCallouts,
+ mockCalloutsResponse,
+ mockPipelineResponse,
+ mockPerformanceInsightsResponse,
+} from './mock_data';
const defaultProvide = {
graphqlResourceEtag: 'frog/amphibirama/etag/',
@@ -88,11 +95,15 @@ describe('Pipeline graph wrapper', () => {
const callouts = mapCallouts(calloutsList);
const getUserCalloutsHandler = jest.fn().mockResolvedValue(mockCalloutsResponse(callouts));
const getPipelineHeaderDataHandler = jest.fn().mockResolvedValue(mockRunningPipelineHeaderData);
+ const getPerformanceInsightsHandler = jest
+ .fn()
+ .mockResolvedValue(mockPerformanceInsightsResponse);
const requestHandlers = [
[getPipelineHeaderData, getPipelineHeaderDataHandler],
[getPipelineDetails, getPipelineDetailsHandler],
[getUserCallouts, getUserCalloutsHandler],
+ [getPerformanceInsights, getPerformanceInsightsHandler],
];
const apolloProvider = createMockApollo(requestHandlers);
@@ -502,9 +513,7 @@ describe('Pipeline graph wrapper', () => {
describe('when no duration is obtained', () => {
beforeEach(async () => {
- jest.spyOn(window.performance, 'getEntriesByName').mockImplementation(() => {
- return [];
- });
+ stubPerformanceWebAPI();
createComponentWithApollo({
provide: {
diff --git a/spec/frontend/pipelines/graph/graph_view_selector_spec.js b/spec/frontend/pipelines/graph/graph_view_selector_spec.js
index f574f4dccc5..1397500bdc7 100644
--- a/spec/frontend/pipelines/graph/graph_view_selector_spec.js
+++ b/spec/frontend/pipelines/graph/graph_view_selector_spec.js
@@ -1,10 +1,19 @@
import { GlAlert, GlButton, GlButtonGroup, GlLoadingIcon } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { LAYER_VIEW, STAGE_VIEW } from '~/pipelines/components/graph/constants';
import GraphViewSelector from '~/pipelines/components/graph/graph_view_selector.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import getPerformanceInsights from '~/pipelines/graphql/queries/get_performance_insights.query.graphql';
+import { mockPerformanceInsightsResponse } from './mock_data';
+
+Vue.use(VueApollo);
describe('the graph view selector component', () => {
let wrapper;
+ let trackingSpy;
const findDependenciesToggle = () => wrapper.find('[data-testid="show-links-toggle"]');
const findViewTypeSelector = () => wrapper.findComponent(GlButtonGroup);
@@ -13,11 +22,13 @@ describe('the graph view selector component', () => {
const findSwitcherLoader = () => wrapper.find('[data-testid="switcher-loading-state"]');
const findToggleLoader = () => findDependenciesToggle().find(GlLoadingIcon);
const findHoverTip = () => wrapper.findComponent(GlAlert);
+ const findPipelineInsightsBtn = () => wrapper.find('[data-testid="pipeline-insights-btn"]');
const defaultProps = {
showLinks: false,
tipPreviouslyDismissed: false,
type: STAGE_VIEW,
+ isPipelineComplete: true,
};
const defaultData = {
@@ -27,6 +38,14 @@ describe('the graph view selector component', () => {
showLinksActive: false,
};
+ const getPerformanceInsightsHandler = jest
+ .fn()
+ .mockResolvedValue(mockPerformanceInsightsResponse);
+
+ const requestHandlers = [[getPerformanceInsights, getPerformanceInsightsHandler]];
+
+ const apolloProvider = createMockApollo(requestHandlers);
+
const createComponent = ({ data = {}, mountFn = shallowMount, props = {} } = {}) => {
wrapper = mountFn(GraphViewSelector, {
propsData: {
@@ -39,6 +58,7 @@ describe('the graph view selector component', () => {
...data,
};
},
+ apolloProvider,
});
};
@@ -91,7 +111,6 @@ describe('the graph view selector component', () => {
describe('events', () => {
beforeEach(() => {
- jest.useFakeTimers();
createComponent({
mountFn: mount,
props: {
@@ -203,5 +222,44 @@ describe('the graph view selector component', () => {
expect(findHoverTip().exists()).toBe(false);
});
});
+
+ describe('pipeline insights', () => {
+ it.each`
+ isPipelineComplete | shouldShow
+ ${true} | ${true}
+ ${false} | ${false}
+ `(
+      'button visibility is $shouldShow when isPipelineComplete is $isPipelineComplete',
+ ({ isPipelineComplete, shouldShow }) => {
+ createComponent({
+ props: {
+ isPipelineComplete,
+ },
+ });
+
+ expect(findPipelineInsightsBtn().exists()).toBe(shouldShow);
+ },
+ );
+ });
+
+ describe('tracking', () => {
+ beforeEach(() => {
+ createComponent();
+
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it('tracks performance insights button click', () => {
+ findPipelineInsightsBtn().vm.$emit('click');
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_insights_button', {
+ label: 'performance_insights',
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/pipelines/graph/job_group_dropdown_spec.js b/spec/frontend/pipelines/graph/job_group_dropdown_spec.js
index 5d8e70bac31..d8afb33e148 100644
--- a/spec/frontend/pipelines/graph/job_group_dropdown_spec.js
+++ b/spec/frontend/pipelines/graph/job_group_dropdown_spec.js
@@ -79,7 +79,7 @@ describe('job group dropdown component', () => {
it('renders button with group name and size', () => {
expect(findButton().text()).toContain(group.name);
- expect(findButton().text()).toContain(group.size);
+ expect(findButton().text()).toContain(group.size.toString());
});
it('renders dropdown with jobs', () => {
diff --git a/spec/frontend/pipelines/graph/linked_pipeline_spec.js b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
index fd97c2dbe77..cdeaa0db61d 100644
--- a/spec/frontend/pipelines/graph/linked_pipeline_spec.js
+++ b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
@@ -103,7 +103,7 @@ describe('Linked pipeline', () => {
expect(findCardTooltip().text()).toContain(mockPipeline.project.name);
expect(findCardTooltip().text()).toContain(mockPipeline.status.label);
expect(findCardTooltip().text()).toContain(mockPipeline.sourceJob.name);
- expect(findCardTooltip().text()).toContain(mockPipeline.id);
+ expect(findCardTooltip().text()).toContain(mockPipeline.id.toString());
});
it('should display multi-project label when pipeline project id is not the same as triggered pipeline project id', () => {
diff --git a/spec/frontend/pipelines/graph/mock_data.js b/spec/frontend/pipelines/graph/mock_data.js
index 6124d67af09..959bbcefc98 100644
--- a/spec/frontend/pipelines/graph/mock_data.js
+++ b/spec/frontend/pipelines/graph/mock_data.js
@@ -1038,3 +1038,245 @@ export const triggerJob = {
action: null,
},
};
+
+export const mockPerformanceInsightsResponse = {
+ data: {
+ project: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/20',
+ pipeline: {
+ __typename: 'Pipeline',
+ id: 'gid://gitlab/Ci::Pipeline/97',
+ jobs: {
+ __typename: 'CiJobConnection',
+ pageInfo: {
+ __typename: 'PageInfo',
+ hasNextPage: false,
+ },
+ nodes: [
+ {
+ __typename: 'CiJob',
+ id: 'gid://gitlab/Ci::Bridge/2502',
+ duration: null,
+ detailedStatus: {
+ __typename: 'DetailedStatus',
+ id: 'success-2502-2502',
+ detailsPath: '/root/lots-of-jobs-project/-/pipelines/98',
+ },
+ name: 'trigger_job',
+ stage: {
+ __typename: 'CiStage',
+ id: 'gid://gitlab/Ci::Stage/303',
+ name: 'deploy',
+ },
+ startedAt: null,
+ queuedDuration: 424850.376278,
+ },
+ {
+ __typename: 'CiJob',
+ id: 'gid://gitlab/Ci::Build/2501',
+ duration: 10,
+ detailedStatus: {
+ __typename: 'DetailedStatus',
+ id: 'success-2501-2501',
+ detailsPath: '/root/ci-project/-/jobs/2501',
+ },
+ name: 'artifact_job',
+ stage: {
+ __typename: 'CiStage',
+ id: 'gid://gitlab/Ci::Stage/303',
+ name: 'deploy',
+ },
+ startedAt: '2022-07-01T16:31:41Z',
+ queuedDuration: 2.621553,
+ },
+ {
+ __typename: 'CiJob',
+ id: 'gid://gitlab/Ci::Build/2500',
+ duration: 4,
+ detailedStatus: {
+ __typename: 'DetailedStatus',
+ id: 'success-2500-2500',
+ detailsPath: '/root/ci-project/-/jobs/2500',
+ },
+ name: 'coverage_job',
+ stage: {
+ __typename: 'CiStage',
+ id: 'gid://gitlab/Ci::Stage/302',
+ name: 'test',
+ },
+ startedAt: '2022-07-01T16:31:33Z',
+ queuedDuration: 14.388869,
+ },
+ {
+ __typename: 'CiJob',
+ id: 'gid://gitlab/Ci::Build/2499',
+ duration: 4,
+ detailedStatus: {
+ __typename: 'DetailedStatus',
+ id: 'success-2499-2499',
+ detailsPath: '/root/ci-project/-/jobs/2499',
+ },
+ name: 'test_job_two',
+ stage: {
+ __typename: 'CiStage',
+ id: 'gid://gitlab/Ci::Stage/302',
+ name: 'test',
+ },
+ startedAt: '2022-07-01T16:31:28Z',
+ queuedDuration: 15.792664,
+ },
+ {
+ __typename: 'CiJob',
+ id: 'gid://gitlab/Ci::Build/2498',
+ duration: 4,
+ detailedStatus: {
+ __typename: 'DetailedStatus',
+ id: 'success-2498-2498',
+ detailsPath: '/root/ci-project/-/jobs/2498',
+ },
+ name: 'test_job_one',
+ stage: {
+ __typename: 'CiStage',
+ id: 'gid://gitlab/Ci::Stage/302',
+ name: 'test',
+ },
+ startedAt: '2022-07-01T16:31:17Z',
+ queuedDuration: 8.317072,
+ },
+ {
+ __typename: 'CiJob',
+ id: 'gid://gitlab/Ci::Build/2497',
+ duration: 5,
+ detailedStatus: {
+ __typename: 'DetailedStatus',
+ id: 'failed-2497-2497',
+ detailsPath: '/root/ci-project/-/jobs/2497',
+ },
+ name: 'allow_failure_test_job',
+ stage: {
+ __typename: 'CiStage',
+ id: 'gid://gitlab/Ci::Stage/302',
+ name: 'test',
+ },
+ startedAt: '2022-07-01T16:31:22Z',
+ queuedDuration: 3.547553,
+ },
+ {
+ __typename: 'CiJob',
+ id: 'gid://gitlab/Ci::Build/2496',
+ duration: null,
+ detailedStatus: {
+ __typename: 'DetailedStatus',
+ id: 'manual-2496-2496',
+ detailsPath: '/root/ci-project/-/jobs/2496',
+ },
+ name: 'test_manual_job',
+ stage: {
+ __typename: 'CiStage',
+ id: 'gid://gitlab/Ci::Stage/302',
+ name: 'test',
+ },
+ startedAt: null,
+ queuedDuration: null,
+ },
+ {
+ __typename: 'CiJob',
+ id: 'gid://gitlab/Ci::Build/2495',
+ duration: 5,
+ detailedStatus: {
+ __typename: 'DetailedStatus',
+ id: 'success-2495-2495',
+ detailsPath: '/root/ci-project/-/jobs/2495',
+ },
+ name: 'large_log_output',
+ stage: {
+ __typename: 'CiStage',
+ id: 'gid://gitlab/Ci::Stage/301',
+ name: 'build',
+ },
+ startedAt: '2022-07-01T16:31:11Z',
+ queuedDuration: 79.128625,
+ },
+ {
+ __typename: 'CiJob',
+ id: 'gid://gitlab/Ci::Build/2494',
+ duration: 5,
+ detailedStatus: {
+ __typename: 'DetailedStatus',
+ id: 'success-2494-2494',
+ detailsPath: '/root/ci-project/-/jobs/2494',
+ },
+ name: 'build_job',
+ stage: {
+ __typename: 'CiStage',
+ id: 'gid://gitlab/Ci::Stage/301',
+ name: 'build',
+ },
+ startedAt: '2022-07-01T16:31:05Z',
+ queuedDuration: 73.286895,
+ },
+ {
+ __typename: 'CiJob',
+ id: 'gid://gitlab/Ci::Build/2493',
+ duration: 16,
+ detailedStatus: {
+ __typename: 'DetailedStatus',
+ id: 'success-2493-2493',
+ detailsPath: '/root/ci-project/-/jobs/2493',
+ },
+ name: 'wait_job',
+ stage: {
+ __typename: 'CiStage',
+ id: 'gid://gitlab/Ci::Stage/301',
+ name: 'build',
+ },
+ startedAt: '2022-07-01T16:30:48Z',
+ queuedDuration: 56.258856,
+ },
+ ],
+ },
+ },
+ },
+ },
+};
+
+export const mockPerformanceInsightsNextPageResponse = {
+ data: {
+ project: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/20',
+ pipeline: {
+ __typename: 'Pipeline',
+ id: 'gid://gitlab/Ci::Pipeline/97',
+ jobs: {
+ __typename: 'CiJobConnection',
+ pageInfo: {
+ __typename: 'PageInfo',
+ hasNextPage: true,
+ },
+ nodes: [
+ {
+ __typename: 'CiJob',
+ id: 'gid://gitlab/Ci::Bridge/2502',
+ duration: null,
+ detailedStatus: {
+ __typename: 'DetailedStatus',
+ id: 'success-2502-2502',
+ detailsPath: '/root/lots-of-jobs-project/-/pipelines/98',
+ },
+ name: 'trigger_job',
+ stage: {
+ __typename: 'CiStage',
+ id: 'gid://gitlab/Ci::Stage/303',
+ name: 'deploy',
+ },
+ startedAt: null,
+ queuedDuration: 424850.376278,
+ },
+ ],
+ },
+ },
+ },
+ },
+};
diff --git a/spec/frontend/pipelines/header_component_spec.js b/spec/frontend/pipelines/header_component_spec.js
index 5cc11adf696..859be8d342c 100644
--- a/spec/frontend/pipelines/header_component_spec.js
+++ b/spec/frontend/pipelines/header_component_spec.js
@@ -205,7 +205,7 @@ describe('Pipeline details header', () => {
});
it('should call deletePipeline Mutation with pipeline id when modal is submitted', () => {
- findDeleteModal().vm.$emit('ok');
+ findDeleteModal().vm.$emit('primary');
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
mutation: deletePipelineMutation,
@@ -223,7 +223,7 @@ describe('Pipeline details header', () => {
},
});
- findDeleteModal().vm.$emit('ok');
+ findDeleteModal().vm.$emit('primary');
await waitForPromises();
expect(findAlert().text()).toBe(failureMessage);
diff --git a/spec/frontend/pipelines/performance_insights_modal_spec.js b/spec/frontend/pipelines/performance_insights_modal_spec.js
new file mode 100644
index 00000000000..b745eb1d78e
--- /dev/null
+++ b/spec/frontend/pipelines/performance_insights_modal_spec.js
@@ -0,0 +1,122 @@
+import { GlAlert, GlLink, GlModal } from '@gitlab/ui';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import waitForPromises from 'helpers/wait_for_promises';
+import PerformanceInsightsModal from '~/pipelines/components/performance_insights_modal.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { trimText } from 'helpers/text_helper';
+import getPerformanceInsights from '~/pipelines/graphql/queries/get_performance_insights.query.graphql';
+import {
+ mockPerformanceInsightsResponse,
+ mockPerformanceInsightsNextPageResponse,
+} from './graph/mock_data';
+
+Vue.use(VueApollo);
+
+describe('Performance insights modal', () => {
+ let wrapper;
+
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findLink = () => wrapper.findComponent(GlLink);
+ const findQueuedCardData = () => wrapper.findByTestId('insights-queued-card-data');
+ const findQueuedCardLink = () => wrapper.findByTestId('insights-queued-card-link');
+ const findExecutedCardData = () => wrapper.findByTestId('insights-executed-card-data');
+ const findExecutedCardLink = () => wrapper.findByTestId('insights-executed-card-link');
+ const findSlowJobsStage = (index) => wrapper.findAllByTestId('insights-slow-job-stage').at(index);
+ const findSlowJobsLink = (index) => wrapper.findAllByTestId('insights-slow-job-link').at(index);
+
+ const getPerformanceInsightsHandler = jest
+ .fn()
+ .mockResolvedValue(mockPerformanceInsightsResponse);
+
+ const getPerformanceInsightsNextPageHandler = jest
+ .fn()
+ .mockResolvedValue(mockPerformanceInsightsNextPageResponse);
+
+ const requestHandlers = [[getPerformanceInsights, getPerformanceInsightsHandler]];
+
+ const createComponent = (handlers = requestHandlers) => {
+ wrapper = shallowMountExtended(PerformanceInsightsModal, {
+ provide: {
+ pipelineIid: '1',
+ pipelineProjectPath: 'root/ci-project',
+ },
+ apolloProvider: createMockApollo(handlers),
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('without next page', () => {
+ beforeEach(async () => {
+ createComponent();
+
+ await waitForPromises();
+ });
+
+ it('displays modal', () => {
+ expect(findModal().exists()).toBe(true);
+ });
+
+    it('does not display alert', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ describe('queued duration card', () => {
+ it('displays card data', () => {
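+        // trigger_job in the mock response has queuedDuration 424850.376278 s (~4.9 days)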
+ expect(trimText(findQueuedCardData().text())).toBe('4.9 days');
+ });
+ it('displays card link', () => {
+ expect(findQueuedCardLink().attributes('href')).toBe(
+ '/root/lots-of-jobs-project/-/pipelines/98',
+ );
+ });
+ });
+
+ describe('executed duration card', () => {
+ it('displays card data', () => {
+ expect(trimText(findExecutedCardData().text())).toBe('trigger_job');
+ });
+ it('displays card link', () => {
+ expect(findExecutedCardLink().attributes('href')).toBe(
+ '/root/lots-of-jobs-project/-/pipelines/98',
+ );
+ });
+ });
+
+ describe('slow jobs', () => {
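+      // rows follow the longest durations in the mock response: 16 s, 10 s, then three 5 s jobs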
+ it.each`
+ index | expectedStage | expectedName | expectedLink
+ ${0} | ${'build'} | ${'wait_job'} | ${'/root/ci-project/-/jobs/2493'}
+ ${1} | ${'deploy'} | ${'artifact_job'} | ${'/root/ci-project/-/jobs/2501'}
+ ${2} | ${'test'} | ${'allow_failure_test_job'} | ${'/root/ci-project/-/jobs/2497'}
+ ${3} | ${'build'} | ${'large_log_output'} | ${'/root/ci-project/-/jobs/2495'}
+ ${4} | ${'build'} | ${'build_job'} | ${'/root/ci-project/-/jobs/2494'}
+ `(
+        'displays the slow job $expectedName correctly',
+ ({ index, expectedStage, expectedName, expectedLink }) => {
+ expect(findSlowJobsStage(index).text()).toBe(expectedStage);
+ expect(findSlowJobsLink(index).text()).toBe(expectedName);
+ expect(findSlowJobsLink(index).attributes('href')).toBe(expectedLink);
+ },
+ );
+ });
+ });
+
+ describe('limit alert', () => {
+ it('displays limit alert when there is a next page', async () => {
+ createComponent([[getPerformanceInsights, getPerformanceInsightsNextPageHandler]]);
+
+ await waitForPromises();
+
+ expect(findAlert().exists()).toBe(true);
+ expect(findLink().attributes('href')).toBe(
+ 'https://gitlab.com/gitlab-org/gitlab/-/issues/365902',
+ );
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index de9f394db43..ad6d650670a 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -665,7 +665,6 @@ describe('Pipelines', () => {
it('stops polling & restarts polling', async () => {
findStagesDropdownToggle().trigger('click');
-
await waitForPromises();
expect(cancelMock).not.toHaveBeenCalled();
diff --git a/spec/frontend/pipelines/test_reports/stores/mutations_spec.js b/spec/frontend/pipelines/test_reports/stores/mutations_spec.js
index 6ab479a257c..f9b9da01a2b 100644
--- a/spec/frontend/pipelines/test_reports/stores/mutations_spec.js
+++ b/spec/frontend/pipelines/test_reports/stores/mutations_spec.js
@@ -49,7 +49,7 @@ describe('Mutations TestReports Store', () => {
describe('set suite error', () => {
it('should set the error message in state if provided', () => {
- const message = 'Test report artifacts have expired';
+ const message = 'Test report artifacts not found';
mutations[types.SET_SUITE_ERROR](mockState, {
response: { data: { errors: message } },
diff --git a/spec/frontend/pipelines/test_reports/test_case_details_spec.js b/spec/frontend/pipelines/test_reports/test_case_details_spec.js
index 29c07e5e9f8..f194864447c 100644
--- a/spec/frontend/pipelines/test_reports/test_case_details_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_case_details_spec.js
@@ -3,6 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import TestCaseDetails from '~/pipelines/components/test_reports/test_case_details.vue';
import CodeBlock from '~/vue_shared/components/code_block.vue';
+import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue';
describe('Test case details', () => {
let wrapper;
@@ -19,6 +20,7 @@ describe('Test case details', () => {
system_output: 'Line 42 is broken',
};
+ const findCopyFileBtn = () => wrapper.findComponent(ModalCopyButton);
const findModal = () => wrapper.findComponent(GlModal);
const findName = () => wrapper.findByTestId('test-case-name');
const findFile = () => wrapper.findByTestId('test-case-file');
@@ -66,6 +68,10 @@ describe('Test case details', () => {
expect(findFileLink().attributes('href')).toBe(defaultTestCase.filePath);
});
+ it('renders copy button for test case file', () => {
+ expect(findCopyFileBtn().attributes('text')).toBe(defaultTestCase.file);
+ });
+
it('renders the test case duration', () => {
expect(findDuration().text()).toBe(defaultTestCase.formattedTime);
});
diff --git a/spec/frontend/pipelines/test_reports/test_reports_spec.js b/spec/frontend/pipelines/test_reports/test_reports_spec.js
index e0daf8cb4b5..3c3143b1865 100644
--- a/spec/frontend/pipelines/test_reports/test_reports_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_reports_spec.js
@@ -31,18 +31,30 @@ describe('Test reports app', () => {
const createComponent = ({ state = {} } = {}) => {
store = new Vuex.Store({
- state: {
- isLoading: false,
- selectedSuiteIndex: null,
- testReports,
- ...state,
+ modules: {
+ testReports: {
+ namespaced: true,
+ state: {
+ isLoading: false,
+ selectedSuiteIndex: null,
+ testReports,
+ ...state,
+ },
+ actions: actionSpies,
+ getters,
+ },
},
- actions: actionSpies,
- getters,
});
+ jest.spyOn(store, 'registerModule').mockReturnValue(null);
+
wrapper = extendedWrapper(
shallowMount(TestReports, {
+ provide: {
+ blobPath: '/blob/path',
+ summaryEndpoint: '/summary.json',
+ suiteEndpoint: '/suite.json',
+ },
store,
}),
);
diff --git a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
index 25650b24705..c372ac06c35 100644
--- a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
@@ -34,22 +34,32 @@ describe('Test reports suite table', () => {
const createComponent = ({ suite = testSuite, perPage = 20, errorMessage } = {}) => {
store = new Vuex.Store({
- state: {
- blobPath,
+ modules: {
testReports: {
- test_suites: [suite],
+ namespaced: true,
+ state: {
+ blobPath,
+ testReports: {
+ test_suites: [suite],
+ },
+ selectedSuiteIndex: 0,
+ pageInfo: {
+ page: 1,
+ perPage,
+ },
+ errorMessage,
+ },
+ getters,
},
- selectedSuiteIndex: 0,
- pageInfo: {
- page: 1,
- perPage,
- },
- errorMessage,
},
- getters,
});
wrapper = shallowMountExtended(SuiteTable, {
+ provide: {
+ blobPath: '/blob/path',
+ summaryEndpoint: '/summary.json',
+ suiteEndpoint: '/suite.json',
+ },
store,
stubs: { GlFriendlyWrap },
});
diff --git a/spec/frontend/pipelines/test_reports/test_summary_table_spec.js b/spec/frontend/pipelines/test_reports/test_summary_table_spec.js
index 1598d5c337f..0e1229f7067 100644
--- a/spec/frontend/pipelines/test_reports/test_summary_table_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_summary_table_spec.js
@@ -20,13 +20,23 @@ describe('Test reports summary table', () => {
const createComponent = (reports = null) => {
store = new Vuex.Store({
- state: {
- testReports: reports || testReports,
+ modules: {
+ testReports: {
+ namespaced: true,
+ state: {
+ testReports: reports || testReports,
+ },
+ getters,
+ },
},
- getters,
});
wrapper = mount(SummaryTable, {
+ provide: {
+ blobPath: '/blob/path',
+ summaryEndpoint: '/summary.json',
+ suiteEndpoint: '/suite.json',
+ },
propsData: defaultProps,
store,
});
diff --git a/spec/frontend/pipelines/utils_spec.js b/spec/frontend/pipelines/utils_spec.js
index 1c23a7e4fcf..a82390fae22 100644
--- a/spec/frontend/pipelines/utils_spec.js
+++ b/spec/frontend/pipelines/utils_spec.js
@@ -8,10 +8,14 @@ import {
removeOrphanNodes,
getMaxNodes,
} from '~/pipelines/components/parsing_utils';
-import { createNodeDict } from '~/pipelines/utils';
+import { createNodeDict, calculateJobStats, calculateSlowestFiveJobs } from '~/pipelines/utils';
import { mockParsedGraphQLNodes, missingJob } from './components/dag/mock_data';
-import { generateResponse, mockPipelineResponse } from './graph/mock_data';
+import {
+ generateResponse,
+ mockPipelineResponse,
+ mockPerformanceInsightsResponse,
+} from './graph/mock_data';
describe('DAG visualization parsing utilities', () => {
const nodeDict = createNodeDict(mockParsedGraphQLNodes);
@@ -158,4 +162,40 @@ describe('DAG visualization parsing utilities', () => {
expect(columns).toMatchSnapshot();
});
});
+
+ describe('performance insights', () => {
+ const {
+ data: {
+ project: {
+ pipeline: { jobs },
+ },
+ },
+ } = mockPerformanceInsightsResponse;
+
+ describe('calculateJobStats', () => {
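+      // jobs.nodes[0] is trigger_job, the longest-queued job in the mock response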
+ const expectedJob = jobs.nodes[0];
+
+      it('returns the job that spent the longest time queued', () => {
+ expect(calculateJobStats(jobs, 'queuedDuration')).toEqual(expectedJob);
+ });
+
+ it('returns the job that was executed last', () => {
+ expect(calculateJobStats(jobs, 'startedAt')).toEqual(expectedJob);
+ });
+ });
+
+ describe('calculateSlowestFiveJobs', () => {
+ it('returns the slowest five jobs of the pipeline', () => {
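+        // the five longest durations in the mock response:
+        // wait_job (16 s), artifact_job (10 s), then the three 5 s jobs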
+ const expectedJobs = [
+ jobs.nodes[9],
+ jobs.nodes[1],
+ jobs.nodes[5],
+ jobs.nodes[7],
+ jobs.nodes[8],
+ ];
+
+ expect(calculateSlowestFiveJobs(jobs)).toEqual(expectedJobs);
+ });
+ });
+ });
});
diff --git a/spec/frontend/projects/new/components/new_project_url_select_spec.js b/spec/frontend/projects/new/components/new_project_url_select_spec.js
index ba22622e1f7..b6d4ee32cf5 100644
--- a/spec/frontend/projects/new/components/new_project_url_select_spec.js
+++ b/spec/frontend/projects/new/components/new_project_url_select_spec.js
@@ -4,6 +4,7 @@ import {
GlDropdownItem,
GlDropdownSectionHeader,
GlSearchBoxByType,
+ GlTruncate,
} from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
@@ -15,7 +16,6 @@ import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import eventHub from '~/projects/new/event_hub';
import NewProjectUrlSelect from '~/projects/new/components/new_project_url_select.vue';
import searchQuery from '~/projects/new/queries/search_namespaces_where_user_can_create_projects.query.graphql';
-import { s__ } from '~/locale';
describe('NewProjectUrlSelect component', () => {
let wrapper;
@@ -90,6 +90,7 @@ describe('NewProjectUrlSelect component', () => {
const findButtonLabel = () => wrapper.findComponent(GlButton);
const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findSelectedPath = () => wrapper.findComponent(GlTruncate);
const findInput = () => wrapper.findComponent(GlSearchBoxByType);
const findHiddenNamespaceInput = () => wrapper.find('[name="project[namespace_id]"]');
@@ -121,14 +122,15 @@ describe('NewProjectUrlSelect component', () => {
describe('when namespaceId is provided', () => {
beforeEach(() => {
- wrapper = mountComponent();
+ wrapper = mountComponent({ mountFn: mount });
});
it('renders a dropdown with the given namespace full path as the text', () => {
- const dropdownProps = findDropdown().props();
+ expect(findSelectedPath().props('text')).toBe(defaultProvide.namespaceFullPath);
+ });
- expect(dropdownProps.text).toBe(defaultProvide.namespaceFullPath);
- expect(dropdownProps.toggleClass).not.toContain('gl-text-gray-500!');
+    it('renders a dropdown without the gl-text-gray-500! toggle class', () => {
+ expect(findDropdown().props('toggleClass')).not.toContain('gl-text-gray-500!');
});
it('renders a hidden input with the given namespace id', () => {
@@ -150,14 +152,15 @@ describe('NewProjectUrlSelect component', () => {
};
beforeEach(() => {
- wrapper = mountComponent({ provide });
+ wrapper = mountComponent({ provide, mountFn: mount });
});
it("renders a dropdown with the user's namespace full path as the text", () => {
- const dropdownProps = findDropdown().props();
+ expect(findSelectedPath().props('text')).toBe('Pick a group or namespace');
+ });
- expect(dropdownProps.text).toBe(s__('ProjectsNew|Pick a group or namespace'));
- expect(dropdownProps.toggleClass).toContain('gl-text-gray-500!');
+    it('renders a dropdown with the gl-text-gray-500! toggle class', () => {
+ expect(findDropdown().props('toggleClass')).toContain('gl-text-gray-500!');
});
it("renders a hidden input with the user's namespace id", () => {
@@ -236,8 +239,8 @@ describe('NewProjectUrlSelect component', () => {
expect(listItems.at(2).text()).toBe(data.currentUser.groups.nodes[2].fullPath);
});
- it('sets the selection to the group', async () => {
- expect(findDropdown().props('text')).toBe(fullPath);
+ it('sets the selection to the group', () => {
+ expect(findSelectedPath().props('text')).toBe(fullPath);
});
});
diff --git a/spec/frontend/projects/pipelines/charts/components/app_spec.js b/spec/frontend/projects/pipelines/charts/components/app_spec.js
index 98c7856a61a..7b9011fa3d9 100644
--- a/spec/frontend/projects/pipelines/charts/components/app_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/app_spec.js
@@ -14,6 +14,7 @@ jest.mock('~/lib/utils/url_utility');
const DeploymentFrequencyChartsStub = { name: 'DeploymentFrequencyCharts', render: () => {} };
const LeadTimeChartsStub = { name: 'LeadTimeCharts', render: () => {} };
const TimeToRestoreServiceChartsStub = { name: 'TimeToRestoreServiceCharts', render: () => {} };
+const ChangeFailureRateChartsStub = { name: 'ChangeFailureRateCharts', render: () => {} };
const ProjectQualitySummaryStub = { name: 'ProjectQualitySummary', render: () => {} };
describe('ProjectsPipelinesChartsApp', () => {
@@ -33,6 +34,7 @@ describe('ProjectsPipelinesChartsApp', () => {
DeploymentFrequencyCharts: DeploymentFrequencyChartsStub,
LeadTimeCharts: LeadTimeChartsStub,
TimeToRestoreServiceCharts: TimeToRestoreServiceChartsStub,
+ ChangeFailureRateCharts: ChangeFailureRateChartsStub,
ProjectQualitySummary: ProjectQualitySummaryStub,
},
},
@@ -50,6 +52,7 @@ describe('ProjectsPipelinesChartsApp', () => {
const findGlTabAtIndex = (index) => findAllGlTabs().at(index);
const findLeadTimeCharts = () => wrapper.find(LeadTimeChartsStub);
const findTimeToRestoreServiceCharts = () => wrapper.find(TimeToRestoreServiceChartsStub);
+ const findChangeFailureRateCharts = () => wrapper.find(ChangeFailureRateChartsStub);
const findDeploymentFrequencyCharts = () => wrapper.find(DeploymentFrequencyChartsStub);
const findPipelineCharts = () => wrapper.find(PipelineCharts);
const findProjectQualitySummary = () => wrapper.find(ProjectQualitySummaryStub);
@@ -59,58 +62,49 @@ describe('ProjectsPipelinesChartsApp', () => {
createComponent();
});
- it('renders tabs', () => {
- expect(findGlTabs().exists()).toBe(true);
+ describe.each`
+ title | finderFn | index
+ ${'Pipelines'} | ${findPipelineCharts} | ${0}
+ ${'Deployment frequency'} | ${findDeploymentFrequencyCharts} | ${1}
+ ${'Lead time'} | ${findLeadTimeCharts} | ${2}
+ ${'Time to restore service'} | ${findTimeToRestoreServiceCharts} | ${3}
+ ${'Change failure rate'} | ${findChangeFailureRateCharts} | ${4}
+ ${'Project quality'} | ${findProjectQualitySummary} | ${5}
+ `('Tabs', ({ title, finderFn, index }) => {
+    it(`renders a tab titled ${title} at index ${index}`, () => {
+ expect(findGlTabAtIndex(index).attributes('title')).toBe(title);
+ });
- expect(findGlTabAtIndex(0).attributes('title')).toBe('Pipelines');
- expect(findGlTabAtIndex(1).attributes('title')).toBe('Deployment frequency');
- expect(findGlTabAtIndex(2).attributes('title')).toBe('Lead time');
- expect(findGlTabAtIndex(3).attributes('title')).toBe('Time to restore service');
- });
+ it(`renders the ${title} chart`, () => {
+ expect(finderFn().exists()).toBe(true);
+ });
- it('renders the pipeline charts', () => {
- expect(findPipelineCharts().exists()).toBe(true);
- });
+ it(`updates the current tab and url when the ${title} tab is clicked`, async () => {
+ let chartsPath;
+ const tabName = title.toLowerCase().replace(/\s/g, '-');
- it('renders the deployment frequency charts', () => {
- expect(findDeploymentFrequencyCharts().exists()).toBe(true);
- });
+ setWindowLocation(`${TEST_HOST}/gitlab-org/gitlab-test/-/pipelines/charts`);
- it('renders the lead time charts', () => {
- expect(findLeadTimeCharts().exists()).toBe(true);
- });
+ mergeUrlParams.mockImplementation(({ chart }, path) => {
+ expect(chart).toBe(tabName);
+ expect(path).toBe(window.location.pathname);
+ chartsPath = `${path}?chart=${chart}`;
+ return chartsPath;
+ });
- it('renders the time to restore service charts', () => {
- expect(findTimeToRestoreServiceCharts().exists()).toBe(true);
- });
+ updateHistory.mockImplementation(({ url }) => {
+ expect(url).toBe(chartsPath);
+ });
+ const tabs = findGlTabs();
- it('renders the project quality summary', () => {
- expect(findProjectQualitySummary().exists()).toBe(true);
- });
+ expect(tabs.attributes('value')).toBe('0');
- it('sets the tab and url when a tab is clicked', async () => {
- let chartsPath;
- setWindowLocation(`${TEST_HOST}/gitlab-org/gitlab-test/-/pipelines/charts`);
+ tabs.vm.$emit('input', index);
- mergeUrlParams.mockImplementation(({ chart }, path) => {
- expect(chart).toBe('deployment-frequency');
- expect(path).toBe(window.location.pathname);
- chartsPath = `${path}?chart=${chart}`;
- return chartsPath;
- });
+ await nextTick();
- updateHistory.mockImplementation(({ url }) => {
- expect(url).toBe(chartsPath);
+ expect(tabs.attributes('value')).toBe(index.toString());
});
- const tabs = findGlTabs();
-
- expect(tabs.attributes('value')).toBe('0');
-
- tabs.vm.$emit('input', 1);
-
- await nextTick();
-
- expect(tabs.attributes('value')).toBe('1');
});
it('should not try to push history if the tab does not change', async () => {
@@ -136,6 +130,7 @@ describe('ProjectsPipelinesChartsApp', () => {
${'deployment-frequency-tab'} | ${'p_analytics_ci_cd_deployment_frequency'}
${'lead-time-tab'} | ${'p_analytics_ci_cd_lead_time'}
${'time-to-restore-service-tab'} | ${'p_analytics_ci_cd_time_to_restore_service'}
+ ${'change-failure-rate-tab'} | ${'p_analytics_ci_cd_change_failure_rate'}
`('tracks the $event event when clicked', ({ testId, event }) => {
jest.spyOn(API, 'trackRedisHllUserEvent');
@@ -151,6 +146,7 @@ describe('ProjectsPipelinesChartsApp', () => {
describe('when provided with a query param', () => {
it.each`
chart | tab
+ ${'change-failure-rate'} | ${'4'}
${'time-to-restore-service'} | ${'3'}
${'lead-time'} | ${'2'}
${'deployment-frequency'} | ${'1'}
diff --git a/spec/frontend/projects/settings/components/new_access_dropdown_spec.js b/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
index a42891423cd..1db48ce05d7 100644
--- a/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
+++ b/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
@@ -29,9 +29,20 @@ jest.mock('~/projects/settings/api/access_dropdown_api', () => ({
}),
getDeployKeys: jest.fn().mockResolvedValue({
data: [
- { id: 10, title: 'key10', fingerprint: 'abcdefghijklmnop', owner: { name: 'user1' } },
- { id: 11, title: 'key11', fingerprint: 'abcdefghijklmnop', owner: { name: 'user2' } },
- { id: 12, title: 'key12', fingerprint: 'abcdefghijklmnop', owner: { name: 'user3' } },
+ {
+ id: 10,
+ title: 'key10',
+ fingerprint: 'md5-abcdefghijklmnop',
+ fingerprint_sha256: 'sha256-abcdefghijklmnop',
+ owner: { name: 'user1' },
+ },
+ {
+ id: 11,
+ title: 'key11',
+ fingerprint_sha256: 'sha256-abcdefghijklmnop',
+ owner: { name: 'user2' },
+ },
+ { id: 12, title: 'key12', fingerprint: 'md5-abcdefghijklmnop', owner: { name: 'user3' } },
],
}),
}));
@@ -279,6 +290,7 @@ describe('Access Level Dropdown', () => {
{ id: 115, type: 'group', group_id: 5 },
{ id: 118, type: 'user', user_id: 8, name: 'user2' },
{ id: 121, type: 'deploy_key', deploy_key_id: 11 },
+ { id: 122, type: 'deploy_key', deploy_key_id: 12 },
];
const findSelected = (type) =>
@@ -309,8 +321,9 @@ describe('Access Level Dropdown', () => {
it('should set selected deploy keys as intersection between the server response and preselected mapping some keys', () => {
const selectedDeployKeys = findSelected(LEVEL_TYPES.DEPLOY_KEY);
- expect(selectedDeployKeys).toHaveLength(1);
- expect(selectedDeployKeys.at(0).text()).toContain('key11 (abcdefghijklmn...)');
+ expect(selectedDeployKeys).toHaveLength(2);
+ expect(selectedDeployKeys.at(0).text()).toContain('key11 (sha256-abcdefg...)');
+ expect(selectedDeployKeys.at(1).text()).toContain('key12 (md5-abcdefghij...)');
});
});
diff --git a/spec/frontend/protected_branches/protected_branch_edit_spec.js b/spec/frontend/protected_branches/protected_branch_edit_spec.js
index d842e00d850..6ef1b58a956 100644
--- a/spec/frontend/protected_branches/protected_branch_edit_spec.js
+++ b/spec/frontend/protected_branches/protected_branch_edit_spec.js
@@ -114,27 +114,30 @@ describe('ProtectedBranchEdit', () => {
});
describe('when clicked', () => {
- beforeEach(() => {
+ beforeEach(async () => {
mock.onPatch(TEST_URL, { protected_branch: { [patchParam]: true } }).replyOnce(200, {});
-
- toggle.click();
});
- it('checks and disables button', () => {
+ it('checks and disables button', async () => {
+ await toggle.click();
+
expect(toggle).toHaveClass(IS_CHECKED_CLASS);
expect(toggle.querySelector(IS_LOADING_SELECTOR)).not.toBe(null);
expect(toggle).toHaveClass(IS_DISABLED_CLASS);
});
- it('sends update to BE', () =>
- axios.waitForAll().then(() => {
- // Args are asserted in the `.onPatch` call
- expect(mock.history.patch).toHaveLength(1);
+ it('sends update to BE', async () => {
+ await toggle.click();
+
+ await axios.waitForAll();
- expect(toggle).not.toHaveClass(IS_DISABLED_CLASS);
- expect(toggle.querySelector(IS_LOADING_SELECTOR)).toBe(null);
- expect(createFlash).not.toHaveBeenCalled();
- }));
+ // Args are asserted in the `.onPatch` call
+ expect(mock.history.patch).toHaveLength(1);
+
+ expect(toggle).not.toHaveClass(IS_DISABLED_CLASS);
+ expect(toggle.querySelector(IS_LOADING_SELECTOR)).toBe(null);
+ expect(createFlash).not.toHaveBeenCalled();
+ });
});
describe('when clicked and BE error', () => {
@@ -143,10 +146,11 @@ describe('ProtectedBranchEdit', () => {
toggle.click();
});
- it('flashes error', () =>
- axios.waitForAll().then(() => {
- expect(createFlash).toHaveBeenCalled();
- }));
+ it('flashes error', async () => {
+ await axios.waitForAll();
+
+ expect(createFlash).toHaveBeenCalled();
+ });
});
});
});
diff --git a/spec/frontend/ref/components/ref_selector_spec.js b/spec/frontend/ref/components/ref_selector_spec.js
index e1fc60f0d92..882cb2c1199 100644
--- a/spec/frontend/ref/components/ref_selector_spec.js
+++ b/spec/frontend/ref/components/ref_selector_spec.js
@@ -162,9 +162,9 @@ describe('Ref selector component', () => {
});
describe('initialization behavior', () => {
- beforeEach(createComponent);
-
it('initializes the dropdown with branches and tags when mounted', () => {
+ createComponent();
+
return waitForRequests().then(() => {
expect(branchesApiCallSpy).toHaveBeenCalledTimes(1);
expect(tagsApiCallSpy).toHaveBeenCalledTimes(1);
@@ -173,6 +173,8 @@ describe('Ref selector component', () => {
});
it('shows a spinner while network requests are in progress', () => {
+ createComponent();
+
expect(findLoadingIcon().exists()).toBe(true);
return waitForRequests().then(() => {
diff --git a/spec/frontend/releases/__snapshots__/util_spec.js.snap b/spec/frontend/releases/__snapshots__/util_spec.js.snap
index fd2a8eec4d4..90a33152877 100644
--- a/spec/frontend/releases/__snapshots__/util_spec.js.snap
+++ b/spec/frontend/releases/__snapshots__/util_spec.js.snap
@@ -57,7 +57,7 @@ Object {
"evidences": Array [],
"milestones": Array [],
"name": "The second release",
- "releasedAt": "2019-01-10T00:00:00Z",
+ "releasedAt": 2019-01-10T00:00:00.000Z,
"tagName": "v1.2",
"tagPath": "/releases-namespace/releases-project/-/tags/v1.2",
"upcomingRelease": true,
@@ -188,7 +188,7 @@ Object {
},
],
"name": "The first release",
- "releasedAt": "2018-12-10T00:00:00Z",
+ "releasedAt": 2018-12-10T00:00:00.000Z,
"tagName": "v1.1",
"tagPath": "/releases-namespace/releases-project/-/tags/v1.1",
"upcomingRelease": true,
@@ -196,10 +196,10 @@ Object {
],
"paginationInfo": Object {
"__typename": "PageInfo",
- "endCursor": "eyJyZWxlYXNlZF9hdCI6IjIwMTgtMTItMTAgMDA6MDA6MDAuMDAwMDAwMDAwIFVUQyIsImlkIjoiMSJ9",
+ "endCursor": "eyJyZWxlYXNlZF9hdCI6IjIwMTgtMTItMTAgMDA6MDA6MDAuMDAwMDAwMDAwICswMDAwIiwiaWQiOiIxIn0",
"hasNextPage": false,
"hasPreviousPage": false,
- "startCursor": "eyJyZWxlYXNlZF9hdCI6IjIwMTktMDEtMTAgMDA6MDA6MDAuMDAwMDAwMDAwIFVUQyIsImlkIjoiMiJ9",
+ "startCursor": "eyJyZWxlYXNlZF9hdCI6IjIwMTktMDEtMTAgMDA6MDA6MDAuMDAwMDAwMDAwICswMDAwIiwiaWQiOiIyIn0",
},
}
`;
@@ -267,7 +267,9 @@ Object {
},
],
"name": "The first release",
+ "releasedAt": 2018-12-10T00:00:00.000Z,
"tagName": "v1.1",
+ "tagPath": "/releases-namespace/releases-project/-/tags/v1.1",
},
}
`;
@@ -400,7 +402,7 @@ Object {
},
],
"name": "The first release",
- "releasedAt": "2018-12-10T00:00:00Z",
+ "releasedAt": 2018-12-10T00:00:00.000Z,
"tagName": "v1.1",
"tagPath": "/releases-namespace/releases-project/-/tags/v1.1",
"upcomingRelease": true,
diff --git a/spec/frontend/releases/components/app_edit_new_spec.js b/spec/frontend/releases/components/app_edit_new_spec.js
index 80be27c92ff..cb044b9e891 100644
--- a/spec/frontend/releases/components/app_edit_new_spec.js
+++ b/spec/frontend/releases/components/app_edit_new_spec.js
@@ -1,21 +1,24 @@
-import { mount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { merge } from 'lodash';
import Vuex from 'vuex';
import { nextTick } from 'vue';
-import { GlFormCheckbox } from '@gitlab/ui';
-import originalRelease from 'test_fixtures/api/releases/release.json';
+import { GlDatepicker, GlFormCheckbox } from '@gitlab/ui';
+import originalOneReleaseForEditingQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/one_release_for_editing.query.graphql.json';
+import { convertOneReleaseGraphQLResponse } from '~/releases/util';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
-import * as commonUtils from '~/lib/utils/common_utils';
import ReleaseEditNewApp from '~/releases/components/app_edit_new.vue';
import AssetLinksForm from '~/releases/components/asset_links_form.vue';
+import ConfirmDeleteModal from '~/releases/components/confirm_delete_modal.vue';
import { BACK_URL_PARAM } from '~/releases/constants';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
+const originalRelease = originalOneReleaseForEditingQueryResponse.data.project.release;
const originalMilestones = originalRelease.milestones;
const releasesPagePath = 'path/to/releases/page';
+const upcomingReleaseDocsPath = 'path/to/upcoming/release/docs';
describe('Release edit/new component', () => {
let wrapper;
@@ -28,22 +31,24 @@ describe('Release edit/new component', () => {
const factory = async ({ featureFlags = {}, store: storeUpdates = {} } = {}) => {
state = {
release,
+ isExistingRelease: true,
markdownDocsPath: 'path/to/markdown/docs',
releasesPagePath,
projectId: '8',
groupId: '42',
groupMilestonesAvailable: true,
+ upcomingReleaseDocsPath,
};
actions = {
initializeRelease: jest.fn(),
saveRelease: jest.fn(),
addEmptyAssetLink: jest.fn(),
+ deleteRelease: jest.fn(),
};
getters = {
isValid: () => true,
- isExistingRelease: () => true,
validationErrors: () => ({
assets: {
links: [],
@@ -68,7 +73,7 @@ describe('Release edit/new component', () => {
),
);
- wrapper = mount(ReleaseEditNewApp, {
+ wrapper = mountExtended(ReleaseEditNewApp, {
store,
provide: {
glFeatures: featureFlags,
@@ -88,7 +93,7 @@ describe('Release edit/new component', () => {
mock.onGet('/api/v4/projects/8/milestones').reply(200, originalMilestones);
- release = commonUtils.convertObjectPropsToCamelCase(originalRelease, { deep: true });
+ release = convertOneReleaseGraphQLResponse(originalOneReleaseForEditingQueryResponse).data;
});
afterEach(() => {
@@ -128,6 +133,18 @@ describe('Release edit/new component', () => {
expect(wrapper.find('#release-title').element.value).toBe(release.name);
});
+ it('renders the released at date in the "Released at" datepicker', () => {
+ expect(wrapper.findComponent(GlDatepicker).props('value')).toBe(release.releasedAt);
+ });
+
+ it('links to the documentation on upcoming releases in the "Released at" description', () => {
+ const link = wrapper.findByRole('link', { name: 'Upcoming Release' });
+
+ expect(link.exists()).toBe(true);
+
+ expect(link.attributes('href')).toBe(upcomingReleaseDocsPath);
+ });
+
it('renders the release notes in the "Release notes" textarea', () => {
expect(wrapper.find('#release-notes').element.value).toBe(release.description);
});
@@ -191,9 +208,7 @@ describe('Release edit/new component', () => {
store: {
modules: {
editNew: {
- getters: {
- isExistingRelease: () => false,
- },
+ state: { isExistingRelease: false },
},
},
},
@@ -274,4 +289,31 @@ describe('Release edit/new component', () => {
});
});
});
+
+ describe('delete', () => {
+ const findConfirmDeleteModal = () => wrapper.findComponent(ConfirmDeleteModal);
+
+ it('calls the deleteRelease action on confirmation', async () => {
+ await factory();
+ findConfirmDeleteModal().vm.$emit('delete');
+
+ expect(actions.deleteRelease).toHaveBeenCalled();
+ });
+
+ it('is hidden if this is a new release', async () => {
+ await factory({
+ store: {
+ modules: {
+ editNew: {
+ state: {
+ isExistingRelease: false,
+ },
+ },
+ },
+ },
+ });
+
+ expect(findConfirmDeleteModal().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/releases/components/app_index_spec.js b/spec/frontend/releases/components/app_index_spec.js
index 63ce4c8bb17..f64f07de90e 100644
--- a/spec/frontend/releases/components/app_index_spec.js
+++ b/spec/frontend/releases/components/app_index_spec.js
@@ -8,6 +8,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import allReleasesQuery from '~/releases/graphql/queries/all_releases.query.graphql';
import createFlash from '~/flash';
import { historyPushState } from '~/lib/utils/common_utils';
+import { sprintf, __ } from '~/locale';
import ReleasesIndexApp from '~/releases/components/app_index.vue';
import ReleaseBlock from '~/releases/components/release_block.vue';
import ReleaseSkeletonLoader from '~/releases/components/release_skeleton_loader.vue';
@@ -15,6 +16,7 @@ import ReleasesEmptyState from '~/releases/components/releases_empty_state.vue';
import ReleasesPagination from '~/releases/components/releases_pagination.vue';
import ReleasesSort from '~/releases/components/releases_sort.vue';
import { PAGE_SIZE, CREATED_ASC, DEFAULT_SORT } from '~/releases/constants';
+import { deleteReleaseSessionKey } from '~/releases/util';
Vue.use(VueApollo);
@@ -44,6 +46,7 @@ describe('app_index.vue', () => {
let singleRelease;
let noReleases;
let queryMock;
+ let toast;
const createComponent = ({
singleResponse = Promise.resolve(singleRelease),
@@ -58,12 +61,17 @@ describe('app_index.vue', () => {
],
]);
+ toast = jest.fn();
+
wrapper = shallowMountExtended(ReleasesIndexApp, {
apolloProvider,
provide: {
newReleasePath,
projectPath,
},
+ mocks: {
+ $toast: { show: toast },
+ },
});
};
@@ -395,4 +403,27 @@ describe('app_index.vue', () => {
},
);
});
+
+ describe('after deleting', () => {
+ const release = 'fake release';
+ const key = deleteReleaseSessionKey(projectPath);
+
+ beforeEach(async () => {
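+ // Hypothetical clarifying comment: the delete action is assumed to stash the release name
+ // in sessionStorage under this key so the index page can show a toast after redirect.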
+ window.sessionStorage.setItem(key, release);
+
+ await createComponent();
+ });
+
+ it('shows a toast', async () => {
+ expect(toast).toHaveBeenCalledWith(
+ sprintf(__('Release %{release} has been successfully deleted.'), {
+ release,
+ }),
+ );
+ });
+
+ it('clears session storage', async () => {
+ expect(window.sessionStorage.getItem(key)).toBe(null);
+ });
+ });
});
diff --git a/spec/frontend/releases/components/confirm_delete_modal_spec.js b/spec/frontend/releases/components/confirm_delete_modal_spec.js
new file mode 100644
index 00000000000..f7c526c1ced
--- /dev/null
+++ b/spec/frontend/releases/components/confirm_delete_modal_spec.js
@@ -0,0 +1,89 @@
+import Vue, { nextTick } from 'vue';
+import Vuex from 'vuex';
+import { GlModal } from '@gitlab/ui';
+import originalOneReleaseForEditingQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/one_release_for_editing.query.graphql.json';
+import { convertOneReleaseGraphQLResponse } from '~/releases/util';
+import ConfirmDeleteModal from '~/releases/components/confirm_delete_modal.vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { __, sprintf } from '~/locale';
+
+Vue.use(Vuex);
+
+const release = convertOneReleaseGraphQLResponse(originalOneReleaseForEditingQueryResponse).data;
+const deleteReleaseDocsPath = 'path/to/delete/release/docs';
+
+describe('~/releases/components/confirm_delete_modal.vue', () => {
+ let wrapper;
+ let state;
+
+ const factory = async () => {
+ state = {
+ release,
+ deleteReleaseDocsPath,
+ };
+
+ const store = new Vuex.Store({
+ modules: {
+ editNew: {
+ namespaced: true,
+ state,
+ },
+ },
+ });
+
+ wrapper = mountExtended(ConfirmDeleteModal, {
+ store,
+ });
+
+ await nextTick();
+ };
+
+ beforeEach(async () => {
+ await factory();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('button', () => {
+ it('should open the modal on click', async () => {
+ await wrapper.findByRole('button', { name: 'Delete' }).trigger('click');
+
+ const title = wrapper.findByText(
+ sprintf(__('Delete release %{release}?'), { release: release.name }),
+ );
+
+ expect(title.exists()).toBe(true);
+ });
+ });
+
+ describe('modal', () => {
+ beforeEach(async () => {
+ await wrapper.findByRole('button', { name: 'Delete' }).trigger('click');
+ });
+
+ it('confirms the user wants to delete the release', () => {
+ const text = wrapper.findByText(__('Are you sure you want to delete this release?'));
+
+ expect(text.exists()).toBe(true);
+ });
+
+ it('links to the tag', () => {
+ const tagPath = wrapper.findByRole('link', { name: release.tagName });
+ expect(tagPath.attributes('href')).toBe(release.tagPath);
+ });
+
+ it('links to the docs on deleting releases', () => {
+ const docsPath = wrapper.findByRole('link', { name: 'Deleting a release' });
+
+ expect(docsPath.attributes('href')).toBe(deleteReleaseDocsPath);
+ });
+
+ it('emits a delete event on action primary', () => {
+ wrapper.findComponent(GlModal).vm.$emit('primary');
+
+ expect(wrapper.emitted('delete')).toEqual([[]]);
+ });
+ });
+});
diff --git a/spec/frontend/releases/components/release_block_footer_spec.js b/spec/frontend/releases/components/release_block_footer_spec.js
index b095e9e1d78..848e802df4b 100644
--- a/spec/frontend/releases/components/release_block_footer_spec.js
+++ b/spec/frontend/releases/components/release_block_footer_spec.js
@@ -2,14 +2,16 @@ import { GlLink, GlIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { cloneDeep } from 'lodash';
import { nextTick } from 'vue';
-import originalRelease from 'test_fixtures/api/releases/release.json';
+import originalOneReleaseQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/one_release.query.graphql.json';
+import { convertOneReleaseGraphQLResponse } from '~/releases/util';
import { trimText } from 'helpers/text_helper';
-import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import ReleaseBlockFooter from '~/releases/components/release_block_footer.vue';
// TODO: Encapsulate date helpers https://gitlab.com/gitlab-org/gitlab/-/issues/320883
const MONTHS_IN_MS = 1000 * 60 * 60 * 24 * 31;
-const mockFutureDate = new Date(new Date().getTime() + MONTHS_IN_MS).toISOString();
+const mockFutureDate = new Date(new Date().getTime() + MONTHS_IN_MS);
+
+const originalRelease = convertOneReleaseGraphQLResponse(originalOneReleaseQueryResponse).data;
describe('Release block footer', () => {
let wrapper;
@@ -18,7 +20,7 @@ describe('Release block footer', () => {
const factory = async (props = {}) => {
wrapper = mount(ReleaseBlockFooter, {
propsData: {
- ...convertObjectPropsToCamelCase(release, { deep: true }),
+ ...originalRelease,
...props,
},
});
@@ -55,8 +57,8 @@ describe('Release block footer', () => {
const commitLink = commitInfoSectionLink();
expect(commitLink.exists()).toBe(true);
- expect(commitLink.text()).toBe(release.commit.short_id);
- expect(commitLink.attributes('href')).toBe(release.commit_path);
+ expect(commitLink.text()).toBe(release.commit.shortId);
+ expect(commitLink.attributes('href')).toBe(release.commitPath);
});
it('renders the tag icon', () => {
@@ -70,8 +72,8 @@ describe('Release block footer', () => {
const commitLink = tagInfoSection().find(GlLink);
expect(commitLink.exists()).toBe(true);
- expect(commitLink.text()).toBe(release.tag_name);
- expect(commitLink.attributes('href')).toBe(release.tag_path);
+ expect(commitLink.text()).toBe(release.tagName);
+ expect(commitLink.attributes('href')).toBe(release.tagPath);
});
it('renders the author and creation time info', () => {
@@ -114,14 +116,14 @@ describe('Release block footer', () => {
const avatarImg = authorDateInfoSection().find('img');
expect(avatarImg.exists()).toBe(true);
- expect(avatarImg.attributes('src')).toBe(release.author.avatar_url);
+ expect(avatarImg.attributes('src')).toBe(release.author.avatarUrl);
});
it("renders a link to the author's profile", () => {
const authorLink = authorDateInfoSection().find(GlLink);
expect(authorLink.exists()).toBe(true);
- expect(authorLink.attributes('href')).toBe(release.author.web_url);
+ expect(authorLink.attributes('href')).toBe(release.author.webUrl);
});
});
@@ -138,7 +140,7 @@ describe('Release block footer', () => {
it('renders the commit SHA as plain text (instead of a link)', () => {
expect(commitInfoSectionLink().exists()).toBe(false);
- expect(commitInfoSection().text()).toBe(release.commit.short_id);
+ expect(commitInfoSection().text()).toBe(release.commit.shortId);
});
});
@@ -155,7 +157,7 @@ describe('Release block footer', () => {
it('renders the tag name as plain text (instead of a link)', () => {
expect(tagInfoSectionLink().exists()).toBe(false);
- expect(tagInfoSection().text()).toBe(release.tag_name);
+ expect(tagInfoSection().text()).toBe(release.tagName);
});
});
diff --git a/spec/frontend/releases/components/release_block_spec.js b/spec/frontend/releases/components/release_block_spec.js
index c4910ae9b2f..17e2af687a6 100644
--- a/spec/frontend/releases/components/release_block_spec.js
+++ b/spec/frontend/releases/components/release_block_spec.js
@@ -1,7 +1,8 @@
import { mount } from '@vue/test-utils';
import $ from 'jquery';
import { nextTick } from 'vue';
-import originalRelease from 'test_fixtures/api/releases/release.json';
+import originalOneReleaseQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/one_release.query.graphql.json';
+import { convertOneReleaseGraphQLResponse } from '~/releases/util';
import * as commonUtils from '~/lib/utils/common_utils';
import * as urlUtility from '~/lib/utils/url_utility';
import EvidenceBlock from '~/releases/components/evidence_block.vue';
@@ -34,7 +35,7 @@ describe('Release block', () => {
beforeEach(() => {
jest.spyOn($.fn, 'renderGFM');
- release = commonUtils.convertObjectPropsToCamelCase(originalRelease, { deep: true });
+ release = convertOneReleaseGraphQLResponse(originalOneReleaseQueryResponse).data;
});
afterEach(() => {
diff --git a/spec/frontend/releases/components/tag_field_spec.js b/spec/frontend/releases/components/tag_field_spec.js
index db08f874959..e7b9aa4abbb 100644
--- a/spec/frontend/releases/components/tag_field_spec.js
+++ b/spec/frontend/releases/components/tag_field_spec.js
@@ -9,14 +9,14 @@ describe('releases/components/tag_field', () => {
let store;
let wrapper;
- const createComponent = ({ tagName }) => {
+ const createComponent = ({ isExistingRelease }) => {
store = createStore({
modules: {
editNew: createEditNewModule({}),
},
});
- store.state.editNew.tagName = tagName;
+ store.state.editNew.isExistingRelease = isExistingRelease;
wrapper = shallowMount(TagField, { store });
};
@@ -31,7 +31,7 @@ describe('releases/components/tag_field', () => {
describe('when an existing release is being edited', () => {
beforeEach(() => {
- createComponent({ tagName: 'v1.0' });
+ createComponent({ isExistingRelease: true });
});
it('renders the TagFieldExisting component', () => {
@@ -45,7 +45,7 @@ describe('releases/components/tag_field', () => {
describe('when a new release is being created', () => {
beforeEach(() => {
- createComponent({ tagName: null });
+ createComponent({ isExistingRelease: false });
});
it('renders the TagFieldNew component', () => {
diff --git a/spec/frontend/releases/stores/modules/detail/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index 41653f62ebf..ce3b690213c 100644
--- a/spec/frontend/releases/stores/modules/detail/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -9,10 +9,15 @@ import { ASSET_LINK_TYPE } from '~/releases/constants';
import createReleaseAssetLinkMutation from '~/releases/graphql/mutations/create_release_link.mutation.graphql';
import deleteReleaseAssetLinkMutation from '~/releases/graphql/mutations/delete_release_link.mutation.graphql';
import updateReleaseMutation from '~/releases/graphql/mutations/update_release.mutation.graphql';
+import deleteReleaseMutation from '~/releases/graphql/mutations/delete_release.mutation.graphql';
import * as actions from '~/releases/stores/modules/edit_new/actions';
import * as types from '~/releases/stores/modules/edit_new/mutation_types';
import createState from '~/releases/stores/modules/edit_new/state';
-import { gqClient, convertOneReleaseGraphQLResponse } from '~/releases/util';
+import {
+ gqClient,
+ convertOneReleaseGraphQLResponse,
+ deleteReleaseSessionKey,
+} from '~/releases/util';
jest.mock('~/api/tags_api');
@@ -37,19 +42,15 @@ describe('Release edit/new actions', () => {
let error;
const setupState = (updates = {}) => {
- const getters = {
- isExistingRelease: true,
- };
-
state = {
...createState({
projectId: '18',
+ isExistingRelease: true,
tagName: releaseResponse.tag_name,
releasesPagePath: 'path/to/releases/page',
markdownDocsPath: 'path/to/markdown/docs',
markdownPreviewPath: 'path/to/markdown/preview',
}),
- ...getters,
...updates,
};
};
@@ -168,6 +169,15 @@ describe('Release edit/new actions', () => {
});
});
+ describe('updateReleasedAt', () => {
+ it(`commits ${types.UPDATE_RELEASED_AT} with the updated date`, () => {
+ const newDate = new Date();
+ return testAction(actions.updateReleasedAt, newDate, state, [
+ { type: types.UPDATE_RELEASED_AT, payload: newDate },
+ ]);
+ });
+ });
+
describe('updateCreateFrom', () => {
it(`commits ${types.UPDATE_CREATE_FROM} with the updated ref`, () => {
const newRef = 'my-feature-branch';
@@ -177,6 +187,15 @@ describe('Release edit/new actions', () => {
});
});
+ describe('updateShowCreateFrom', () => {
+ it(`commits ${types.UPDATE_SHOW_CREATE_FROM} with the updated value`, () => {
+ const newValue = false;
+ return testAction(actions.updateShowCreateFrom, newValue, state, [
+ { type: types.UPDATE_SHOW_CREATE_FROM, payload: newValue },
+ ]);
+ });
+ });
+
describe('updateReleaseTitle', () => {
it(`commits ${types.UPDATE_RELEASE_TITLE} with the updated release title`, () => {
const newTitle = 'The new release title';
@@ -572,6 +591,133 @@ describe('Release edit/new actions', () => {
});
});
+ describe('deleteRelease', () => {
+ let getters;
+ let dispatch;
+ let commit;
+ let release;
+
+ beforeEach(() => {
+ getters = {
+ releaseDeleteMutationVariables: {
+ input: {
+ projectPath: 'test-org/test',
+ tagName: 'v1.0',
+ },
+ },
+ };
+
+ release = convertOneReleaseGraphQLResponse(releaseResponse).data;
+
+ setupState({
+ release,
+ originalRelease: release,
+ ...getters,
+ });
+
+ dispatch = jest.fn();
+ commit = jest.fn();
+
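+ // Default to a successful releaseDelete response; error cases override this mock below.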
+ gqClient.mutate.mockResolvedValue({
+ data: {
+ releaseDelete: {
+ errors: [],
+ },
+ releaseAssetLinkDelete: {
+ errors: [],
+ },
+ },
+ });
+ });
+
+ describe('when the delete is successful', () => {
+ beforeEach(() => {
+ window.sessionStorage.clear();
+ });
+
+ it('dispatches receiveSaveReleaseSuccess', async () => {
+ await actions.deleteRelease({ commit, dispatch, state, getters });
+ expect(dispatch.mock.calls).toEqual([
+ ['receiveSaveReleaseSuccess', state.releasesPagePath],
+ ]);
+ });
+
+ it('deletes the release', async () => {
+ await actions.deleteRelease({ commit, dispatch, state, getters });
+ expect(gqClient.mutate.mock.calls[0]).toEqual([
+ {
+ mutation: deleteReleaseMutation,
+ variables: getters.releaseDeleteMutationVariables,
+ },
+ ]);
+ });
+
+ it('stores the name for toasting', async () => {
+ await actions.deleteRelease({ commit, dispatch, state, getters });
+ expect(window.sessionStorage.getItem(deleteReleaseSessionKey(state.projectPath))).toBe(
+ state.release.name,
+ );
+ });
+ });
+
+ describe('when the delete request fails', () => {
+ beforeEach(() => {
+ gqClient.mutate.mockRejectedValue(error);
+ });
+
+ it('commits RECEIVE_SAVE_RELEASE_ERROR with an error object', async () => {
+ await actions.deleteRelease({ commit, dispatch, state, getters });
+
+ expect(commit.mock.calls).toContainEqual([types.RECEIVE_SAVE_RELEASE_ERROR, error]);
+ });
+
+ it('shows a flash message', async () => {
+ await actions.deleteRelease({ commit, dispatch, state, getters });
+
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'Something went wrong while deleting the release.',
+ });
+ });
+ });
+
+ describe('when the delete returns errors', () => {
+ beforeEach(() => {
+ gqClient.mutate.mockResolvedValue({
+ data: {
+ releaseDelete: {
+ errors: ['Something went wrong!'],
+ },
+ releaseAssetLinkDelete: {
+ errors: [],
+ },
+ },
+ });
+ });
+
+ it('commits RECEIVE_SAVE_RELEASE_ERROR with an error object', async () => {
+ await actions.deleteRelease({ commit, dispatch, state, getters });
+
+ expect(commit.mock.calls).toContainEqual([
+ types.RECEIVE_SAVE_RELEASE_ERROR,
+ expect.any(Error),
+ ]);
+ });
+
+ it('shows a flash message', async () => {
+ await actions.deleteRelease({ commit, dispatch, state, getters });
+
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'Something went wrong while deleting the release.',
+ });
+ });
+ });
+ });
+
describe('fetchTagNotes', () => {
const tagName = 'v8.0.0';
diff --git a/spec/frontend/releases/stores/modules/detail/getters_spec.js b/spec/frontend/releases/stores/modules/detail/getters_spec.js
index c42c6c00f56..4ac6eaebaa2 100644
--- a/spec/frontend/releases/stores/modules/detail/getters_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/getters_spec.js
@@ -2,20 +2,6 @@ import { s__ } from '~/locale';
import * as getters from '~/releases/stores/modules/edit_new/getters';
describe('Release edit/new getters', () => {
- describe('isExistingRelease', () => {
- it('returns true if the release is an existing release that already exists in the database', () => {
- const state = { tagName: 'test-tag-name' };
-
- expect(getters.isExistingRelease(state)).toBe(true);
- });
-
- it('returns false if the release is a new release that has not yet been saved to the database', () => {
- const state = { tagName: null };
-
- expect(getters.isExistingRelease(state)).toBe(false);
- });
- });
-
describe('releaseLinksToCreate', () => {
it("returns an empty array if state.release doesn't exist", () => {
const state = {};
@@ -302,6 +288,7 @@ describe('Release edit/new getters', () => {
name: 'release.name',
description: 'release.description',
milestones: ['release.milestone[0].title'],
+ releasedAt: new Date(2022, 5, 30),
},
},
{
@@ -310,6 +297,7 @@ describe('Release edit/new getters', () => {
name: 'release.name',
description: 'release.description',
milestones: ['release.milestone[0].title'],
+ releasedAt: new Date(2022, 5, 30),
},
],
[
@@ -381,6 +369,26 @@ describe('Release edit/new getters', () => {
});
});
+ describe('releaseDeleteMutationVariables', () => {
+ it('returns all the data needed for the releaseDelete GraphQL mutation', () => {
+ const state = {
+ projectPath: 'test-org/test',
+ release: { tagName: 'v1.0' },
+ };
+
+ const expectedVariables = {
+ input: {
+ projectPath: 'test-org/test',
+ tagName: 'v1.0',
+ },
+ };
+
+ const actualVariables = getters.releaseDeleteMutationVariables(state);
+
+ expect(actualVariables).toEqual(expectedVariables);
+ });
+ });
+
describe('formattedReleaseNotes', () => {
it.each`
description | includeTagNotes | tagNotes | included
diff --git a/spec/frontend/releases/stores/modules/detail/mutations_spec.js b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
index 85844831e0b..60b57c7a7ff 100644
--- a/spec/frontend/releases/stores/modules/detail/mutations_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
@@ -25,11 +25,12 @@ describe('Release edit/new mutations', () => {
mutations[types.INITIALIZE_EMPTY_RELEASE](state);
expect(state.release).toEqual({
- tagName: null,
+ tagName: 'v1.3',
name: '',
description: '',
milestones: [],
groupMilestones: [],
+ releasedAt: new Date(),
assets: {
links: [],
},
@@ -82,6 +83,16 @@ describe('Release edit/new mutations', () => {
});
});
+ describe(`${types.UPDATE_RELEASED_AT}`, () => {
+ it("updates the release's released at date", () => {
+ state.release = release;
+ const newDate = new Date();
+ mutations[types.UPDATE_RELEASED_AT](state, newDate);
+
+ expect(state.release.releasedAt).toBe(newDate);
+ });
+ });
+
describe(`${types.UPDATE_CREATE_FROM}`, () => {
it('updates the ref that the tag will be created from', () => {
state.createFrom = 'main';
@@ -92,6 +103,16 @@ describe('Release edit/new mutations', () => {
});
});
+ describe(`${types.UPDATE_SHOW_CREATE_FROM}`, () => {
+ it('updates whether the "create from" field is shown', () => {
+ state.showCreateFrom = true;
+ const newValue = false;
+ mutations[types.UPDATE_SHOW_CREATE_FROM](state, newValue);
+
+ expect(state.showCreateFrom).toBe(newValue);
+ });
+ });
+
describe(`${types.UPDATE_RELEASE_TITLE}`, () => {
it("updates the release's title", () => {
state.release = release;
diff --git a/spec/frontend/reports/components/grouped_issues_list_spec.js b/spec/frontend/reports/components/grouped_issues_list_spec.js
index c6eebf05dd7..95ef0bcbcc7 100644
--- a/spec/frontend/reports/components/grouped_issues_list_spec.js
+++ b/spec/frontend/reports/components/grouped_issues_list_spec.js
@@ -17,7 +17,6 @@ describe('Grouped Issues List', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
it('renders a smart virtual list with the correct props', () => {
@@ -35,13 +34,15 @@ describe('Grouped Issues List', () => {
});
describe('without data', () => {
- beforeEach(createComponent);
+ beforeEach(() => {
+ createComponent();
+ });
it.each(['unresolved', 'resolved'])('does not render a header for %s issues', (issueName) => {
expect(findHeading(issueName).exists()).toBe(false);
});
- it.each('resolved', 'unresolved')('does not render report items for %s issues', () => {
+ it.each(['resolved', 'unresolved'])('does not render report items for %s issues', () => {
expect(wrapper.find(ReportItem).exists()).toBe(false);
});
});
diff --git a/spec/frontend/reports/mock_data/new_failures_report.json b/spec/frontend/reports/mock_data/new_failures_report.json
index 8b9c12c6271..438f7c82788 100644
--- a/spec/frontend/reports/mock_data/new_failures_report.json
+++ b/spec/frontend/reports/mock_data/new_failures_report.json
@@ -8,12 +8,14 @@
{
"result": "failure",
"name": "Test#sum when a is 1 and b is 2 returns summary",
+ "file": "spec/file_1.rb",
"execution_time": 0.009411,
"system_output": "Failure/Error: is_expected.to eq(3)\n\n expected: 3\n got: -1\n\n (compared using ==)\n./spec/test_spec.rb:12:in `block (4 levels) in <top (required)>'"
},
{
"result": "failure",
"name": "Test#sum when a is 100 and b is 200 returns summary",
+ "file": "spec/file_2.rb",
"execution_time": 0.000162,
"system_output": "Failure/Error: is_expected.to eq(300)\n\n expected: 300\n got: -100\n\n (compared using ==)\n./spec/test_spec.rb:21:in `block (4 levels) in <top (required)>'"
}
diff --git a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
index 4732d68c8c6..cb56f392ec9 100644
--- a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
+++ b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
@@ -17,7 +17,7 @@ exports[`Repository last commit component renders commit widget 1`] = `
/>
<div
- class="commit-detail flex-list"
+ class="commit-detail flex-list gl-display-flex gl-justify-content-space-between gl-align-items-flex-start gl-flex-grow-1 gl-min-w-0"
>
<div
class="commit-content qa-commit-content"
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
index d498b6f0c4f..2b70cb84c67 100644
--- a/spec/frontend/repository/components/blob_content_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -136,6 +136,7 @@ describe('Blob content viewer component', () => {
const findBlobButtonGroup = () => wrapper.findComponent(BlobButtonGroup);
const findForkSuggestion = () => wrapper.findComponent(ForkSuggestion);
const findCodeIntelligence = () => wrapper.findComponent(CodeIntelligence);
+ const findSourceViewer = () => wrapper.findComponent(SourceViewer);
beforeEach(() => {
jest.spyOn(window, 'requestIdleCallback').mockImplementation(execImmediately);
@@ -197,6 +198,16 @@ describe('Blob content viewer component', () => {
expect(mockAxios.history.get[0].url).toBe(legacyViewerUrl);
});
+ it('loads a legacy viewer when the source viewer emits an error', async () => {
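+ // Resolve the viewer loader with SourceViewer so the test can simulate it emitting an error.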
+ loadViewer.mockReturnValueOnce(SourceViewer);
+ await createComponent();
+ findSourceViewer().vm.$emit('error');
+ await waitForPromises();
+
+ expect(mockAxios.history.get).toHaveLength(1);
+ expect(mockAxios.history.get[0].url).toBe(legacyViewerUrl);
+ });
+
it('loads a legacy viewer when a viewer component is not available', async () => {
await createComponent({ blob: { ...simpleViewerMock, fileType: 'unknown' } });
diff --git a/spec/frontend/repository/components/last_commit_spec.js b/spec/frontend/repository/components/last_commit_spec.js
index cfbf74e34aa..3783b34e33a 100644
--- a/spec/frontend/repository/components/last_commit_spec.js
+++ b/spec/frontend/repository/components/last_commit_spec.js
@@ -1,179 +1,227 @@
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
import { GlLoadingIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
import LastCommit from '~/repository/components/last_commit.vue';
import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
-
-let vm;
-
-function createCommitData(data = {}) {
- const defaultData = {
- sha: '123456789',
- title: 'Commit title',
- titleHtml: 'Commit title',
- message: 'Commit message',
- webPath: '/commit/123',
- authoredDate: '2019-01-01',
- author: {
- name: 'Test',
- avatarUrl: 'https://test.com',
- webPath: '/test',
- },
- pipeline: {
+import pathLastCommitQuery from 'shared_queries/repository/path_last_commit.query.graphql';
+import { refMock } from '../mock_data';
+
+let wrapper;
+let mockResolver;
+
+const findPipeline = () => wrapper.find('.js-commit-pipeline');
+const findTextExpander = () => wrapper.find('.text-expander');
+const findUserLink = () => wrapper.find('.js-user-link');
+const findUserAvatarLink = () => wrapper.findComponent(UserAvatarLink);
+const findLastCommitLabel = () => wrapper.findByTestId('last-commit-id-label');
+const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+const findCommitRowDescription = () => wrapper.find('.commit-row-description');
+const findStatusBox = () => wrapper.find('.gpg-status-box');
+const findItemTitle = () => wrapper.find('.item-title');
+
+const defaultPipelineEdges = [
+ {
+ __typename: 'PipelineEdge',
+ node: {
+ __typename: 'Pipeline',
+ id: 'gid://gitlab/Ci::Pipeline/167',
detailedStatus: {
+ __typename: 'DetailedStatus',
+ id: 'id',
detailsPath: 'https://test.com/pipeline',
- icon: 'failed',
+ icon: 'status_running',
tooltip: 'failed',
text: 'failed',
- group: {},
+ group: 'failed',
},
},
- };
- return Object.assign(defaultData, data);
-}
-
-function factory(commit = createCommitData(), loading = false) {
- vm = shallowMount(LastCommit, {
- mocks: {
- $apollo: {
- queries: {
- commit: {
- loading: true,
+ },
+];
+
+const defaultAuthor = {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/1',
+ name: 'Test',
+ avatarUrl: 'https://test.com',
+ webPath: '/test',
+};
+
+const defaultMessage = 'Commit title';
+
+const createCommitData = ({
+ pipelineEdges = defaultPipelineEdges,
+ author = defaultAuthor,
+ descriptionHtml = '',
+ signatureHtml = null,
+ message = defaultMessage,
+}) => {
+ return {
+ data: {
+ project: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/6',
+ repository: {
+ __typename: 'Repository',
+ paginatedTree: {
+ __typename: 'TreeConnection',
+ nodes: [
+ {
+ __typename: 'Tree',
+ lastCommit: {
+ __typename: 'Commit',
+ id: 'gid://gitlab/CommitPresenter/123456789',
+ sha: '123456789',
+ title: 'Commit title',
+ titleHtml: 'Commit title',
+ descriptionHtml,
+ message,
+ webPath: '/commit/123',
+ authoredDate: '2019-01-01',
+ authorName: 'Test',
+ authorGravatar: 'https://test.com',
+ author,
+ signatureHtml,
+ pipelines: {
+ __typename: 'PipelineConnection',
+ edges: pipelineEdges,
+ },
+ },
+ },
+ ],
},
},
},
},
- });
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- vm.setData({ commit });
- vm.vm.$apollo.queries.commit.loading = loading;
-}
+ };
+};
-const emptyMessageClass = 'font-italic';
+const createComponent = async (data = {}) => {
+ Vue.use(VueApollo);
-describe('Repository last commit component', () => {
- afterEach(() => {
- vm.destroy();
+ const currentPath = 'path';
+
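+ // Resolver for the path_last_commit query, returning the mocked commit data above.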
+ mockResolver = jest.fn().mockResolvedValue(createCommitData(data));
+
+ wrapper = shallowMountExtended(LastCommit, {
+ apolloProvider: createMockApollo([[pathLastCommitQuery, mockResolver]]),
+ propsData: { currentPath },
+ mixins: [{ data: () => ({ ref: refMock }) }],
});
+};
+
+afterEach(() => {
+ wrapper.destroy();
+ mockResolver = null;
+});
+describe('Repository last commit component', () => {
it.each`
loading | label
${true} | ${'shows'}
${false} | ${'hides'}
- `('$label when loading icon $loading is true', async ({ loading }) => {
- factory(createCommitData(), loading);
+ `('$label when loading icon is $loading', async ({ loading }) => {
+ createComponent();
- await nextTick();
+ if (!loading) {
+ await waitForPromises();
+ }
- expect(vm.find(GlLoadingIcon).exists()).toBe(loading);
+ expect(findLoadingIcon().exists()).toBe(loading);
});
it('renders commit widget', async () => {
- factory();
+ createComponent();
+ await waitForPromises();
- await nextTick();
-
- expect(vm.element).toMatchSnapshot();
+ expect(wrapper.element).toMatchSnapshot();
});
it('renders short commit ID', async () => {
- factory();
-
- await nextTick();
+ createComponent();
+ await waitForPromises();
- expect(vm.find('[data-testid="last-commit-id-label"]').text()).toEqual('12345678');
+ expect(findLastCommitLabel().text()).toBe('12345678');
});
it('hides pipeline components when pipeline does not exist', async () => {
- factory(createCommitData({ pipeline: null }));
+ createComponent({ pipelineEdges: [] });
+ await waitForPromises();
- await nextTick();
-
- expect(vm.find('.js-commit-pipeline').exists()).toBe(false);
+ expect(findPipeline().exists()).toBe(false);
});
- it('renders pipeline components', async () => {
- factory();
-
- await nextTick();
+ it('renders pipeline components when pipeline exists', async () => {
+ createComponent();
+ await waitForPromises();
- expect(vm.find('.js-commit-pipeline').exists()).toBe(true);
+ expect(findPipeline().exists()).toBe(true);
});
it('hides author component when author does not exist', async () => {
- factory(createCommitData({ author: null }));
+ createComponent({ author: null });
+ await waitForPromises();
- await nextTick();
-
- expect(vm.find('.js-user-link').exists()).toBe(false);
- expect(vm.find(UserAvatarLink).exists()).toBe(false);
+ expect(findUserLink().exists()).toBe(false);
+ expect(findUserAvatarLink().exists()).toBe(false);
});
it('does not render description expander when description is null', async () => {
- factory(createCommitData({ descriptionHtml: null }));
-
- await nextTick();
+ createComponent();
+ await waitForPromises();
- expect(vm.find('.text-expander').exists()).toBe(false);
- expect(vm.find('.commit-row-description').exists()).toBe(false);
+ expect(findTextExpander().exists()).toBe(false);
+ expect(findCommitRowDescription().exists()).toBe(false);
});
- it('expands commit description when clicking expander', async () => {
- factory(createCommitData({ descriptionHtml: 'Test description' }));
-
- await nextTick();
-
- vm.find('.text-expander').vm.$emit('click');
-
- await nextTick();
-
- expect(vm.find('.commit-row-description').isVisible()).toBe(true);
- expect(vm.find('.text-expander').classes('open')).toBe(true);
- });
-
- it('strips the first newline of the description', async () => {
- factory(createCommitData({ descriptionHtml: '&#x000A;Update ADOPTERS.md' }));
-
- await nextTick();
-
- expect(vm.find('.commit-row-description').html()).toBe(
- '<pre class="commit-row-description gl-mb-3">Update ADOPTERS.md</pre>',
- );
+ describe('when the description is present', () => {
+ beforeEach(async () => {
+ createComponent({ descriptionHtml: '&#x000A;Update ADOPTERS.md' });
+ await waitForPromises();
+ });
+
+ it('strips the first newline of the description', () => {
+ expect(findCommitRowDescription().html()).toBe(
+ '<pre class="commit-row-description gl-mb-3">Update ADOPTERS.md</pre>',
+ );
+ });
+
+ it('expands commit description when clicking expander', async () => {
+ findTextExpander().vm.$emit('click');
+ await nextTick();
+
+ expect(findCommitRowDescription().isVisible()).toBe(true);
+ expect(findTextExpander().classes()).toContain('open');
+ });
});
it('renders the signature HTML as returned by the backend', async () => {
- factory(
- createCommitData({
- signatureHtml: `<a
- class="btn gpg-status-box valid"
- data-content="signature-content"
- data-html="true"
- data-placement="top"
- data-title="signature-title"
- data-toggle="popover"
- role="button"
- tabindex="0"
- >
- Verified
- </a>`,
- }),
- );
-
- await nextTick();
-
- expect(vm.find('.gpg-status-box').html()).toBe(
- `<a class="btn gpg-status-box valid" data-content="signature-content" data-html="true" data-placement="top" data-title="signature-title" data-toggle="popover" role="button" tabindex="0">
- Verified
-</a>`,
+ createComponent({
+ signatureHtml: `<a
+ class="btn gpg-status-box valid"
+ data-content="signature-content"
+ data-html="true"
+ data-placement="top"
+ data-title="signature-title"
+ data-toggle="popover"
+ role="button"
+ tabindex="0"
+ >Verified</a>`,
+ });
+ await waitForPromises();
+
+ expect(findStatusBox().html()).toBe(
+ `<a class="btn gpg-status-box valid" data-content="signature-content" data-html="true" data-placement="top" data-title="signature-title" data-toggle="popover" role="button" tabindex="0">Verified</a>`,
);
});
it('sets correct CSS class if the commit message is empty', async () => {
- factory(createCommitData({ message: '' }));
-
- await nextTick();
+ createComponent({ message: '' });
+ await waitForPromises();
- expect(vm.find('.item-title').classes()).toContain(emptyMessageClass);
+ expect(findItemTitle().classes()).toContain('font-italic');
});
});
diff --git a/spec/frontend/repository/components/table/row_spec.js b/spec/frontend/repository/components/table/row_spec.js
index 22570b2d6ed..13b09e57473 100644
--- a/spec/frontend/repository/components/table/row_spec.js
+++ b/spec/frontend/repository/components/table/row_spec.js
@@ -31,7 +31,7 @@ function factory(propsData = {}) {
GlHoverLoad: createMockDirective(),
},
provide: {
- glFeatures: { refactorBlobViewer: true, lazyLoadCommits: true },
+ glFeatures: { lazyLoadCommits: true },
},
mocks: {
$router,
@@ -244,8 +244,6 @@ describe('Repository table row component', () => {
});
describe('row visibility', () => {
- beforeAll(() => jest.useFakeTimers());
-
beforeEach(() => {
factory({
id: '1',
@@ -260,12 +258,13 @@ describe('Repository table row component', () => {
afterAll(() => jest.useRealTimers());
it('emits a `row-appear` event', async () => {
+ const setTimeoutSpy = jest.spyOn(global, 'setTimeout');
findIntersectionObserver().vm.$emit('appear');
jest.runAllTimers();
- expect(setTimeout).toHaveBeenCalledTimes(1);
- expect(setTimeout).toHaveBeenLastCalledWith(expect.any(Function), ROW_APPEAR_DELAY);
+ expect(setTimeoutSpy).toHaveBeenCalledTimes(1);
+ expect(setTimeoutSpy).toHaveBeenLastCalledWith(expect.any(Function), ROW_APPEAR_DELAY);
expect(vm.emitted('row-appear')).toEqual([[123]]);
});
});
diff --git a/spec/frontend/repository/log_tree_spec.js b/spec/frontend/repository/log_tree_spec.js
index 5186c9a8992..e3b4dcb8acc 100644
--- a/spec/frontend/repository/log_tree_spec.js
+++ b/spec/frontend/repository/log_tree_spec.js
@@ -16,19 +16,18 @@ const mockData = [
commit_path: `https://test.com`,
commit_title_html: 'commit title',
file_name: 'index.js',
- type: 'blob',
},
];
describe('resolveCommit', () => {
it('calls resolve when commit found', () => {
const resolver = {
- entry: { name: 'index.js', type: 'blob' },
+ entry: { name: 'index.js' },
resolve: jest.fn(),
};
const commits = [
- { fileName: 'index.js', filePath: '/index.js', type: 'blob' },
- { fileName: 'index.js', filePath: '/app/assets/index.js', type: 'blob' },
+ { fileName: 'index.js', filePath: '/index.js' },
+ { fileName: 'index.js', filePath: '/app/assets/index.js' },
];
resolveCommit(commits, '', resolver);
@@ -36,7 +35,6 @@ describe('resolveCommit', () => {
expect(resolver.resolve).toHaveBeenCalledWith({
fileName: 'index.js',
filePath: '/index.js',
- type: 'blob',
});
});
});
@@ -56,7 +54,7 @@ describe('fetchLogsTree', () => {
global.gon = { relative_url_root: '' };
resolver = {
- entry: { name: 'index.js', type: 'blob' },
+ entry: { name: 'index.js' },
resolve: jest.fn(),
};
@@ -119,7 +117,6 @@ describe('fetchLogsTree', () => {
filePath: '/index.js',
message: 'testing message',
sha: '123',
- type: 'blob',
}),
);
}));
@@ -136,7 +133,6 @@ describe('fetchLogsTree', () => {
message: 'testing message',
sha: '123',
titleHtml: 'commit title',
- type: 'blob',
}),
],
});
diff --git a/spec/frontend/repository/utils/commit_spec.js b/spec/frontend/repository/utils/commit_spec.js
index aaaa39f739f..b3dd5118308 100644
--- a/spec/frontend/repository/utils/commit_spec.js
+++ b/spec/frontend/repository/utils/commit_spec.js
@@ -10,7 +10,6 @@ const mockData = [
commit_path: `https://test.com`,
commit_title_html: 'testing message',
file_name: 'index.js',
- type: 'blob',
},
];
@@ -24,7 +23,6 @@ describe('normalizeData', () => {
commitPath: 'https://test.com',
fileName: 'index.js',
filePath: '/index.js',
- type: 'blob',
titleHtml: 'testing message',
__typename: 'LogTreeCommit',
},
diff --git a/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js b/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js
index 28e7d192938..433be5d5027 100644
--- a/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js
+++ b/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js
@@ -9,6 +9,7 @@ import { redirectTo } from '~/lib/utils/url_utility';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import RunnerHeader from '~/runner/components/runner_header.vue';
+import RunnerDetails from '~/runner/components/runner_details.vue';
import RunnerPauseButton from '~/runner/components/runner_pause_button.vue';
import RunnerDeleteButton from '~/runner/components/runner_delete_button.vue';
import RunnerEditButton from '~/runner/components/runner_edit_button.vue';
@@ -37,6 +38,7 @@ describe('AdminRunnerShowApp', () => {
let mockRunnerQuery;
const findRunnerHeader = () => wrapper.findComponent(RunnerHeader);
+ const findRunnerDetails = () => wrapper.findComponent(RunnerDetails);
const findRunnerDeleteButton = () => wrapper.findComponent(RunnerDeleteButton);
const findRunnerEditButton = () => wrapper.findComponent(RunnerEditButton);
const findRunnerPauseButton = () => wrapper.findComponent(RunnerPauseButton);
@@ -179,12 +181,32 @@ describe('AdminRunnerShowApp', () => {
});
});
+ describe('When loading', () => {
+ beforeEach(() => {
+ mockRunnerQueryResult();
+
+ createComponent();
+ });
+
+ it('does not show runner details', () => {
+ expect(findRunnerDetails().exists()).toBe(false);
+ });
+
+ it('does not show runner jobs', () => {
+ expect(findRunnersJobs().exists()).toBe(false);
+ });
+ });
+
describe('When there is an error', () => {
beforeEach(async () => {
mockRunnerQuery = jest.fn().mockRejectedValueOnce(new Error('Error!'));
await createComponent();
});
+ it('does not show runner details', () => {
+ expect(findRunnerDetails().exists()).toBe(false);
+ });
+
it('error is reported to sentry', () => {
expect(captureException).toHaveBeenCalledWith({
error: new Error('Error!'),
@@ -201,13 +223,6 @@ describe('AdminRunnerShowApp', () => {
const stubs = {
GlTab,
GlTabs,
- RunnerDetails: {
- template: `
- <div>
- <slot name="jobs-tab"></slot>
- </div>
- `,
- },
};
it('without a runner, shows no jobs', () => {
diff --git a/spec/frontend/runner/admin_runners/admin_runners_app_spec.js b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
index 3d25ad075de..aa1aa723491 100644
--- a/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
+++ b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
@@ -10,9 +10,11 @@ import {
} from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/flash';
+import { s__ } from '~/locale';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { updateHistory } from '~/lib/utils/url_utility';
+import { upgradeStatusTokenConfig } from 'ee_else_ce/runner/components/search_tokens/upgrade_status_token_config';
import { createLocalState } from '~/runner/graphql/list/local_state';
import AdminRunnersApp from '~/runner/admin_runners/admin_runners_app.vue';
import RunnerTypeTabs from '~/runner/components/runner_type_tabs.vue';
@@ -20,6 +22,7 @@ import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_
import RunnerList from '~/runner/components/runner_list.vue';
import RunnerListEmptyState from '~/runner/components/runner_list_empty_state.vue';
import RunnerStats from '~/runner/components/stat/runner_stats.vue';
+import RunnerCount from '~/runner/components/stat/runner_count.vue';
import RunnerActionsCell from '~/runner/components/cells/runner_actions_cell.vue';
import RegistrationDropdown from '~/runner/components/registration/registration_dropdown.vue';
import RunnerPagination from '~/runner/components/runner_pagination.vue';
@@ -30,8 +33,6 @@ import {
CREATED_DESC,
DEFAULT_SORT,
INSTANCE_TYPE,
- GROUP_TYPE,
- PROJECT_TYPE,
PARAM_KEY_PAUSED,
PARAM_KEY_STATUS,
PARAM_KEY_TAG,
@@ -40,15 +41,14 @@ import {
STATUS_STALE,
RUNNER_PAGE_SIZE,
} from '~/runner/constants';
-import adminRunnersQuery from '~/runner/graphql/list/admin_runners.query.graphql';
-import adminRunnersCountQuery from '~/runner/graphql/list/admin_runners_count.query.graphql';
+import allRunnersQuery from 'ee_else_ce/runner/graphql/list/all_runners.query.graphql';
+import allRunnersCountQuery from '~/runner/graphql/list/all_runners_count.query.graphql';
import { captureException } from '~/runner/sentry_utils';
-import FilteredSearch from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import {
- runnersData,
+ allRunnersData,
runnersCountData,
- runnersDataPaginated,
+ allRunnersDataPaginated,
onlineContactTimeoutSecs,
staleTimeoutSecs,
emptyStateSvgPath,
@@ -56,9 +56,12 @@ import {
} from '../mock_data';
const mockRegistrationToken = 'MOCK_REGISTRATION_TOKEN';
-const mockRunners = runnersData.data.runners.nodes;
+const mockRunners = allRunnersData.data.runners.nodes;
const mockRunnersCount = runnersCountData.data.runners.count;
+const mockRunnersHandler = jest.fn();
+const mockRunnersCountHandler = jest.fn();
+
jest.mock('~/flash');
jest.mock('~/runner/sentry_utils');
jest.mock('~/lib/utils/url_utility', () => ({
@@ -71,8 +74,6 @@ Vue.use(GlToast);
describe('AdminRunnersApp', () => {
let wrapper;
- let mockRunnersQuery;
- let mockRunnersCountQuery;
let cacheConfig;
let localMutations;
@@ -85,7 +86,6 @@ describe('AdminRunnersApp', () => {
const findRunnerPagination = () => extendedWrapper(wrapper.findComponent(RunnerPagination));
const findRunnerPaginationNext = () => findRunnerPagination().findByLabelText('Go to next page');
const findRunnerFilteredSearchBar = () => wrapper.findComponent(RunnerFilteredSearchBar);
- const findFilteredSearch = () => wrapper.findComponent(FilteredSearch);
const createComponent = ({
props = {},
@@ -96,8 +96,8 @@ describe('AdminRunnersApp', () => {
({ cacheConfig, localMutations } = createLocalState());
const handlers = [
- [adminRunnersQuery, mockRunnersQuery],
- [adminRunnersCountQuery, mockRunnersCountQuery],
+ [allRunnersQuery, mockRunnersHandler],
+ [allRunnersCountQuery, mockRunnersCountHandler],
];
wrapper = mountFn(AdminRunnersApp, {
@@ -116,110 +116,62 @@ describe('AdminRunnersApp', () => {
},
...options,
});
- };
- beforeEach(async () => {
- setWindowLocation('/admin/runners');
+ return waitForPromises();
+ };
- mockRunnersQuery = jest.fn().mockResolvedValue(runnersData);
- mockRunnersCountQuery = jest.fn().mockResolvedValue(runnersCountData);
- createComponent();
- await waitForPromises();
+ beforeEach(() => {
+ mockRunnersHandler.mockResolvedValue(allRunnersData);
+ mockRunnersCountHandler.mockResolvedValue(runnersCountData);
});
afterEach(() => {
- mockRunnersQuery.mockReset();
- mockRunnersCountQuery.mockReset();
+ mockRunnersHandler.mockReset();
+ mockRunnersCountHandler.mockReset();
wrapper.destroy();
});
it('shows the runner tabs with a runner count for each type', async () => {
- mockRunnersCountQuery.mockImplementation(({ type }) => {
- let count;
- switch (type) {
- case INSTANCE_TYPE:
- count = 3;
- break;
- case GROUP_TYPE:
- count = 2;
- break;
- case PROJECT_TYPE:
- count = 1;
- break;
- default:
- count = 6;
- break;
- }
- return Promise.resolve({ data: { runners: { count } } });
- });
-
- createComponent({ mountFn: mountExtended });
- await waitForPromises();
-
- expect(findRunnerTypeTabs().text()).toMatchInterpolatedText(
- `All 6 Instance 3 Group 2 Project 1`,
- );
- });
-
- it('shows the runner tabs with a formatted runner count', async () => {
- mockRunnersCountQuery.mockImplementation(({ type }) => {
- let count;
- switch (type) {
- case INSTANCE_TYPE:
- count = 3000;
- break;
- case GROUP_TYPE:
- count = 2000;
- break;
- case PROJECT_TYPE:
- count = 1000;
- break;
- default:
- count = 6000;
- break;
- }
- return Promise.resolve({ data: { runners: { count } } });
- });
-
- createComponent({ mountFn: mountExtended });
- await waitForPromises();
+ await createComponent({ mountFn: mountExtended });
expect(findRunnerTypeTabs().text()).toMatchInterpolatedText(
- `All 6,000 Instance 3,000 Group 2,000 Project 1,000`,
+ `All ${mockRunnersCount} Instance ${mockRunnersCount} Group ${mockRunnersCount} Project ${mockRunnersCount}`,
);
});
it('shows the runner setup instructions', () => {
+ createComponent();
+
expect(findRegistrationDropdown().props('registrationToken')).toBe(mockRegistrationToken);
expect(findRegistrationDropdown().props('type')).toBe(INSTANCE_TYPE);
});
it('shows total runner counts', async () => {
- expect(mockRunnersCountQuery).toHaveBeenCalledWith({
- status: STATUS_ONLINE,
- });
- expect(mockRunnersCountQuery).toHaveBeenCalledWith({
- status: STATUS_OFFLINE,
- });
- expect(mockRunnersCountQuery).toHaveBeenCalledWith({
- status: STATUS_STALE,
- });
+ await createComponent({ mountFn: mountExtended });
- expect(findRunnerStats().props()).toMatchObject({
- onlineRunnersCount: mockRunnersCount,
- offlineRunnersCount: mockRunnersCount,
- staleRunnersCount: mockRunnersCount,
- });
+ expect(mockRunnersCountHandler).toHaveBeenCalledWith({ status: STATUS_ONLINE });
+ expect(mockRunnersCountHandler).toHaveBeenCalledWith({ status: STATUS_OFFLINE });
+ expect(mockRunnersCountHandler).toHaveBeenCalledWith({ status: STATUS_STALE });
+
+ expect(findRunnerStats().text()).toContain(
+ `${s__('Runners|Online runners')} ${mockRunnersCount}`,
+ );
+ expect(findRunnerStats().text()).toContain(
+ `${s__('Runners|Offline runners')} ${mockRunnersCount}`,
+ );
+ expect(findRunnerStats().text()).toContain(
+ `${s__('Runners|Stale runners')} ${mockRunnersCount}`,
+ );
});
- it('shows the runners list', () => {
+ it('shows the runners list', async () => {
+ await createComponent();
+
expect(findRunnerList().props('runners')).toEqual(mockRunners);
});
it('runner item links to the runner admin page', async () => {
- createComponent({ mountFn: mountExtended });
-
- await waitForPromises();
+ await createComponent({ mountFn: mountExtended });
const { id, shortSha } = mockRunners[0];
const numericId = getIdFromGraphQLId(id);
@@ -231,12 +183,9 @@ describe('AdminRunnersApp', () => {
});
it('renders runner actions for each runner', async () => {
- createComponent({ mountFn: mountExtended });
-
- await waitForPromises();
+ await createComponent({ mountFn: mountExtended });
const runnerActions = wrapper.find('tr [data-testid="td-actions"]').find(RunnerActionsCell);
-
const runner = mockRunners[0];
expect(runnerActions.props()).toEqual({
@@ -245,8 +194,10 @@ describe('AdminRunnersApp', () => {
});
});
- it('requests the runners with no filters', () => {
- expect(mockRunnersQuery).toHaveBeenLastCalledWith({
+ it('requests the runners with no filters', async () => {
+ await createComponent();
+
+ expect(mockRunnersHandler).toHaveBeenLastCalledWith({
status: undefined,
type: undefined,
sort: DEFAULT_SORT,
@@ -255,9 +206,9 @@ describe('AdminRunnersApp', () => {
});
it('sets tokens in the filtered search', () => {
- createComponent({ mountFn: mountExtended });
+ createComponent();
- expect(findFilteredSearch().props('tokens')).toEqual([
+ expect(findRunnerFilteredSearchBar().props('tokens')).toEqual([
expect.objectContaining({
type: PARAM_KEY_PAUSED,
options: expect.any(Array),
@@ -270,6 +221,7 @@ describe('AdminRunnersApp', () => {
type: PARAM_KEY_TAG,
recentSuggestionsStorageKey: `${ADMIN_FILTERED_SEARCH_NAMESPACE}-recent-tags`,
}),
+ upgradeStatusTokenConfig,
]);
});
@@ -282,12 +234,10 @@ describe('AdminRunnersApp', () => {
const FILTERED_COUNT_QUERIES = 4; // Smart queries that display a count of runners in tabs
beforeEach(async () => {
- mockRunnersCountQuery.mockClear();
+ mockRunnersCountHandler.mockClear();
- createComponent({ mountFn: mountExtended });
+ await createComponent({ mountFn: mountExtended });
showToast = jest.spyOn(wrapper.vm.$root.$toast, 'show');
-
- await waitForPromises();
});
it('Links to the runner page', async () => {
@@ -298,12 +248,11 @@ describe('AdminRunnersApp', () => {
});
it('When runner is paused or unpaused, some data is refetched', async () => {
- expect(mockRunnersCountQuery).toHaveBeenCalledTimes(COUNT_QUERIES);
+ expect(mockRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES);
findRunnerActionsCell().vm.$emit('toggledPaused');
- expect(mockRunnersCountQuery).toHaveBeenCalledTimes(COUNT_QUERIES + FILTERED_COUNT_QUERIES);
-
+ expect(mockRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES + FILTERED_COUNT_QUERIES);
expect(showToast).toHaveBeenCalledTimes(0);
});
@@ -319,8 +268,12 @@ describe('AdminRunnersApp', () => {
beforeEach(async () => {
setWindowLocation(`?status[]=${STATUS_ONLINE}&runner_type[]=${INSTANCE_TYPE}&tag[]=tag1`);
- createComponent();
- await waitForPromises();
+ await createComponent({
+ stubs: {
+ RunnerStats,
+ RunnerCount,
+ },
+ });
});
it('sets the filters in the search bar', () => {
@@ -336,7 +289,7 @@ describe('AdminRunnersApp', () => {
});
it('requests the runners with filter parameters', () => {
- expect(mockRunnersQuery).toHaveBeenLastCalledWith({
+ expect(mockRunnersHandler).toHaveBeenLastCalledWith({
status: STATUS_ONLINE,
type: INSTANCE_TYPE,
tagList: ['tag1'],
@@ -346,21 +299,22 @@ describe('AdminRunnersApp', () => {
});
it('fetches count results for requested status', () => {
- expect(mockRunnersCountQuery).toHaveBeenCalledWith({
+ expect(mockRunnersCountHandler).toHaveBeenCalledWith({
type: INSTANCE_TYPE,
status: STATUS_ONLINE,
tagList: ['tag1'],
});
-
- expect(findRunnerStats().props()).toMatchObject({
- onlineRunnersCount: mockRunnersCount,
- });
});
});
describe('when a filter is selected by the user', () => {
beforeEach(() => {
- mockRunnersCountQuery.mockClear();
+ createComponent({
+ stubs: {
+ RunnerStats,
+ RunnerCount,
+ },
+ });
findRunnerFilteredSearchBar().vm.$emit('input', {
runnerType: null,
@@ -375,12 +329,12 @@ describe('AdminRunnersApp', () => {
it('updates the browser url', () => {
expect(updateHistory).toHaveBeenLastCalledWith({
title: expect.any(String),
- url: 'http://test.host/admin/runners?status[]=ONLINE&tag[]=tag1&sort=CREATED_ASC',
+ url: expect.stringContaining('?status[]=ONLINE&tag[]=tag1&sort=CREATED_ASC'),
});
});
it('requests the runners with filters', () => {
- expect(mockRunnersQuery).toHaveBeenLastCalledWith({
+ expect(mockRunnersHandler).toHaveBeenLastCalledWith({
status: STATUS_ONLINE,
tagList: ['tag1'],
sort: CREATED_ASC,
@@ -389,30 +343,10 @@ describe('AdminRunnersApp', () => {
});
it('fetches count results for requested status', () => {
- expect(mockRunnersCountQuery).toHaveBeenCalledWith({
+ expect(mockRunnersCountHandler).toHaveBeenCalledWith({
tagList: ['tag1'],
status: STATUS_ONLINE,
});
-
- expect(findRunnerStats().props()).toMatchObject({
- onlineRunnersCount: mockRunnersCount,
- });
- });
-
- it('skips fetching count results for status that were not in filter', () => {
- expect(mockRunnersCountQuery).not.toHaveBeenCalledWith({
- tagList: ['tag1'],
- status: STATUS_OFFLINE,
- });
- expect(mockRunnersCountQuery).not.toHaveBeenCalledWith({
- tagList: ['tag1'],
- status: STATUS_STALE,
- });
-
- expect(findRunnerStats().props()).toMatchObject({
- offlineRunnersCount: null,
- staleRunnersCount: null,
- });
});
});
@@ -458,14 +392,13 @@ describe('AdminRunnersApp', () => {
describe('when no runners are found', () => {
beforeEach(async () => {
- mockRunnersQuery = jest.fn().mockResolvedValue({
+ mockRunnersHandler.mockResolvedValue({
data: {
runners: { nodes: [] },
},
});
- createComponent();
- await waitForPromises();
+ await createComponent();
});
it('shows an empty state', () => {
@@ -490,9 +423,8 @@ describe('AdminRunnersApp', () => {
describe('when runners query fails', () => {
beforeEach(async () => {
- mockRunnersQuery = jest.fn().mockRejectedValue(new Error('Error!'));
- createComponent();
- await waitForPromises();
+ mockRunnersHandler.mockRejectedValue(new Error('Error!'));
+ await createComponent();
});
it('error is shown to the user', async () => {
@@ -509,19 +441,18 @@ describe('AdminRunnersApp', () => {
describe('Pagination', () => {
beforeEach(async () => {
- mockRunnersQuery = jest.fn().mockResolvedValue(runnersDataPaginated);
+ mockRunnersHandler.mockResolvedValue(allRunnersDataPaginated);
- createComponent({ mountFn: mountExtended });
- await waitForPromises();
+ await createComponent({ mountFn: mountExtended });
});
it('navigates to the next page', async () => {
await findRunnerPaginationNext().trigger('click');
- expect(mockRunnersQuery).toHaveBeenLastCalledWith({
+ expect(mockRunnersHandler).toHaveBeenLastCalledWith({
sort: CREATED_DESC,
first: RUNNER_PAGE_SIZE,
- after: runnersDataPaginated.data.runners.pageInfo.endCursor,
+ after: allRunnersDataPaginated.data.runners.pageInfo.endCursor,
});
});
});
diff --git a/spec/frontend/runner/components/cells/runner_actions_cell_spec.js b/spec/frontend/runner/components/cells/runner_actions_cell_spec.js
index 7a949cb6505..ffd6f126627 100644
--- a/spec/frontend/runner/components/cells/runner_actions_cell_spec.js
+++ b/spec/frontend/runner/components/cells/runner_actions_cell_spec.js
@@ -4,9 +4,9 @@ import RunnerActionsCell from '~/runner/components/cells/runner_actions_cell.vue
import RunnerPauseButton from '~/runner/components/runner_pause_button.vue';
import RunnerEditButton from '~/runner/components/runner_edit_button.vue';
import RunnerDeleteButton from '~/runner/components/runner_delete_button.vue';
-import { runnersData } from '../../mock_data';
+import { allRunnersData } from '../../mock_data';
-const mockRunner = runnersData.data.runners.nodes[0];
+const mockRunner = allRunnersData.data.runners.nodes[0];
describe('RunnerActionsCell', () => {
let wrapper;
diff --git a/spec/frontend/runner/components/runner_delete_button_spec.js b/spec/frontend/runner/components/runner_delete_button_spec.js
index b11c749d0a7..52fe803c536 100644
--- a/spec/frontend/runner/components/runner_delete_button_spec.js
+++ b/spec/frontend/runner/components/runner_delete_button_spec.js
@@ -17,9 +17,9 @@ import {
import RunnerDeleteButton from '~/runner/components/runner_delete_button.vue';
import RunnerDeleteModal from '~/runner/components/runner_delete_modal.vue';
-import { runnersData } from '../mock_data';
+import { allRunnersData } from '../mock_data';
-const mockRunner = runnersData.data.runners.nodes[0];
+const mockRunner = allRunnersData.data.runners.nodes[0];
const mockRunnerId = getIdFromGraphQLId(mockRunner.id);
Vue.use(VueApollo);
diff --git a/spec/frontend/runner/components/runner_details_spec.js b/spec/frontend/runner/components/runner_details_spec.js
index 9e0f7014750..552ee29b6f9 100644
--- a/spec/frontend/runner/components/runner_details_spec.js
+++ b/spec/frontend/runner/components/runner_details_spec.js
@@ -25,12 +25,7 @@ describe('RunnerDetails', () => {
const findDetailGroups = () => wrapper.findComponent(RunnerGroups);
- const createComponent = ({
- props = {},
- stubs,
- mountFn = shallowMountExtended,
- ...options
- } = {}) => {
+ const createComponent = ({ props = {}, stubs, mountFn = shallowMountExtended } = {}) => {
wrapper = mountFn(RunnerDetails, {
propsData: {
...props,
@@ -39,7 +34,6 @@ describe('RunnerDetails', () => {
RunnerDetail,
...stubs,
},
- ...options,
});
};
@@ -47,16 +41,6 @@ describe('RunnerDetails', () => {
wrapper.destroy();
});
- it('when no runner is present, no contents are shown', () => {
- createComponent({
- props: {
- runner: null,
- },
- });
-
- expect(wrapper.text()).toBe('');
- });
-
describe('Details tab', () => {
describe.each`
field | runner | expectedValue
@@ -141,18 +125,4 @@ describe('RunnerDetails', () => {
});
});
});
-
- describe('Jobs tab slot', () => {
- it('shows job tab slot', () => {
- const JOBS_TAB = '<div>Jobs Tab</div>';
-
- createComponent({
- slots: {
- 'jobs-tab': JOBS_TAB,
- },
- });
-
- expect(wrapper.html()).toContain(JOBS_TAB);
- });
- });
});
diff --git a/spec/frontend/runner/components/runner_filtered_search_bar_spec.js b/spec/frontend/runner/components/runner_filtered_search_bar_spec.js
index b1b436e5443..83fb1764c6d 100644
--- a/spec/frontend/runner/components/runner_filtered_search_bar_spec.js
+++ b/spec/frontend/runner/components/runner_filtered_search_bar_spec.js
@@ -89,6 +89,16 @@ describe('RunnerList', () => {
]);
});
+ it('can be configured with null or undefined tokens, which are ignored', () => {
+ createComponent({
+ props: {
+ tokens: [statusTokenConfig, null, undefined],
+ },
+ });
+
+ expect(findFilteredSearch().props('tokens')).toEqual([statusTokenConfig]);
+ });
+
it('fails validation for v-model with the wrong shape', () => {
expect(() => {
createComponent({ props: { value: { filters: 'wrong_filters', sort: 'sort' } } });
diff --git a/spec/frontend/runner/components/runner_list_spec.js b/spec/frontend/runner/components/runner_list_spec.js
index 872394430ae..eca4bbc3490 100644
--- a/spec/frontend/runner/components/runner_list_spec.js
+++ b/spec/frontend/runner/components/runner_list_spec.js
@@ -7,9 +7,9 @@ import {
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import RunnerList from '~/runner/components/runner_list.vue';
import RunnerStatusPopover from '~/runner/components/runner_status_popover.vue';
-import { runnersData, onlineContactTimeoutSecs, staleTimeoutSecs } from '../mock_data';
+import { allRunnersData, onlineContactTimeoutSecs, staleTimeoutSecs } from '../mock_data';
-const mockRunners = runnersData.data.runners.nodes;
+const mockRunners = allRunnersData.data.runners.nodes;
const mockActiveRunnersCount = mockRunners.length;
describe('RunnerList', () => {
diff --git a/spec/frontend/runner/components/runner_pause_button_spec.js b/spec/frontend/runner/components/runner_pause_button_spec.js
index 9ebb30b6ed7..61476007571 100644
--- a/spec/frontend/runner/components/runner_pause_button_spec.js
+++ b/spec/frontend/runner/components/runner_pause_button_spec.js
@@ -1,4 +1,4 @@
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
import { GlButton } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -16,9 +16,9 @@ import {
} from '~/runner/constants';
import RunnerPauseButton from '~/runner/components/runner_pause_button.vue';
-import { runnersData } from '../mock_data';
+import { allRunnersData } from '../mock_data';
-const mockRunner = runnersData.data.runners.nodes[0];
+const mockRunner = allRunnersData.data.runners.nodes[0];
Vue.use(VueApollo);
@@ -115,15 +115,20 @@ describe('RunnerPauseButton', () => {
});
describe(`Immediately after the ${icon} button is clicked`, () => {
- beforeEach(async () => {
+ const setup = async () => {
findBtn().vm.$emit('click');
- });
+ await nextTick();
+ };
it('The button has a loading state', async () => {
+ await setup();
+
expect(findBtn().props('loading')).toBe(true);
});
it('The stale tooltip is removed', async () => {
+ await setup();
+
expect(getTooltip()).toBe('');
});
});
@@ -237,15 +242,20 @@ describe('RunnerPauseButton', () => {
});
describe('Immediately after the button is clicked', () => {
- beforeEach(async () => {
+ const setup = async () => {
findBtn().vm.$emit('click');
- });
+ await nextTick();
+ };
it('The button has a loading state', async () => {
+ await setup();
+
expect(findBtn().props('loading')).toBe(true);
});
it('The stale tooltip is removed', async () => {
+ await setup();
+
expect(getTooltip()).toBe('');
});
});
diff --git a/spec/frontend/runner/components/runner_type_tabs_spec.js b/spec/frontend/runner/components/runner_type_tabs_spec.js
index 9da5d842d8f..22d2a9e60f7 100644
--- a/spec/frontend/runner/components/runner_type_tabs_spec.js
+++ b/spec/frontend/runner/components/runner_type_tabs_spec.js
@@ -1,10 +1,30 @@
import { GlTab } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import RunnerTypeTabs from '~/runner/components/runner_type_tabs.vue';
+import RunnerCount from '~/runner/components/stat/runner_count.vue';
import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '~/runner/constants';
const mockSearch = { runnerType: null, filters: [], pagination: { page: 1 }, sort: 'CREATED_DESC' };
+const mockCount = (type, multiplier = 1) => {
+ let count;
+ switch (type) {
+ case INSTANCE_TYPE:
+ count = 3;
+ break;
+ case GROUP_TYPE:
+ count = 2;
+ break;
+ case PROJECT_TYPE:
+ count = 1;
+ break;
+ default:
+ count = 6;
+ break;
+ }
+ return count * multiplier;
+};
+
describe('RunnerTypeTabs', () => {
let wrapper;
@@ -13,33 +33,94 @@ describe('RunnerTypeTabs', () => {
findTabs()
.filter((tab) => tab.attributes('active') === 'true')
.at(0);
- const getTabsTitles = () => findTabs().wrappers.map((tab) => tab.text());
+ const getTabsTitles = () => findTabs().wrappers.map((tab) => tab.text().replace(/\s+/g, ' '));
- const createComponent = ({ props, ...options } = {}) => {
+ const createComponent = ({ props, stubs, ...options } = {}) => {
wrapper = shallowMount(RunnerTypeTabs, {
propsData: {
value: mockSearch,
+ countScope: INSTANCE_TYPE,
+ countVariables: {},
...props,
},
stubs: {
GlTab,
+ ...stubs,
},
...options,
});
};
- beforeEach(() => {
- createComponent();
- });
-
afterEach(() => {
wrapper.destroy();
});
it('Renders all options to filter runners by default', () => {
+ createComponent();
+
expect(getTabsTitles()).toEqual(['All', 'Instance', 'Group', 'Project']);
});
+ it('Shows count when receiving a number', () => {
+ createComponent({
+ stubs: {
+ RunnerCount: {
+ props: ['variables'],
+ render() {
+ return this.$scopedSlots.default({
+ count: mockCount(this.variables.type),
+ });
+ },
+ },
+ },
+ });
+
+ expect(getTabsTitles()).toEqual([`All 6`, `Instance 3`, `Group 2`, `Project 1`]);
+ });
+
+ it('Shows formatted count when receiving a large number', () => {
+ createComponent({
+ stubs: {
+ RunnerCount: {
+ props: ['variables'],
+ render() {
+ return this.$scopedSlots.default({
+ count: mockCount(this.variables.type, 1000),
+ });
+ },
+ },
+ },
+ });
+
+ expect(getTabsTitles()).toEqual([
+ `All 6,000`,
+ `Instance 3,000`,
+ `Group 2,000`,
+ `Project 1,000`,
+ ]);
+ });
+
+ it('Renders a count next to each tab', () => {
+ const mockVariables = {
+ paused: true,
+ status: 'ONLINE',
+ };
+
+ createComponent({
+ props: {
+ countVariables: mockVariables,
+ },
+ });
+
+ findTabs().wrappers.forEach((tab) => {
+ expect(tab.findComponent(RunnerCount).props()).toEqual({
+ scope: INSTANCE_TYPE,
+ skip: false,
+ variables: expect.objectContaining(mockVariables),
+ });
+ });
+ });
+
it('Renders fewer options to filter runners', () => {
createComponent({
props: {
@@ -51,6 +132,8 @@ describe('RunnerTypeTabs', () => {
});
it('"All" is selected by default', () => {
+ createComponent();
+
expect(findActiveTab().text()).toBe('All');
});
@@ -71,6 +154,7 @@ describe('RunnerTypeTabs', () => {
const emittedValue = () => wrapper.emitted('input')[0][0];
beforeEach(() => {
+ createComponent();
findTabs().at(2).vm.$emit('click');
});
@@ -89,27 +173,30 @@ describe('RunnerTypeTabs', () => {
});
});
- describe('When using a custom slot', () => {
- const mockContent = 'content';
-
- beforeEach(() => {
- createComponent({
- scopedSlots: {
- title: `
- <span>
- {{props.tab.title}} ${mockContent}
- </span>`,
- },
+ describe('Component API', () => {
+ describe('When .refetch() is called', () => {
+ let mockRefetch;
+
+ beforeEach(() => {
+ mockRefetch = jest.fn();
+
+ createComponent({
+ stubs: {
+ RunnerCount: {
+ methods: {
+ refetch: mockRefetch,
+ },
+ render() {},
+ },
+ },
+ });
+
+ wrapper.vm.refetch();
});
- });
- it('Renders tabs with additional information', () => {
- expect(findTabs().wrappers.map((tab) => tab.text())).toEqual([
- `All ${mockContent}`,
- `Instance ${mockContent}`,
- `Group ${mockContent}`,
- `Project ${mockContent}`,
- ]);
+ it('refetch is called for each count', () => {
+ expect(mockRefetch).toHaveBeenCalledTimes(4);
+ });
});
});
});
diff --git a/spec/frontend/runner/components/search_tokens/tag_token_spec.js b/spec/frontend/runner/components/search_tokens/tag_token_spec.js
index 52557ff716d..22f0561ca5f 100644
--- a/spec/frontend/runner/components/search_tokens/tag_token_spec.js
+++ b/spec/frontend/runner/components/search_tokens/tag_token_spec.js
@@ -134,8 +134,6 @@ describe('TagToken', () => {
describe('when the users filters suggestions', () => {
beforeEach(async () => {
findGlFilteredSearchToken().vm.$emit('input', { data: mockSearchTerm });
-
- jest.runAllTimers();
});
it('requests filtered tags suggestions', async () => {
@@ -145,6 +143,7 @@ describe('TagToken', () => {
});
it('shows the loading icon', async () => {
+ findGlFilteredSearchToken().vm.$emit('input', { data: mockSearchTerm });
await nextTick();
expect(findGlLoadingIcon().exists()).toBe(true);
diff --git a/spec/frontend/runner/components/stat/runner_count_spec.js b/spec/frontend/runner/components/stat/runner_count_spec.js
new file mode 100644
index 00000000000..89b51b1b4a7
--- /dev/null
+++ b/spec/frontend/runner/components/stat/runner_count_spec.js
@@ -0,0 +1,148 @@
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
+import RunnerCount from '~/runner/components/stat/runner_count.vue';
+import { INSTANCE_TYPE, GROUP_TYPE } from '~/runner/constants';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { captureException } from '~/runner/sentry_utils';
+
+import allRunnersCountQuery from '~/runner/graphql/list/all_runners_count.query.graphql';
+import groupRunnersCountQuery from '~/runner/graphql/list/group_runners_count.query.graphql';
+
+import { runnersCountData, groupRunnersCountData } from '../../mock_data';
+
+jest.mock('~/runner/sentry_utils');
+
+Vue.use(VueApollo);
+
+describe('RunnerCount', () => {
+ let wrapper;
+ let mockRunnersCountHandler;
+ let mockGroupRunnersCountHandler;
+
+ const createComponent = ({ props = {}, ...options } = {}) => {
+ const handlers = [
+ [allRunnersCountQuery, mockRunnersCountHandler],
+ [groupRunnersCountQuery, mockGroupRunnersCountHandler],
+ ];
+
+ wrapper = shallowMount(RunnerCount, {
+ apolloProvider: createMockApollo(handlers),
+ propsData: {
+ ...props,
+ },
+ scopedSlots: {
+ default: '<strong>{{props.count}}</strong>',
+ },
+ ...options,
+ });
+
+ return waitForPromises();
+ };
+
+ beforeEach(() => {
+ mockRunnersCountHandler = jest.fn().mockResolvedValue(runnersCountData);
+ mockGroupRunnersCountHandler = jest.fn().mockResolvedValue(groupRunnersCountData);
+ });
+
+ describe('in admin scope', () => {
+ const mockVariables = { status: 'ONLINE' };
+
+ beforeEach(async () => {
+ await createComponent({ props: { scope: INSTANCE_TYPE } });
+ });
+
+ it('fetches data from the admin query', () => {
+ expect(mockRunnersCountHandler).toHaveBeenCalledTimes(1);
+ expect(mockRunnersCountHandler).toHaveBeenCalledWith({});
+ });
+
+ it('fetches data with filters', async () => {
+ await createComponent({ props: { scope: INSTANCE_TYPE, variables: mockVariables } });
+
+ expect(mockRunnersCountHandler).toHaveBeenCalledTimes(2);
+ expect(mockRunnersCountHandler).toHaveBeenCalledWith(mockVariables);
+
+ expect(wrapper.html()).toBe(`<strong>${runnersCountData.data.runners.count}</strong>`);
+ });
+
+ it('does not fetch from the group query', async () => {
+ expect(mockGroupRunnersCountHandler).not.toHaveBeenCalled();
+ });
+
+ describe('when this query is skipped after data was loaded', () => {
+ beforeEach(async () => {
+ wrapper.setProps({ skip: true });
+
+ await nextTick();
+ });
+
+ it('clears current data', () => {
+ expect(wrapper.html()).toBe('<strong></strong>');
+ });
+ });
+ });
+
+ describe('when skipping query', () => {
+ beforeEach(async () => {
+ await createComponent({ props: { scope: INSTANCE_TYPE, skip: true } });
+ });
+
+ it('does not fetch data', async () => {
+ expect(mockRunnersCountHandler).not.toHaveBeenCalled();
+ expect(mockGroupRunnersCountHandler).not.toHaveBeenCalled();
+
+ expect(wrapper.html()).toBe('<strong></strong>');
+ });
+ });
+
+ describe('when runners query fails', () => {
+ const mockError = new Error('error!');
+
+ beforeEach(async () => {
+ mockRunnersCountHandler.mockRejectedValue(mockError);
+
+ await createComponent({ props: { scope: INSTANCE_TYPE } });
+ });
+
+ it('data is not shown and error is reported', async () => {
+ expect(wrapper.html()).toBe('<strong></strong>');
+
+ expect(captureException).toHaveBeenCalledWith({
+ component: 'RunnerCount',
+ error: mockError,
+ });
+ });
+ });
+
+ describe('in group scope', () => {
+ beforeEach(async () => {
+ await createComponent({ props: { scope: GROUP_TYPE } });
+ });
+
+ it('fetches data from the group query', async () => {
+ expect(mockGroupRunnersCountHandler).toHaveBeenCalledTimes(1);
+ expect(mockGroupRunnersCountHandler).toHaveBeenCalledWith({});
+
+ expect(wrapper.html()).toBe(
+ `<strong>${groupRunnersCountData.data.group.runners.count}</strong>`,
+ );
+ });
+
+ it('does not fetch from the admin query', () => {
+ expect(mockRunnersCountHandler).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when .refetch() is called', () => {
+ beforeEach(async () => {
+ await createComponent({ props: { scope: INSTANCE_TYPE } });
+ wrapper.vm.refetch();
+ });
+
+ it('count is fetched again', async () => {
+ expect(mockRunnersCountHandler).toHaveBeenCalledTimes(2);
+ });
+ });
+});
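As a usage note, runner_count_spec.js above exercises RunnerCount purely through its props and default scoped slot, so a parent would consume it roughly as sketched below. The exact template is an assumption inferred from the spec (props scope, variables, skip; slot prop count), not copied from the component:

import RunnerCount from '~/runner/components/stat/runner_count.vue';

// Hypothetical consumer, inferred from the spec above.
const ExampleStat = {
  components: { RunnerCount },
  props: ['scope', 'variables'],
  template: `
    <runner-count :scope="scope" :variables="variables" #default="{ count }">
      <span>{{ count }}</span>
    </runner-count>
  `,
};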
diff --git a/spec/frontend/runner/components/stat/runner_stats_spec.js b/spec/frontend/runner/components/stat/runner_stats_spec.js
index 68db8621ef0..f1ba6403dfb 100644
--- a/spec/frontend/runner/components/stat/runner_stats_spec.js
+++ b/spec/frontend/runner/components/stat/runner_stats_spec.js
@@ -1,21 +1,24 @@
import { shallowMount, mount } from '@vue/test-utils';
+import { s__ } from '~/locale';
import RunnerStats from '~/runner/components/stat/runner_stats.vue';
+import RunnerCount from '~/runner/components/stat/runner_count.vue';
import RunnerStatusStat from '~/runner/components/stat/runner_status_stat.vue';
-import { STATUS_ONLINE, STATUS_OFFLINE, STATUS_STALE } from '~/runner/constants';
+import { INSTANCE_TYPE, STATUS_ONLINE, STATUS_OFFLINE, STATUS_STALE } from '~/runner/constants';
describe('RunnerStats', () => {
let wrapper;
+ const findRunnerCountAt = (i) => wrapper.findAllComponents(RunnerCount).at(i);
const findRunnerStatusStatAt = (i) => wrapper.findAllComponents(RunnerStatusStat).at(i);
- const createComponent = ({ props = {}, mountFn = shallowMount } = {}) => {
+ const createComponent = ({ props = {}, mountFn = shallowMount, ...options } = {}) => {
wrapper = mountFn(RunnerStats, {
propsData: {
- onlineRunnersCount: 3,
- offlineRunnersCount: 2,
- staleRunnersCount: 1,
+ scope: INSTANCE_TYPE,
+ variables: {},
...props,
},
+ ...options,
});
};
@@ -24,13 +27,46 @@ describe('RunnerStats', () => {
});
it('Displays all the stats', () => {
- createComponent({ mountFn: mount });
+ const mockCounts = {
+ [STATUS_ONLINE]: 3,
+ [STATUS_OFFLINE]: 2,
+ [STATUS_STALE]: 1,
+ };
+
+ createComponent({
+ mountFn: mount,
+ stubs: {
+ RunnerCount: {
+ props: ['variables'],
+ render() {
+ return this.$scopedSlots.default({
+ count: mockCounts[this.variables.status],
+ });
+ },
+ },
+ },
+ });
+
+ const text = wrapper.text();
+ expect(text).toMatch(`${s__('Runners|Online runners')} 3`);
+ expect(text).toMatch(`${s__('Runners|Offline runners')} 2`);
+ expect(text).toMatch(`${s__('Runners|Stale runners')} 1`);
+ });
- const stats = wrapper.text();
+ it('Displays counts for filtered searches', () => {
+ createComponent({ props: { variables: { paused: true } } });
- expect(stats).toMatch('Online runners 3');
- expect(stats).toMatch('Offline runners 2');
- expect(stats).toMatch('Stale runners 1');
+ expect(findRunnerCountAt(0).props('variables').paused).toBe(true);
+ expect(findRunnerCountAt(1).props('variables').paused).toBe(true);
+ expect(findRunnerCountAt(2).props('variables').paused).toBe(true);
+ });
+
+ it('Skips overlapping statuses', () => {
+ createComponent({ props: { variables: { status: STATUS_ONLINE } } });
+
+ expect(findRunnerCountAt(0).props('skip')).toBe(false);
+ expect(findRunnerCountAt(1).props('skip')).toBe(true);
+ expect(findRunnerCountAt(2).props('skip')).toBe(true);
});
it.each`
@@ -38,9 +74,10 @@ describe('RunnerStats', () => {
${0} | ${STATUS_ONLINE}
${1} | ${STATUS_OFFLINE}
${2} | ${STATUS_STALE}
- `('Displays status types at index $i', ({ i, status }) => {
- createComponent();
+ `('Displays status $status at index $i', ({ i, status }) => {
+ createComponent({ mountFn: mount });
+ expect(findRunnerCountAt(i).props('variables').status).toBe(status);
expect(findRunnerStatusStatAt(i).props('status')).toBe(status);
});
});
diff --git a/spec/frontend/runner/group_runner_show/group_runner_show_app_spec.js b/spec/frontend/runner/group_runner_show/group_runner_show_app_spec.js
new file mode 100644
index 00000000000..2065874c288
--- /dev/null
+++ b/spec/frontend/runner/group_runner_show/group_runner_show_app_spec.js
@@ -0,0 +1,213 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { createAlert, VARIANT_SUCCESS } from '~/flash';
+import { redirectTo } from '~/lib/utils/url_utility';
+
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import RunnerHeader from '~/runner/components/runner_header.vue';
+import RunnerDetails from '~/runner/components/runner_details.vue';
+import RunnerPauseButton from '~/runner/components/runner_pause_button.vue';
+import RunnerDeleteButton from '~/runner/components/runner_delete_button.vue';
+import RunnerEditButton from '~/runner/components/runner_edit_button.vue';
+import runnerQuery from '~/runner/graphql/show/runner.query.graphql';
+import GroupRunnerShowApp from '~/runner/group_runner_show/group_runner_show_app.vue';
+import { captureException } from '~/runner/sentry_utils';
+import { saveAlertToLocalStorage } from '~/runner/local_storage_alert/save_alert_to_local_storage';
+
+import { runnerData } from '../mock_data';
+
+jest.mock('~/runner/local_storage_alert/save_alert_to_local_storage');
+jest.mock('~/flash');
+jest.mock('~/runner/sentry_utils');
+jest.mock('~/lib/utils/url_utility');
+
+const mockRunner = runnerData.data.runner;
+const mockRunnerGraphqlId = mockRunner.id;
+const mockRunnerId = `${getIdFromGraphQLId(mockRunnerGraphqlId)}`;
+const mockRunnersPath = '/groups/group1/-/runners';
+const mockEditGroupRunnerPath = `/groups/group1/-/runners/${mockRunnerId}/edit`;
+
+Vue.use(VueApollo);
+
+describe('GroupRunnerShowApp', () => {
+ let wrapper;
+ let mockRunnerQuery;
+
+ const findRunnerHeader = () => wrapper.findComponent(RunnerHeader);
+ const findRunnerDetails = () => wrapper.findComponent(RunnerDetails);
+ const findRunnerDeleteButton = () => wrapper.findComponent(RunnerDeleteButton);
+ const findRunnerEditButton = () => wrapper.findComponent(RunnerEditButton);
+ const findRunnerPauseButton = () => wrapper.findComponent(RunnerPauseButton);
+
+ const mockRunnerQueryResult = (runner = {}) => {
+ mockRunnerQuery = jest.fn().mockResolvedValue({
+ data: {
+ runner: { ...mockRunner, ...runner },
+ },
+ });
+ };
+
+ const createComponent = ({ props = {}, mountFn = shallowMountExtended, ...options } = {}) => {
+ wrapper = mountFn(GroupRunnerShowApp, {
+ apolloProvider: createMockApollo([[runnerQuery, mockRunnerQuery]]),
+ propsData: {
+ runnerId: mockRunnerId,
+ runnersPath: mockRunnersPath,
+ editGroupRunnerPath: mockEditGroupRunnerPath,
+ ...props,
+ },
+ ...options,
+ });
+
+ return waitForPromises();
+ };
+
+ afterEach(() => {
+ mockRunnerQuery.mockReset();
+ wrapper.destroy();
+ });
+
+ describe('When showing runner details', () => {
+ beforeEach(async () => {
+ mockRunnerQueryResult();
+
+ await createComponent({ mountFn: mountExtended });
+ });
+
+ it('requests the runner by GraphQL ID', async () => {
+ expect(mockRunnerQuery).toHaveBeenCalledWith({ id: mockRunnerGraphqlId });
+ });
+
+ it('displays the header', async () => {
+ expect(findRunnerHeader().text()).toContain(`Runner #${mockRunnerId}`);
+ });
+
+ it('displays edit, pause, delete buttons', async () => {
+ expect(findRunnerEditButton().exists()).toBe(true);
+ expect(findRunnerPauseButton().exists()).toBe(true);
+ expect(findRunnerDeleteButton().exists()).toBe(true);
+ });
+
+ it('shows basic runner details', () => {
+ const expected = `Description Instance runner
+ Last contact Never contacted
+ Version 1.0.0
+ IP Address 127.0.0.1
+ Executor None
+ Architecture None
+ Platform darwin
+ Configuration Runs untagged jobs
+ Maximum job timeout None
+ Tags None`.replace(/\s+/g, ' ');
+
+ expect(wrapper.text().replace(/\s+/g, ' ')).toContain(expected);
+ });
+
+ it('renders runner details component', () => {
+ expect(findRunnerDetails().props('runner')).toEqual(mockRunner);
+ });
+
+ describe('when runner cannot be updated', () => {
+ beforeEach(async () => {
+ mockRunnerQueryResult({
+ userPermissions: {
+ ...mockRunner.userPermissions,
+ updateRunner: false,
+ },
+ });
+
+ await createComponent({
+ mountFn: mountExtended,
+ });
+ });
+
+ it('does not display edit and pause buttons', () => {
+ expect(findRunnerEditButton().exists()).toBe(false);
+ expect(findRunnerPauseButton().exists()).toBe(false);
+ });
+
+ it('displays delete button', () => {
+ expect(findRunnerDeleteButton().exists()).toBe(true);
+ });
+ });
+
+ describe('when runner cannot be deleted', () => {
+ beforeEach(async () => {
+ mockRunnerQueryResult({
+ userPermissions: {
+ ...mockRunner.userPermissions,
+ deleteRunner: false,
+ },
+ });
+
+ await createComponent({
+ mountFn: mountExtended,
+ });
+ });
+
+ it('does not display delete button', () => {
+ expect(findRunnerDeleteButton().exists()).toBe(false);
+ });
+
+ it('displays edit and pause buttons', () => {
+ expect(findRunnerEditButton().exists()).toBe(true);
+ expect(findRunnerPauseButton().exists()).toBe(true);
+ });
+ });
+
+ describe('when runner is deleted', () => {
+ beforeEach(async () => {
+ await createComponent({
+ mountFn: mountExtended,
+ });
+ });
+
+ it('redirects to the runner list page', () => {
+ findRunnerDeleteButton().vm.$emit('deleted', { message: 'Runner deleted' });
+
+ expect(saveAlertToLocalStorage).toHaveBeenCalledWith({
+ message: 'Runner deleted',
+ variant: VARIANT_SUCCESS,
+ });
+ expect(redirectTo).toHaveBeenCalledWith(mockRunnersPath);
+ });
+ });
+ });
+
+ describe('When loading', () => {
+ beforeEach(() => {
+ mockRunnerQueryResult();
+
+ createComponent();
+ });
+
+ it('does not show runner details', () => {
+ expect(findRunnerDetails().exists()).toBe(false);
+ });
+ });
+
+ describe('When there is an error', () => {
+ beforeEach(async () => {
+ mockRunnerQuery = jest.fn().mockRejectedValueOnce(new Error('Error!'));
+ await createComponent();
+ });
+
+ it('does not show runner details', () => {
+ expect(findRunnerDetails().exists()).toBe(false);
+ });
+
+ it('error is reported to sentry', () => {
+ expect(captureException).toHaveBeenCalledWith({
+ error: new Error('Error!'),
+ component: 'GroupRunnerShowApp',
+ });
+ });
+
+ it('error is shown to the user', () => {
+ expect(createAlert).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/runner/group_runners/group_runners_app_spec.js b/spec/frontend/runner/group_runners/group_runners_app_spec.js
index eb9f85a7d0f..9c42b0d6865 100644
--- a/spec/frontend/runner/group_runners/group_runners_app_spec.js
+++ b/spec/frontend/runner/group_runners/group_runners_app_spec.js
@@ -10,6 +10,7 @@ import {
} from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/flash';
+import { s__ } from '~/locale';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { updateHistory } from '~/lib/utils/url_utility';
@@ -18,6 +19,7 @@ import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_
import RunnerList from '~/runner/components/runner_list.vue';
import RunnerListEmptyState from '~/runner/components/runner_list_empty_state.vue';
import RunnerStats from '~/runner/components/stat/runner_stats.vue';
+import RunnerCount from '~/runner/components/stat/runner_count.vue';
import RunnerActionsCell from '~/runner/components/cells/runner_actions_cell.vue';
import RegistrationDropdown from '~/runner/components/registration/registration_dropdown.vue';
import RunnerPagination from '~/runner/components/runner_pagination.vue';
@@ -28,7 +30,6 @@ import {
DEFAULT_SORT,
INSTANCE_TYPE,
GROUP_TYPE,
- PROJECT_TYPE,
PARAM_KEY_PAUSED,
PARAM_KEY_STATUS,
PARAM_KEY_TAG,
@@ -38,11 +39,10 @@ import {
RUNNER_PAGE_SIZE,
I18N_EDIT,
} from '~/runner/constants';
-import getGroupRunnersQuery from '~/runner/graphql/list/group_runners.query.graphql';
-import getGroupRunnersCountQuery from '~/runner/graphql/list/group_runners_count.query.graphql';
+import groupRunnersQuery from '~/runner/graphql/list/group_runners.query.graphql';
+import groupRunnersCountQuery from '~/runner/graphql/list/group_runners_count.query.graphql';
import GroupRunnersApp from '~/runner/group_runners/group_runners_app.vue';
import { captureException } from '~/runner/sentry_utils';
-import FilteredSearch from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import {
groupRunnersData,
groupRunnersDataPaginated,
@@ -61,6 +61,9 @@ const mockRegistrationToken = 'AABBCC';
const mockGroupRunnersEdges = groupRunnersData.data.group.runners.edges;
const mockGroupRunnersCount = mockGroupRunnersEdges.length;
+const mockGroupRunnersHandler = jest.fn();
+const mockGroupRunnersCountHandler = jest.fn();
+
jest.mock('~/flash');
jest.mock('~/runner/sentry_utils');
jest.mock('~/lib/utils/url_utility', () => ({
@@ -70,8 +73,6 @@ jest.mock('~/lib/utils/url_utility', () => ({
describe('GroupRunnersApp', () => {
let wrapper;
- let mockGroupRunnersQuery;
- let mockGroupRunnersCountQuery;
const findRunnerStats = () => wrapper.findComponent(RunnerStats);
const findRunnerActionsCell = () => wrapper.findComponent(RunnerActionsCell);
@@ -83,17 +84,11 @@ describe('GroupRunnersApp', () => {
const findRunnerPagination = () => extendedWrapper(wrapper.findComponent(RunnerPagination));
const findRunnerPaginationNext = () => findRunnerPagination().findByLabelText('Go to next page');
const findRunnerFilteredSearchBar = () => wrapper.findComponent(RunnerFilteredSearchBar);
- const findFilteredSearch = () => wrapper.findComponent(FilteredSearch);
-
- const mockCountQueryResult = (count) =>
- Promise.resolve({
- data: { group: { id: groupRunnersCountData.data.group.id, runners: { count } } },
- });
- const createComponent = ({ props = {}, mountFn = shallowMountExtended } = {}) => {
+ const createComponent = ({ props = {}, mountFn = shallowMountExtended, ...options } = {}) => {
const handlers = [
- [getGroupRunnersQuery, mockGroupRunnersQuery],
- [getGroupRunnersCountQuery, mockGroupRunnersCountQuery],
+ [groupRunnersQuery, mockGroupRunnersHandler],
+ [groupRunnersCountQuery, mockGroupRunnersCountHandler],
];
wrapper = mountFn(GroupRunnersApp, {
@@ -110,90 +105,76 @@ describe('GroupRunnersApp', () => {
emptyStateSvgPath,
emptyStateFilteredSvgPath,
},
+ ...options,
});
+
+ return waitForPromises();
};
beforeEach(async () => {
- setWindowLocation(`/groups/${mockGroupFullPath}/-/runners`);
+ mockGroupRunnersHandler.mockResolvedValue(groupRunnersData);
+ mockGroupRunnersCountHandler.mockResolvedValue(groupRunnersCountData);
+ });
- mockGroupRunnersQuery = jest.fn().mockResolvedValue(groupRunnersData);
- mockGroupRunnersCountQuery = jest.fn().mockResolvedValue(groupRunnersCountData);
+ afterEach(() => {
+ mockGroupRunnersHandler.mockReset();
+ mockGroupRunnersCountHandler.mockReset();
+ wrapper.destroy();
+ });
+
+ it('shows the runner tabs with a runner count for each type', async () => {
+ await createComponent({ mountFn: mountExtended });
+ expect(findRunnerTypeTabs().text()).toMatchInterpolatedText(
+ `All ${mockGroupRunnersCount} Group ${mockGroupRunnersCount} Project ${mockGroupRunnersCount}`,
+ );
+ });
+
+ it('shows the runner setup instructions', () => {
createComponent();
- await waitForPromises();
+
+ expect(findRegistrationDropdown().props('registrationToken')).toBe(mockRegistrationToken);
+ expect(findRegistrationDropdown().props('type')).toBe(GROUP_TYPE);
});
it('shows total runner counts', async () => {
- expect(mockGroupRunnersCountQuery).toHaveBeenCalledWith({
- groupFullPath: mockGroupFullPath,
+ await createComponent({ mountFn: mountExtended });
+
+ expect(mockGroupRunnersCountHandler).toHaveBeenCalledWith({
status: STATUS_ONLINE,
- });
- expect(mockGroupRunnersCountQuery).toHaveBeenCalledWith({
groupFullPath: mockGroupFullPath,
- status: STATUS_OFFLINE,
});
- expect(mockGroupRunnersCountQuery).toHaveBeenCalledWith({
+ expect(mockGroupRunnersCountHandler).toHaveBeenCalledWith({
+ status: STATUS_OFFLINE,
groupFullPath: mockGroupFullPath,
- status: STATUS_STALE,
- });
-
- expect(findRunnerStats().props()).toMatchObject({
- onlineRunnersCount: mockGroupRunnersCount,
- offlineRunnersCount: mockGroupRunnersCount,
- staleRunnersCount: mockGroupRunnersCount,
- });
- });
-
- it('shows the runner tabs with a runner count for each type', async () => {
- mockGroupRunnersCountQuery.mockImplementation(({ type }) => {
- switch (type) {
- case GROUP_TYPE:
- return mockCountQueryResult(2);
- case PROJECT_TYPE:
- return mockCountQueryResult(1);
- default:
- return mockCountQueryResult(4);
- }
});
-
- createComponent({ mountFn: mountExtended });
- await waitForPromises();
-
- expect(findRunnerTypeTabs().text()).toMatchInterpolatedText('All 4 Group 2 Project 1');
- });
-
- it('shows the runner tabs with a formatted runner count', async () => {
- mockGroupRunnersCountQuery.mockImplementation(({ type }) => {
- switch (type) {
- case GROUP_TYPE:
- return mockCountQueryResult(2000);
- case PROJECT_TYPE:
- return mockCountQueryResult(1000);
- default:
- return mockCountQueryResult(3000);
- }
+ expect(mockGroupRunnersCountHandler).toHaveBeenCalledWith({
+ status: STATUS_STALE,
+ groupFullPath: mockGroupFullPath,
});
- createComponent({ mountFn: mountExtended });
- await waitForPromises();
-
- expect(findRunnerTypeTabs().text()).toMatchInterpolatedText(
- 'All 3,000 Group 2,000 Project 1,000',
+ expect(findRunnerStats().text()).toContain(
+ `${s__('Runners|Online runners')} ${mockGroupRunnersCount}`,
+ );
+ expect(findRunnerStats().text()).toContain(
+ `${s__('Runners|Offline runners')} ${mockGroupRunnersCount}`,
+ );
+ expect(findRunnerStats().text()).toContain(
+ `${s__('Runners|Stale runners')} ${mockGroupRunnersCount}`,
);
});
- it('shows the runner setup instructions', () => {
- expect(findRegistrationDropdown().props('registrationToken')).toBe(mockRegistrationToken);
- expect(findRegistrationDropdown().props('type')).toBe(GROUP_TYPE);
- });
+ it('shows the runners list', async () => {
+ await createComponent();
- it('shows the runners list', () => {
const runners = findRunnerList().props('runners');
expect(runners).toEqual(mockGroupRunnersEdges.map(({ node }) => node));
});
- it('requests the runners with group path and no other filters', () => {
- expect(mockGroupRunnersQuery).toHaveBeenLastCalledWith({
+ it('requests the runners with group path and no other filters', async () => {
+ await createComponent();
+
+ expect(mockGroupRunnersHandler).toHaveBeenLastCalledWith({
groupFullPath: mockGroupFullPath,
status: undefined,
type: undefined,
@@ -203,9 +184,9 @@ describe('GroupRunnersApp', () => {
});
it('sets tokens in the filtered search', () => {
- createComponent({ mountFn: mountExtended });
+ createComponent();
- const tokens = findFilteredSearch().props('tokens');
+ const tokens = findRunnerFilteredSearchBar().props('tokens');
expect(tokens).toEqual([
expect.objectContaining({
@@ -229,12 +210,8 @@ describe('GroupRunnersApp', () => {
const FILTERED_COUNT_QUERIES = 3; // Smart queries that display a count of runners in tabs
beforeEach(async () => {
- mockGroupRunnersCountQuery.mockClear();
-
- createComponent({ mountFn: mountExtended });
+ await createComponent({ mountFn: mountExtended });
showToast = jest.spyOn(wrapper.vm.$root.$toast, 'show');
-
- await waitForPromises();
});
it('view link is displayed correctly', () => {
@@ -254,11 +231,11 @@ describe('GroupRunnersApp', () => {
});
it('When runner is paused or unpaused, some data is refetched', async () => {
- expect(mockGroupRunnersCountQuery).toHaveBeenCalledTimes(COUNT_QUERIES);
+ expect(mockGroupRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES);
findRunnerActionsCell().vm.$emit('toggledPaused');
- expect(mockGroupRunnersCountQuery).toHaveBeenCalledTimes(
+ expect(mockGroupRunnersCountHandler).toHaveBeenCalledTimes(
COUNT_QUERIES + FILTERED_COUNT_QUERIES,
);
@@ -277,8 +254,12 @@ describe('GroupRunnersApp', () => {
beforeEach(async () => {
setWindowLocation(`?status[]=${STATUS_ONLINE}&runner_type[]=${INSTANCE_TYPE}`);
- createComponent();
- await waitForPromises();
+ await createComponent({
+ stubs: {
+ RunnerStats,
+ RunnerCount,
+ },
+ });
});
it('sets the filters in the search bar', () => {
@@ -291,7 +272,7 @@ describe('GroupRunnersApp', () => {
});
it('requests the runners with filter parameters', () => {
- expect(mockGroupRunnersQuery).toHaveBeenLastCalledWith({
+ expect(mockGroupRunnersHandler).toHaveBeenLastCalledWith({
groupFullPath: mockGroupFullPath,
status: STATUS_ONLINE,
type: INSTANCE_TYPE,
@@ -301,20 +282,23 @@ describe('GroupRunnersApp', () => {
});
it('fetches count results for requested status', () => {
- expect(mockGroupRunnersCountQuery).toHaveBeenCalledWith({
+ expect(mockGroupRunnersCountHandler).toHaveBeenCalledWith({
groupFullPath: mockGroupFullPath,
type: INSTANCE_TYPE,
status: STATUS_ONLINE,
});
-
- expect(findRunnerStats().props()).toMatchObject({
- onlineRunnersCount: mockGroupRunnersCount,
- });
});
});
describe('when a filter is selected by the user', () => {
beforeEach(async () => {
+ createComponent({
+ stubs: {
+ RunnerStats,
+ RunnerCount,
+ },
+ });
+
findRunnerFilteredSearchBar().vm.$emit('input', {
runnerType: null,
filters: [
@@ -330,12 +314,12 @@ describe('GroupRunnersApp', () => {
it('updates the browser url', () => {
expect(updateHistory).toHaveBeenLastCalledWith({
title: expect.any(String),
- url: 'http://test.host/groups/group1/-/runners?status[]=ONLINE&tag[]=tag1&sort=CREATED_ASC',
+ url: expect.stringContaining('?status[]=ONLINE&tag[]=tag1&sort=CREATED_ASC'),
});
});
it('requests the runners with filters', () => {
- expect(mockGroupRunnersQuery).toHaveBeenLastCalledWith({
+ expect(mockGroupRunnersHandler).toHaveBeenLastCalledWith({
groupFullPath: mockGroupFullPath,
status: STATUS_ONLINE,
tagList: ['tag1'],
@@ -345,33 +329,11 @@ describe('GroupRunnersApp', () => {
});
it('fetches count results for requested status', () => {
- expect(mockGroupRunnersCountQuery).toHaveBeenCalledWith({
+ expect(mockGroupRunnersCountHandler).toHaveBeenCalledWith({
groupFullPath: mockGroupFullPath,
tagList: ['tag1'],
status: STATUS_ONLINE,
});
-
- expect(findRunnerStats().props()).toMatchObject({
- onlineRunnersCount: mockGroupRunnersCount,
- });
- });
-
- it('skips fetching count results for status that were not in filter', () => {
- expect(mockGroupRunnersCountQuery).not.toHaveBeenCalledWith({
- groupFullPath: mockGroupFullPath,
- tagList: ['tag1'],
- status: STATUS_OFFLINE,
- });
- expect(mockGroupRunnersCountQuery).not.toHaveBeenCalledWith({
- groupFullPath: mockGroupFullPath,
- tagList: ['tag1'],
- status: STATUS_STALE,
- });
-
- expect(findRunnerStats().props()).toMatchObject({
- offlineRunnersCount: null,
- staleRunnersCount: null,
- });
});
});
@@ -382,7 +344,7 @@ describe('GroupRunnersApp', () => {
describe('when no runners are found', () => {
beforeEach(async () => {
- mockGroupRunnersQuery = jest.fn().mockResolvedValue({
+ mockGroupRunnersHandler.mockResolvedValue({
data: {
group: {
id: '1',
@@ -390,8 +352,7 @@ describe('GroupRunnersApp', () => {
},
},
});
- createComponent();
- await waitForPromises();
+ await createComponent();
});
it('shows an empty state', async () => {
@@ -401,9 +362,8 @@ describe('GroupRunnersApp', () => {
describe('when runners query fails', () => {
beforeEach(async () => {
- mockGroupRunnersQuery = jest.fn().mockRejectedValue(new Error('Error!'));
- createComponent();
- await waitForPromises();
+ mockGroupRunnersHandler.mockRejectedValue(new Error('Error!'));
+ await createComponent();
});
it('error is shown to the user', async () => {
@@ -420,16 +380,15 @@ describe('GroupRunnersApp', () => {
describe('Pagination', () => {
beforeEach(async () => {
- mockGroupRunnersQuery = jest.fn().mockResolvedValue(groupRunnersDataPaginated);
+ mockGroupRunnersHandler.mockResolvedValue(groupRunnersDataPaginated);
- createComponent({ mountFn: mountExtended });
- await waitForPromises();
+ await createComponent({ mountFn: mountExtended });
});
it('navigates to the next page', async () => {
await findRunnerPaginationNext().trigger('click');
- expect(mockGroupRunnersQuery).toHaveBeenLastCalledWith({
+ expect(mockGroupRunnersHandler).toHaveBeenLastCalledWith({
groupFullPath: mockGroupFullPath,
sort: CREATED_DESC,
first: RUNNER_PAGE_SIZE,
diff --git a/spec/frontend/runner/mock_data.js b/spec/frontend/runner/mock_data.js
index 3368fc21544..e5472ace817 100644
--- a/spec/frontend/runner/mock_data.js
+++ b/spec/frontend/runner/mock_data.js
@@ -10,14 +10,216 @@ import runnerJobsData from 'test_fixtures/graphql/runner/show/runner_jobs.query.
import runnerFormData from 'test_fixtures/graphql/runner/edit/runner_form.query.graphql.json';
// List queries
-import runnersData from 'test_fixtures/graphql/runner/list/admin_runners.query.graphql.json';
-import runnersDataPaginated from 'test_fixtures/graphql/runner/list/admin_runners.query.graphql.paginated.json';
-import runnersCountData from 'test_fixtures/graphql/runner/list/admin_runners_count.query.graphql.json';
+import allRunnersData from 'test_fixtures/graphql/runner/list/all_runners.query.graphql.json';
+import allRunnersDataPaginated from 'test_fixtures/graphql/runner/list/all_runners.query.graphql.paginated.json';
+import runnersCountData from 'test_fixtures/graphql/runner/list/all_runners_count.query.graphql.json';
import groupRunnersData from 'test_fixtures/graphql/runner/list/group_runners.query.graphql.json';
import groupRunnersDataPaginated from 'test_fixtures/graphql/runner/list/group_runners.query.graphql.paginated.json';
import groupRunnersCountData from 'test_fixtures/graphql/runner/list/group_runners_count.query.graphql.json';
+import { RUNNER_PAGE_SIZE } from '~/runner/constants';
+
// Other mock data
+
+// Mock searches and their corresponding urls
+export const mockSearchExamples = [
+ {
+ name: 'a default query',
+ urlQuery: '',
+ search: { runnerType: null, filters: [], pagination: { page: 1 }, sort: 'CREATED_DESC' },
+ graphqlVariables: { sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ isDefault: true,
+ },
+ {
+ name: 'a single status',
+ urlQuery: '?status[]=ACTIVE',
+ search: {
+ runnerType: null,
+ filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'a single term text search',
+ urlQuery: '?search=something',
+ search: {
+ runnerType: null,
+ filters: [
+ {
+ type: 'filtered-search-term',
+ value: { data: 'something' },
+ },
+ ],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { search: 'something', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'a two terms text search',
+ urlQuery: '?search=something+else',
+ search: {
+ runnerType: null,
+ filters: [
+ {
+ type: 'filtered-search-term',
+ value: { data: 'something' },
+ },
+ {
+ type: 'filtered-search-term',
+ value: { data: 'else' },
+ },
+ ],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { search: 'something else', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'single instance type',
+ urlQuery: '?runner_type[]=INSTANCE_TYPE',
+ search: {
+ runnerType: 'INSTANCE_TYPE',
+ filters: [],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { type: 'INSTANCE_TYPE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'multiple runner status',
+ urlQuery: '?status[]=ACTIVE&status[]=PAUSED',
+ search: {
+ runnerType: null,
+ filters: [
+ { type: 'status', value: { data: 'ACTIVE', operator: '=' } },
+ { type: 'status', value: { data: 'PAUSED', operator: '=' } },
+ ],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'multiple status, a single instance type and a non default sort',
+ urlQuery: '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&sort=CREATED_ASC',
+ search: {
+ runnerType: 'INSTANCE_TYPE',
+ filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
+ pagination: { page: 1 },
+ sort: 'CREATED_ASC',
+ },
+ graphqlVariables: {
+ status: 'ACTIVE',
+ type: 'INSTANCE_TYPE',
+ sort: 'CREATED_ASC',
+ first: RUNNER_PAGE_SIZE,
+ },
+ },
+ {
+ name: 'a tag',
+ urlQuery: '?tag[]=tag-1',
+ search: {
+ runnerType: null,
+ filters: [{ type: 'tag', value: { data: 'tag-1', operator: '=' } }],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: {
+ tagList: ['tag-1'],
+ first: 20,
+ sort: 'CREATED_DESC',
+ },
+ },
+ {
+ name: 'two tags',
+ urlQuery: '?tag[]=tag-1&tag[]=tag-2',
+ search: {
+ runnerType: null,
+ filters: [
+ { type: 'tag', value: { data: 'tag-1', operator: '=' } },
+ { type: 'tag', value: { data: 'tag-2', operator: '=' } },
+ ],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: {
+ tagList: ['tag-1', 'tag-2'],
+ first: 20,
+ sort: 'CREATED_DESC',
+ },
+ },
+ {
+ name: 'the next page',
+ urlQuery: '?page=2&after=AFTER_CURSOR',
+ search: {
+ runnerType: null,
+ filters: [],
+ pagination: { page: 2, after: 'AFTER_CURSOR' },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { sort: 'CREATED_DESC', after: 'AFTER_CURSOR', first: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'the previous page',
+ urlQuery: '?page=2&before=BEFORE_CURSOR',
+ search: {
+ runnerType: null,
+ filters: [],
+ pagination: { page: 2, before: 'BEFORE_CURSOR' },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { sort: 'CREATED_DESC', before: 'BEFORE_CURSOR', last: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'the next page filtered by a status, an instance type, tags and a non default sort',
+ urlQuery:
+ '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&tag[]=tag-1&tag[]=tag-2&sort=CREATED_ASC&page=2&after=AFTER_CURSOR',
+ search: {
+ runnerType: 'INSTANCE_TYPE',
+ filters: [
+ { type: 'status', value: { data: 'ACTIVE', operator: '=' } },
+ { type: 'tag', value: { data: 'tag-1', operator: '=' } },
+ { type: 'tag', value: { data: 'tag-2', operator: '=' } },
+ ],
+ pagination: { page: 2, after: 'AFTER_CURSOR' },
+ sort: 'CREATED_ASC',
+ },
+ graphqlVariables: {
+ status: 'ACTIVE',
+ type: 'INSTANCE_TYPE',
+ tagList: ['tag-1', 'tag-2'],
+ sort: 'CREATED_ASC',
+ after: 'AFTER_CURSOR',
+ first: RUNNER_PAGE_SIZE,
+ },
+ },
+ {
+ name: 'paused runners',
+ urlQuery: '?paused[]=true',
+ search: {
+ runnerType: null,
+ filters: [{ type: 'paused', value: { data: 'true', operator: '=' } }],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { paused: true, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'active runners',
+ urlQuery: '?paused[]=false',
+ search: {
+ runnerType: null,
+ filters: [{ type: 'paused', value: { data: 'false', operator: '=' } }],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { paused: false, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ },
+];
+
export const onlineContactTimeoutSecs = 2 * 60 * 60;
export const staleTimeoutSecs = 7889238; // Ruby's `3.months`
@@ -25,8 +227,8 @@ export const emptyStateSvgPath = 'emptyStateSvgPath.svg';
export const emptyStateFilteredSvgPath = 'emptyStateFilteredSvgPath.svg';
export {
- runnersData,
- runnersDataPaginated,
+ allRunnersData,
+ allRunnersDataPaginated,
runnersCountData,
groupRunnersData,
groupRunnersDataPaginated,
diff --git a/spec/frontend/runner/runner_search_utils_spec.js b/spec/frontend/runner/runner_search_utils_spec.js
index 1f102f86b2a..6f954143ab1 100644
--- a/spec/frontend/runner/runner_search_utils_spec.js
+++ b/spec/frontend/runner/runner_search_utils_spec.js
@@ -1,4 +1,3 @@
-import { RUNNER_PAGE_SIZE } from '~/runner/constants';
import {
searchValidator,
updateOutdatedUrl,
@@ -6,209 +5,12 @@ import {
fromSearchToUrl,
fromSearchToVariables,
isSearchFiltered,
-} from '~/runner/runner_search_utils';
+} from 'ee_else_ce/runner/runner_search_utils';
+import { mockSearchExamples } from './mock_data';
describe('search_params.js', () => {
- const examples = [
- {
- name: 'a default query',
- urlQuery: '',
- search: { runnerType: null, filters: [], pagination: { page: 1 }, sort: 'CREATED_DESC' },
- graphqlVariables: { sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- isDefault: true,
- },
- {
- name: 'a single status',
- urlQuery: '?status[]=ACTIVE',
- search: {
- runnerType: null,
- filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- },
- {
- name: 'a single term text search',
- urlQuery: '?search=something',
- search: {
- runnerType: null,
- filters: [
- {
- type: 'filtered-search-term',
- value: { data: 'something' },
- },
- ],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { search: 'something', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- },
- {
- name: 'a two terms text search',
- urlQuery: '?search=something+else',
- search: {
- runnerType: null,
- filters: [
- {
- type: 'filtered-search-term',
- value: { data: 'something' },
- },
- {
- type: 'filtered-search-term',
- value: { data: 'else' },
- },
- ],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { search: 'something else', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- },
- {
- name: 'single instance type',
- urlQuery: '?runner_type[]=INSTANCE_TYPE',
- search: {
- runnerType: 'INSTANCE_TYPE',
- filters: [],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { type: 'INSTANCE_TYPE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- },
- {
- name: 'multiple runner status',
- urlQuery: '?status[]=ACTIVE&status[]=PAUSED',
- search: {
- runnerType: null,
- filters: [
- { type: 'status', value: { data: 'ACTIVE', operator: '=' } },
- { type: 'status', value: { data: 'PAUSED', operator: '=' } },
- ],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- },
- {
- name: 'multiple status, a single instance type and a non default sort',
- urlQuery: '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&sort=CREATED_ASC',
- search: {
- runnerType: 'INSTANCE_TYPE',
- filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
- pagination: { page: 1 },
- sort: 'CREATED_ASC',
- },
- graphqlVariables: {
- status: 'ACTIVE',
- type: 'INSTANCE_TYPE',
- sort: 'CREATED_ASC',
- first: RUNNER_PAGE_SIZE,
- },
- },
- {
- name: 'a tag',
- urlQuery: '?tag[]=tag-1',
- search: {
- runnerType: null,
- filters: [{ type: 'tag', value: { data: 'tag-1', operator: '=' } }],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: {
- tagList: ['tag-1'],
- first: 20,
- sort: 'CREATED_DESC',
- },
- },
- {
- name: 'two tags',
- urlQuery: '?tag[]=tag-1&tag[]=tag-2',
- search: {
- runnerType: null,
- filters: [
- { type: 'tag', value: { data: 'tag-1', operator: '=' } },
- { type: 'tag', value: { data: 'tag-2', operator: '=' } },
- ],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: {
- tagList: ['tag-1', 'tag-2'],
- first: 20,
- sort: 'CREATED_DESC',
- },
- },
- {
- name: 'the next page',
- urlQuery: '?page=2&after=AFTER_CURSOR',
- search: {
- runnerType: null,
- filters: [],
- pagination: { page: 2, after: 'AFTER_CURSOR' },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { sort: 'CREATED_DESC', after: 'AFTER_CURSOR', first: RUNNER_PAGE_SIZE },
- },
- {
- name: 'the previous page',
- urlQuery: '?page=2&before=BEFORE_CURSOR',
- search: {
- runnerType: null,
- filters: [],
- pagination: { page: 2, before: 'BEFORE_CURSOR' },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { sort: 'CREATED_DESC', before: 'BEFORE_CURSOR', last: RUNNER_PAGE_SIZE },
- },
- {
- name: 'the next page filtered by a status, an instance type, tags and a non default sort',
- urlQuery:
- '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&tag[]=tag-1&tag[]=tag-2&sort=CREATED_ASC&page=2&after=AFTER_CURSOR',
- search: {
- runnerType: 'INSTANCE_TYPE',
- filters: [
- { type: 'status', value: { data: 'ACTIVE', operator: '=' } },
- { type: 'tag', value: { data: 'tag-1', operator: '=' } },
- { type: 'tag', value: { data: 'tag-2', operator: '=' } },
- ],
- pagination: { page: 2, after: 'AFTER_CURSOR' },
- sort: 'CREATED_ASC',
- },
- graphqlVariables: {
- status: 'ACTIVE',
- type: 'INSTANCE_TYPE',
- tagList: ['tag-1', 'tag-2'],
- sort: 'CREATED_ASC',
- after: 'AFTER_CURSOR',
- first: RUNNER_PAGE_SIZE,
- },
- },
- {
- name: 'paused runners',
- urlQuery: '?paused[]=true',
- search: {
- runnerType: null,
- filters: [{ type: 'paused', value: { data: 'true', operator: '=' } }],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { paused: true, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- },
- {
- name: 'active runners',
- urlQuery: '?paused[]=false',
- search: {
- runnerType: null,
- filters: [{ type: 'paused', value: { data: 'false', operator: '=' } }],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { paused: false, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- },
- ];
-
describe('searchValidator', () => {
- examples.forEach(({ name, search }) => {
+ mockSearchExamples.forEach(({ name, search }) => {
it(`Validates ${name} as a search object`, () => {
expect(searchValidator(search)).toBe(true);
});
@@ -235,7 +37,7 @@ describe('search_params.js', () => {
});
describe('fromUrlQueryToSearch', () => {
- examples.forEach(({ name, urlQuery, search }) => {
+ mockSearchExamples.forEach(({ name, urlQuery, search }) => {
it(`Converts ${name} to a search object`, () => {
expect(fromUrlQueryToSearch(urlQuery)).toEqual(search);
});
@@ -268,7 +70,7 @@ describe('search_params.js', () => {
});
describe('fromSearchToUrl', () => {
- examples.forEach(({ name, urlQuery, search }) => {
+ mockSearchExamples.forEach(({ name, urlQuery, search }) => {
it(`Converts ${name} to a url`, () => {
expect(fromSearchToUrl(search)).toBe(`http://test.host/${urlQuery}`);
});
@@ -295,7 +97,7 @@ describe('search_params.js', () => {
});
describe('fromSearchToVariables', () => {
- examples.forEach(({ name, graphqlVariables, search }) => {
+ mockSearchExamples.forEach(({ name, graphqlVariables, search }) => {
it(`Converts ${name} to a GraphQL query variables object`, () => {
expect(fromSearchToVariables(search)).toEqual(graphqlVariables);
});
@@ -335,7 +137,7 @@ describe('search_params.js', () => {
});
describe('isSearchFiltered', () => {
- examples.forEach(({ name, search, isDefault }) => {
+ mockSearchExamples.forEach(({ name, search, isDefault }) => {
it(`Given ${name}, evaluates to ${isDefault ? 'not ' : ''}filtered`, () => {
expect(isSearchFiltered(search)).toBe(!isDefault);
});
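With the examples centralized in mock_data.js, the same loops could also be written table-driven; a sketch only, under the assumption that the local Jest version supports $name title injection for it.each over an array of objects (the spec above keeps its explicit forEach loops):

// Table-driven sketch using the shared mockSearchExamples.
describe('fromSearchToVariables (table-driven sketch)', () => {
  it.each(mockSearchExamples)(
    'converts $name to GraphQL query variables',
    ({ search, graphqlVariables }) => {
      expect(fromSearchToVariables(search)).toEqual(graphqlVariables);
    },
  );
});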
diff --git a/spec/frontend/security_configuration/components/app_spec.js b/spec/frontend/security_configuration/components/app_spec.js
index de91e51924d..222cabc6a63 100644
--- a/spec/frontend/security_configuration/components/app_spec.js
+++ b/spec/frontend/security_configuration/components/app_spec.js
@@ -1,11 +1,10 @@
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import { GlTab, GlTabs, GlLink } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import { makeMockUserCalloutDismisser } from 'helpers/mock_user_callout_dismisser';
import stubChildren from 'helpers/stub_children';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import SecurityConfigurationApp, { i18n } from '~/security_configuration/components/app.vue';
import AutoDevopsAlert from '~/security_configuration/components/auto_dev_ops_alert.vue';
import AutoDevopsEnabledAlert from '~/security_configuration/components/auto_dev_ops_enabled_alert.vue';
@@ -42,35 +41,57 @@ describe('App component', () => {
let wrapper;
let userCalloutDismissSpy;
- const createComponent = ({ shouldShowCallout = true, ...propsData }) => {
+ const securityFeaturesMock = [
+ {
+ name: SAST_NAME,
+ shortName: SAST_SHORT_NAME,
+ description: SAST_DESCRIPTION,
+ helpPath: SAST_HELP_PATH,
+ configurationHelpPath: SAST_CONFIG_HELP_PATH,
+ type: REPORT_TYPE_SAST,
+ available: true,
+ },
+ ];
+
+ const complianceFeaturesMock = [
+ {
+ name: LICENSE_COMPLIANCE_NAME,
+ description: LICENSE_COMPLIANCE_DESCRIPTION,
+ helpPath: LICENSE_COMPLIANCE_HELP_PATH,
+ type: REPORT_TYPE_LICENSE_COMPLIANCE,
+ configurationHelpPath: LICENSE_COMPLIANCE_HELP_PATH,
+ },
+ ];
+
+ const createComponent = ({ shouldShowCallout = true, ...propsData } = {}) => {
userCalloutDismissSpy = jest.fn();
- wrapper = extendedWrapper(
- mount(SecurityConfigurationApp, {
- propsData: {
- securityTrainingEnabled: true,
- ...propsData,
- },
- provide: {
- upgradePath,
- autoDevopsHelpPagePath,
- autoDevopsPath,
- projectFullPath,
- vulnerabilityTrainingDocsPath,
- },
- stubs: {
- ...stubChildren(SecurityConfigurationApp),
- GlLink: false,
- GlSprintf: false,
- LocalStorageSync: false,
- SectionLayout: false,
- UserCalloutDismisser: makeMockUserCalloutDismisser({
- dismiss: userCalloutDismissSpy,
- shouldShowCallout,
- }),
- },
- }),
- );
+ wrapper = mountExtended(SecurityConfigurationApp, {
+ propsData: {
+ augmentedSecurityFeatures: securityFeaturesMock,
+ augmentedComplianceFeatures: complianceFeaturesMock,
+ securityTrainingEnabled: true,
+ ...propsData,
+ },
+ provide: {
+ upgradePath,
+ autoDevopsHelpPagePath,
+ autoDevopsPath,
+ projectFullPath,
+ vulnerabilityTrainingDocsPath,
+ },
+ stubs: {
+ ...stubChildren(SecurityConfigurationApp),
+ GlLink: false,
+ GlSprintf: false,
+ LocalStorageSync: false,
+ SectionLayout: false,
+ UserCalloutDismisser: makeMockUserCalloutDismisser({
+ dismiss: userCalloutDismissSpy,
+ shouldShowCallout,
+ }),
+ },
+ });
};
const findMainHeading = () => wrapper.find('h1');
@@ -108,38 +129,13 @@ describe('App component', () => {
const findAutoDevopsEnabledAlert = () => wrapper.findComponent(AutoDevopsEnabledAlert);
const findVulnerabilityManagementTab = () => wrapper.findByTestId('vulnerability-management-tab');
- const securityFeaturesMock = [
- {
- name: SAST_NAME,
- shortName: SAST_SHORT_NAME,
- description: SAST_DESCRIPTION,
- helpPath: SAST_HELP_PATH,
- configurationHelpPath: SAST_CONFIG_HELP_PATH,
- type: REPORT_TYPE_SAST,
- available: true,
- },
- ];
-
- const complianceFeaturesMock = [
- {
- name: LICENSE_COMPLIANCE_NAME,
- description: LICENSE_COMPLIANCE_DESCRIPTION,
- helpPath: LICENSE_COMPLIANCE_HELP_PATH,
- type: REPORT_TYPE_LICENSE_COMPLIANCE,
- configurationHelpPath: LICENSE_COMPLIANCE_HELP_PATH,
- },
- ];
-
afterEach(() => {
wrapper.destroy();
});
describe('basic structure', () => {
- beforeEach(async () => {
- createComponent({
- augmentedSecurityFeatures: securityFeaturesMock,
- augmentedComplianceFeatures: complianceFeaturesMock,
- });
+ beforeEach(() => {
+ createComponent();
});
it('renders main-heading with correct text', () => {
@@ -199,10 +195,7 @@ describe('App component', () => {
describe('Manage via MR Error Alert', () => {
beforeEach(() => {
- createComponent({
- augmentedSecurityFeatures: securityFeaturesMock,
- augmentedComplianceFeatures: complianceFeaturesMock,
- });
+ createComponent();
});
describe('on initial load', () => {
@@ -238,8 +231,6 @@ describe('App component', () => {
describe('given the right props', () => {
beforeEach(() => {
createComponent({
- augmentedSecurityFeatures: securityFeaturesMock,
- augmentedComplianceFeatures: complianceFeaturesMock,
autoDevopsEnabled: false,
gitlabCiPresent: false,
canEnableAutoDevops: true,
@@ -261,10 +252,7 @@ describe('App component', () => {
describe('given the wrong props', () => {
beforeEach(() => {
- createComponent({
- augmentedSecurityFeatures: securityFeaturesMock,
- augmentedComplianceFeatures: complianceFeaturesMock,
- });
+ createComponent();
});
it('should not show AutoDevopsAlert', () => {
expect(findAutoDevopsAlert().exists()).toBe(false);
@@ -289,8 +277,6 @@ describe('App component', () => {
}
createComponent({
- augmentedSecurityFeatures: securityFeaturesMock,
- augmentedComplianceFeatures: complianceFeaturesMock,
autoDevopsEnabled,
});
});
@@ -348,7 +334,6 @@ describe('App component', () => {
describe('given at least one unavailable feature', () => {
beforeEach(() => {
createComponent({
- augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock.map(makeAvailable(false)),
});
});
@@ -369,7 +354,6 @@ describe('App component', () => {
describe('given at least one unavailable feature, but banner is already dismissed', () => {
beforeEach(() => {
createComponent({
- augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock.map(makeAvailable(false)),
shouldShowCallout: false,
});
@@ -397,8 +381,6 @@ describe('App component', () => {
describe('when given latestPipelinePath props', () => {
beforeEach(() => {
createComponent({
- augmentedSecurityFeatures: securityFeaturesMock,
- augmentedComplianceFeatures: complianceFeaturesMock,
latestPipelinePath: 'test/path',
});
});
@@ -425,8 +407,6 @@ describe('App component', () => {
describe('given gitlabCiPresent & gitlabCiHistoryPath props', () => {
beforeEach(() => {
createComponent({
- augmentedSecurityFeatures: securityFeaturesMock,
- augmentedComplianceFeatures: complianceFeaturesMock,
gitlabCiPresent: true,
gitlabCiHistoryPath,
});
@@ -442,42 +422,31 @@ describe('App component', () => {
});
describe('Vulnerability management', () => {
- it('does not show tab if security training is disabled', () => {
+ const props = { securityTrainingEnabled: true };
+
+  beforeEach(() => {
createComponent({
- augmentedSecurityFeatures: securityFeaturesMock,
- augmentedComplianceFeatures: complianceFeaturesMock,
- securityTrainingEnabled: false,
+ ...props,
});
-
- expect(findVulnerabilityManagementTab().exists()).toBe(false);
});
- describe('security training enabled', () => {
- beforeEach(async () => {
- createComponent({
- augmentedSecurityFeatures: securityFeaturesMock,
- augmentedComplianceFeatures: complianceFeaturesMock,
- });
- });
-
- it('shows the tab if security training is enabled', () => {
- expect(findVulnerabilityManagementTab().exists()).toBe(true);
- });
+ it('shows the tab', () => {
+ expect(findVulnerabilityManagementTab().exists()).toBe(true);
+ });
- it('renders TrainingProviderList component', () => {
- expect(findTrainingProviderList().exists()).toBe(true);
- });
+ it('renders TrainingProviderList component', () => {
+ expect(findTrainingProviderList().props()).toMatchObject(props);
+ });
- it('renders security training description', () => {
- expect(findVulnerabilityManagementTab().text()).toContain(i18n.securityTrainingDescription);
- });
+ it('renders security training description', () => {
+ expect(findVulnerabilityManagementTab().text()).toContain(i18n.securityTrainingDescription);
+ });
- it('renders link to help docs', () => {
- const trainingLink = findVulnerabilityManagementTab().findComponent(GlLink);
+ it('renders link to help docs', () => {
+ const trainingLink = findVulnerabilityManagementTab().findComponent(GlLink);
- expect(trainingLink.text()).toBe('Learn more about vulnerability training');
- expect(trainingLink.attributes('href')).toBe(vulnerabilityTrainingDocsPath);
- });
+ expect(trainingLink.text()).toBe('Learn more about vulnerability training');
+ expect(trainingLink.attributes('href')).toBe(vulnerabilityTrainingDocsPath);
});
});
});
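A note on the wrapper change above: `mountExtended` from `helpers/vue_test_utils_helper` folds the old `extendedWrapper(mount(...))` two-step into a single call, while the stubbing strategy stays the same: stub every child via `stubChildren`, then selectively opt a few components back into a full render by mapping them to `false`. A minimal sketch of that pattern, with placeholder names (ParentComponent, ExpensiveChild and the import path are illustrative, not real GitLab code):

    import { mountExtended } from 'helpers/vue_test_utils_helper';
    import stubChildren from 'helpers/stub_children';
    import ParentComponent from '~/example/components/parent.vue'; // placeholder path

    const createComponent = (propsData = {}) =>
      mountExtended(ParentComponent, {
        propsData,
        stubs: {
          // Stub every child by default to keep the mount shallow and fast...
          ...stubChildren(ParentComponent),
          // ...then un-stub the few components the assertions need rendered for real.
          GlSprintf: false,
          ExpensiveChild: false,
        },
      });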
diff --git a/spec/frontend/security_configuration/components/training_provider_list_spec.js b/spec/frontend/security_configuration/components/training_provider_list_spec.js
index 309a9cd4cd6..184c16fda6e 100644
--- a/spec/frontend/security_configuration/components/training_provider_list_spec.js
+++ b/spec/frontend/security_configuration/components/training_provider_list_spec.js
@@ -36,7 +36,6 @@ import {
testProjectPath,
testProviderIds,
testProviderName,
- tempProviderLogos,
} from '../mock_data';
Vue.use(VueApollo);
@@ -54,6 +53,31 @@ const TEST_TRAINING_PROVIDERS_ALL_ENABLED = getSecurityTrainingProvidersData({
});
const TEST_TRAINING_PROVIDERS_DEFAULT = TEST_TRAINING_PROVIDERS_ALL_DISABLED;
+const TEMP_PROVIDER_LOGOS = {
+ Kontra: {
+ svg: '<svg>Kontra</svg>',
+ },
+ 'Secure Code Warrior': {
+ svg: '<svg>Secure Code Warrior</svg>',
+ },
+};
+jest.mock('~/security_configuration/components/constants', () => {
+ return {
+ TEMP_PROVIDER_URLS: jest.requireActual('~/security_configuration/components/constants')
+ .TEMP_PROVIDER_URLS,
+ // NOTE: Jest hoists all mocks to the top so we can't use TEMP_PROVIDER_LOGOS
+ // here directly.
+ TEMP_PROVIDER_LOGOS: {
+ Kontra: {
+ svg: '<svg>Kontra</svg>',
+ },
+ 'Secure Code Warrior': {
+ svg: '<svg>Secure Code Warrior</svg>',
+ },
+ },
+ };
+});
+
describe('TrainingProviderList component', () => {
let wrapper;
let apolloProvider;
@@ -76,7 +100,7 @@ describe('TrainingProviderList component', () => {
apolloProvider = createMockApollo(mergedHandlers);
};
- const createComponent = () => {
+ const createComponent = (props = {}) => {
wrapper = shallowMountExtended(TrainingProviderList, {
provide: {
projectFullPath: testProjectPath,
@@ -84,6 +108,10 @@ describe('TrainingProviderList component', () => {
directives: {
GlTooltip: createMockDirective(),
},
+ propsData: {
+ securityTrainingEnabled: true,
+ ...props,
+ },
apolloProvider,
});
};
@@ -99,6 +127,7 @@ describe('TrainingProviderList component', () => {
const findLoader = () => wrapper.findComponent(GlSkeletonLoader);
const findErrorAlert = () => wrapper.findComponent(GlAlert);
const findLogos = () => wrapper.findAllByTestId('provider-logo');
+ const findUnavailableTexts = () => wrapper.findAllByTestId('unavailable-text');
const toggleFirstProvider = () => findFirstToggle().vm.$emit('change', testProviderIds[0]);
@@ -212,7 +241,6 @@ describe('TrainingProviderList component', () => {
describe('provider logo', () => {
beforeEach(async () => {
- wrapper.vm.$options.TEMP_PROVIDER_LOGOS = tempProviderLogos;
await waitForQueryToBeLoaded();
});
@@ -226,9 +254,9 @@ describe('TrainingProviderList component', () => {
expect(findLogos().at(provider).attributes('role')).toBe('presentation');
});
- it.each(providerIndexArray)('renders the svg content for provider %s', (provider) => {
+ it.each(providerIndexArray)('renders the svg content for provider %s', async (provider) => {
expect(findLogos().at(provider).html()).toContain(
- tempProviderLogos[testProviderName[provider]].svg,
+ TEMP_PROVIDER_LOGOS[testProviderName[provider]].svg,
);
});
});
@@ -351,6 +379,41 @@ describe('TrainingProviderList component', () => {
);
});
});
+
+  describe('non-Ultimate users', () => {
+ beforeEach(async () => {
+ createComponent({
+ securityTrainingEnabled: false,
+ });
+ await waitForQueryToBeLoaded();
+ });
+
+ it('displays unavailable text', () => {
+ findUnavailableTexts().wrappers.forEach((unavailableText) => {
+ expect(unavailableText.text()).toBe(TrainingProviderList.i18n.unavailableText);
+ });
+ });
+
+ it('has disabled state for toggle', () => {
+ findToggles().wrappers.forEach((toggle) => {
+ expect(toggle.props('disabled')).toBe(true);
+ });
+ });
+
+ it('has disabled state for radio', () => {
+ findPrimaryProviderRadios().wrappers.forEach((radio) => {
+ expect(radio.attributes('disabled')).toBeTruthy();
+ });
+ });
+
+    it('adds background color', () => {
+ findCards().wrappers.forEach((card) => {
+ expect(card.props('bodyClass')).toMatchObject({
+ 'gl-bg-gray-10': true,
+ });
+ });
+ });
+ });
});
describe('primary provider settings', () => {
@@ -442,7 +505,7 @@ describe('TrainingProviderList component', () => {
${'backend error'} | ${jest.fn().mockReturnValue(dismissUserCalloutErrorResponse)}
${'network error'} | ${jest.fn().mockRejectedValue()}
`('when dismissing the callout and a "$errorType" happens', ({ mutationHandler }) => {
- beforeEach(async () => {
+ it('logs the error to sentry', async () => {
jest.spyOn(Sentry, 'captureException').mockImplementation();
createApolloProvider({
@@ -460,9 +523,7 @@ describe('TrainingProviderList component', () => {
await waitForQueryToBeLoaded();
toggleFirstProvider();
- });
- it('logs the error to sentry', async () => {
expect(Sentry.captureException).not.toHaveBeenCalled();
await waitForMutationToBeLoaded();
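The NOTE inside the `jest.mock` factory above points at a general Jest rule worth spelling out: `jest.mock` calls are hoisted above the `import` statements, so by the time the factory runs it cannot rely on ordinary top-level constants such as `TEMP_PROVIDER_LOGOS` having been initialised, which is why the logo fixtures end up inlined twice. A reduced sketch of one common form of the partial-mock pattern the hunk is using; '~/example/constants' is a placeholder module path, not a real GitLab module:

    // jest.mock is hoisted above imports, so the factory must be self-contained.
    jest.mock('~/example/constants', () => ({
      // Keep the real exports we still want...
      ...jest.requireActual('~/example/constants'),
      // ...and override only the constant under test, with inline literals.
      TEMP_PROVIDER_LOGOS: {
        Kontra: { svg: '<svg>Kontra</svg>' },
      },
    }));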
diff --git a/spec/frontend/security_configuration/mock_data.js b/spec/frontend/security_configuration/mock_data.js
index 18a480bf082..2fe3b59cea3 100644
--- a/spec/frontend/security_configuration/mock_data.js
+++ b/spec/frontend/security_configuration/mock_data.js
@@ -100,14 +100,3 @@ export const updateSecurityTrainingProvidersErrorResponse = {
},
},
};
-
-// Will remove once this issue is resolved where the svg path will be available in the GraphQL query
-// https://gitlab.com/gitlab-org/gitlab/-/issues/346899
-export const tempProviderLogos = {
- [testProviderName[0]]: {
- svg: `<svg>${[testProviderName[0]]}</svg>`,
- },
- [testProviderName[1]]: {
- svg: `<svg>${[testProviderName[1]]}</svg>`,
- },
-};
diff --git a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
index 62a9ff98243..11841106ed0 100644
--- a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
+++ b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
@@ -8,7 +8,7 @@ exports[`self monitor component When the self monitor project has not been creat
class="settings-header"
>
<h4
- class="js-section-header"
+ class="js-section-header settings-title js-settings-toggle js-settings-toggle-trigger-only"
>
Self monitoring
diff --git a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
index 0b672cbc93e..e3b5478290a 100644
--- a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
+++ b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
@@ -1,10 +1,11 @@
import { GlModal, GlFormCheckbox } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { initEmojiMock, clearEmojiMock } from 'helpers/emoji';
import * as UserApi from '~/api/user_api';
import EmojiPicker from '~/emoji/components/picker.vue';
import createFlash from '~/flash';
+import stubChildren from 'helpers/stub_children';
import SetStatusModalWrapper, {
AVAILABILITY_STATUS,
} from '~/set_status_modal/set_status_modal_wrapper.vue';
@@ -26,12 +27,23 @@ describe('SetStatusModalWrapper', () => {
defaultEmoji,
};
+ const EmojiPickerStub = {
+ props: EmojiPicker.props,
+ template: '<div></div>',
+ };
+
const createComponent = (props = {}) => {
- return shallowMount(SetStatusModalWrapper, {
+ return mount(SetStatusModalWrapper, {
propsData: {
...defaultProps,
...props,
},
+ stubs: {
+ ...stubChildren(SetStatusModalWrapper),
+ GlFormInput: false,
+ GlFormInputGroup: false,
+ EmojiPicker: EmojiPickerStub,
+ },
mocks: {
$toast,
},
@@ -43,7 +55,7 @@ describe('SetStatusModalWrapper', () => {
const findClearStatusButton = () => wrapper.find('.js-clear-user-status-button');
const findAvailabilityCheckbox = () => wrapper.find(GlFormCheckbox);
const findClearStatusAtMessage = () => wrapper.find('[data-testid="clear-status-at-message"]');
- const getEmojiPicker = () => wrapper.findComponent(EmojiPicker);
+ const getEmojiPicker = () => wrapper.findComponent(EmojiPickerStub);
const initModal = async ({ mockOnUpdateSuccess = true, mockOnUpdateFailure = true } = {}) => {
const modal = findModal();
@@ -88,7 +100,7 @@ describe('SetStatusModalWrapper', () => {
});
it('has a clear status button', () => {
- expect(findClearStatusButton().isVisible()).toBe(true);
+ expect(findClearStatusButton().exists()).toBe(true);
});
it('displays the clear status at dropdown', () => {
@@ -125,7 +137,7 @@ describe('SetStatusModalWrapper', () => {
});
it('hides the clear status button', () => {
- expect(findClearStatusButton().isVisible()).toBe(false);
+ expect(findClearStatusButton().exists()).toBe(false);
});
});
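The EmojiPicker stub introduced above is a small pattern worth naming: reuse the real component's `props` definition so prop-based assertions and `findComponent` lookups keep working, but swap the template for an empty element so the heavy child never actually renders. A generic sketch with placeholder components (Parent, HeavyChild and the import paths are illustrative, not GitLab code):

    import { mount } from '@vue/test-utils';
    import Parent from '~/example/parent.vue'; // placeholder path
    import HeavyChild from '~/example/heavy_child.vue'; // placeholder path

    // Same props contract as the real child, but renders nothing expensive.
    const HeavyChildStub = {
      props: HeavyChild.props,
      template: '<div></div>',
    };

    const wrapper = mount(Parent, {
      stubs: { HeavyChild: HeavyChildStub },
    });

    // Assertions can target the stub directly, mirroring getEmojiPicker() above:
    // wrapper.findComponent(HeavyChildStub).props();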
diff --git a/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js b/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
index a286eeef14f..517b4f12559 100644
--- a/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
+++ b/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
@@ -120,6 +120,7 @@ describe('AssigneeAvatarLink component', () => {
it('passes the correct user id for REST API', () => {
createComponent({
tooltipHasName: true,
+ issuableType: 'issue',
user: userDataMock(),
});
@@ -131,9 +132,22 @@ describe('AssigneeAvatarLink component', () => {
createComponent({
tooltipHasName: true,
+ issuableType: 'issue',
user: { ...userDataMock(), id: convertToGraphQLId(TYPE_USER, userId) },
});
expect(findUserLink().attributes('data-user-id')).toBe(String(userId));
});
+
+ it.each`
+ issuableType | userId
+ ${'merge_request'} | ${undefined}
+ ${'issue'} | ${'1'}
+  `('sets data-user-id as $userId for $issuableType', ({ issuableType, userId }) => {
+ createComponent({
+ issuableType,
+ });
+
+ expect(findUserLink().attributes('data-user-id')).toBe(userId);
+ });
});
diff --git a/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js b/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
index 3ddd41c0bd4..8ebd2dabfc2 100644
--- a/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
+++ b/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
@@ -107,6 +107,7 @@ describe('SidebarDropdownWidget', () => {
currentMilestoneSpy = jest.fn().mockResolvedValue(noCurrentMilestoneResponse),
} = {}) => {
Vue.use(VueApollo);
+
mockApollo = createMockApollo([
[projectMilestonesQuery, projectMilestonesSpy],
[projectIssueMilestoneQuery, currentMilestoneSpy],
@@ -415,11 +416,9 @@ describe('SidebarDropdownWidget', () => {
describe('when currentAttribute is not equal to attribute id', () => {
describe('when update is successful', () => {
- beforeEach(() => {
+ it('calls setIssueAttribute mutation', () => {
findDropdownItemWithText(mockMilestone2.title).vm.$emit('click');
- });
- it('calls setIssueAttribute mutation', () => {
expect(milestoneMutationSpy).toHaveBeenCalledWith({
iid: mockIssue.iid,
attributeId: getIdFromGraphQLId(mockMilestone2.id),
@@ -428,6 +427,8 @@ describe('SidebarDropdownWidget', () => {
});
it('sets the value returned from the mutation to currentAttribute', async () => {
+ findDropdownItemWithText(mockMilestone2.title).vm.$emit('click');
+ await nextTick();
expect(findSelectedAttribute().text()).toBe(mockMilestone2.title);
});
});
diff --git a/spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js b/spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js
index 549ab99c6af..9a68940590d 100644
--- a/spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js
+++ b/spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js
@@ -8,15 +8,22 @@ import createFlash from '~/flash';
import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
import SidebarSubscriptionWidget from '~/sidebar/components/subscriptions/sidebar_subscriptions_widget.vue';
import issueSubscribedQuery from '~/sidebar/queries/issue_subscribed.query.graphql';
-import { issueSubscriptionsResponse } from '../../mock_data';
+import updateMergeRequestSubscriptionMutation from '~/sidebar/queries/update_merge_request_subscription.mutation.graphql';
+import toast from '~/vue_shared/plugins/global_toast';
+import {
+ issueSubscriptionsResponse,
+ mergeRequestSubscriptionMutationResponse,
+} from '../../mock_data';
jest.mock('~/flash');
+jest.mock('~/vue_shared/plugins/global_toast');
Vue.use(VueApollo);
describe('Sidebar Subscriptions Widget', () => {
let wrapper;
let fakeApollo;
+ let subscriptionMutationHandler;
const findEditableItem = () => wrapper.findComponent(SidebarEditableItem);
const findToggle = () => wrapper.findComponent(GlToggle);
@@ -24,18 +31,29 @@ describe('Sidebar Subscriptions Widget', () => {
const createComponent = ({
subscriptionsQueryHandler = jest.fn().mockResolvedValue(issueSubscriptionsResponse()),
+ issuableType = 'issue',
+ movedMrSidebar = false,
} = {}) => {
- fakeApollo = createMockApollo([[issueSubscribedQuery, subscriptionsQueryHandler]]);
+ subscriptionMutationHandler = jest
+ .fn()
+ .mockResolvedValue(mergeRequestSubscriptionMutationResponse);
+ fakeApollo = createMockApollo([
+ [issueSubscribedQuery, subscriptionsQueryHandler],
+ [updateMergeRequestSubscriptionMutation, subscriptionMutationHandler],
+ ]);
wrapper = shallowMount(SidebarSubscriptionWidget, {
apolloProvider: fakeApollo,
provide: {
canUpdate: true,
+ glFeatures: {
+ movedMrSidebar,
+ },
},
propsData: {
fullPath: 'group/project',
iid: '1',
- issuableType: 'issue',
+ issuableType,
},
stubs: {
SidebarEditableItem,
@@ -128,4 +146,21 @@ describe('Sidebar Subscriptions Widget', () => {
expect(createFlash).toHaveBeenCalled();
});
+
+ describe('merge request', () => {
+ it('displays toast when mutation is successful', async () => {
+ createComponent({
+ issuableType: 'merge_request',
+ movedMrSidebar: true,
+ subscriptionsQueryHandler: jest.fn().mockResolvedValue(issueSubscriptionsResponse(true)),
+ });
+ await waitForPromises();
+
+ await wrapper.find('.dropdown-item').trigger('click');
+
+ await waitForPromises();
+
+ expect(toast).toHaveBeenCalledWith('Notifications turned on.');
+ });
+ });
});
diff --git a/spec/frontend/sidebar/lock/issuable_lock_form_spec.js b/spec/frontend/sidebar/lock/issuable_lock_form_spec.js
index 8478d3d674d..bb757fdf63b 100644
--- a/spec/frontend/sidebar/lock/issuable_lock_form_spec.js
+++ b/spec/frontend/sidebar/lock/issuable_lock_form_spec.js
@@ -1,17 +1,23 @@
import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
+import Vue, { nextTick } from 'vue';
+import Vuex from 'vuex';
import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import { createStore as createMrStore } from '~/mr_notes/stores';
import createStore from '~/notes/stores';
import EditForm from '~/sidebar/components/lock/edit_form.vue';
import IssuableLockForm from '~/sidebar/components/lock/issuable_lock_form.vue';
+import toast from '~/vue_shared/plugins/global_toast';
import { ISSUABLE_TYPE_ISSUE, ISSUABLE_TYPE_MR } from './constants';
+jest.mock('~/vue_shared/plugins/global_toast');
+
+Vue.use(Vuex);
+
describe('IssuableLockForm', () => {
let wrapper;
let store;
let issuableType; // Either ISSUABLE_TYPE_ISSUE or ISSUABLE_TYPE_MR
+ let updateLockedAttribute;
const setIssuableType = (pageType) => {
issuableType = pageType;
@@ -29,16 +35,27 @@ describe('IssuableLockForm', () => {
store = createStore();
store.getters.getNoteableData.targetType = 'issue';
} else {
- store = createMrStore();
+ updateLockedAttribute = jest.fn().mockResolvedValue();
+ store = new Vuex.Store({
+ getters: {
+ getNoteableData: () => ({ targetType: issuableType }),
+ },
+ actions: {
+ updateLockedAttribute,
+ },
+ });
}
store.getters.getNoteableData.discussion_locked = isLocked;
};
- const createComponent = ({ props = {} }) => {
+ const createComponent = ({ props = {} }, movedMrSidebar = false) => {
wrapper = shallowMount(IssuableLockForm, {
store,
provide: {
fullPath: '',
+ glFeatures: {
+ movedMrSidebar,
+ },
},
propsData: {
isEditable: true,
@@ -144,4 +161,24 @@ describe('IssuableLockForm', () => {
});
});
});
+
+ describe('merge requests', () => {
+ beforeEach(() => {
+ setIssuableType('merge_request');
+ });
+
+ it.each`
+ locked | message
+ ${true} | ${'Merge request locked.'}
+ ${false} | ${'Merge request unlocked.'}
+    `('displays $message when locked is $locked', async ({ locked, message }) => {
+ initStore(locked);
+
+ createComponent({}, true);
+
+ await wrapper.find('.dropdown-item').trigger('click');
+
+ expect(toast).toHaveBeenCalledWith(message);
+ });
+ });
});
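Two idioms recur in the merge request cases above and elsewhere in this diff: mocking `~/vue_shared/plugins/global_toast` so the default export becomes a `jest.fn()` that can be asserted on, and `it.each` with a tagged template literal for table-driven cases. A stripped-down sketch that combines both; `notifyLockChange` is a stand-in for whatever the component does on click, not real GitLab code:

    import toast from '~/vue_shared/plugins/global_toast';

    jest.mock('~/vue_shared/plugins/global_toast');

    // Placeholder for the behaviour under test.
    const notifyLockChange = (locked) =>
      toast(locked ? 'Merge request locked.' : 'Merge request unlocked.');

    describe('lock toasts', () => {
      it.each`
        locked   | message
        ${true}  | ${'Merge request locked.'}
        ${false} | ${'Merge request unlocked.'}
      `('shows "$message" when locked is $locked', ({ locked, message }) => {
        notifyLockChange(locked);

        expect(toast).toHaveBeenCalledWith(message);
      });
    });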
diff --git a/spec/frontend/sidebar/mock_data.js b/spec/frontend/sidebar/mock_data.js
index 2b421037339..229757ff40c 100644
--- a/spec/frontend/sidebar/mock_data.js
+++ b/spec/frontend/sidebar/mock_data.js
@@ -321,6 +321,19 @@ export const issueSubscriptionsResponse = (subscribed = false, emailsDisabled =
},
});
+export const mergeRequestSubscriptionMutationResponse = {
+ data: {
+ updateIssuableSubscription: {
+ issuable: {
+ __typename: 'MergeRequest',
+ id: 'gid://gitlab/MergeRequest/4',
+ subscribed: true,
+ },
+ errors: [],
+ },
+ },
+};
+
export const issuableQueryResponse = {
data: {
workspace: {
diff --git a/spec/frontend/snippets/components/edit_spec.js b/spec/frontend/snippets/components/edit_spec.js
index 8a767765149..f49ceb2fede 100644
--- a/spec/frontend/snippets/components/edit_spec.js
+++ b/spec/frontend/snippets/components/edit_spec.js
@@ -5,6 +5,7 @@ import { merge } from 'lodash';
import VueApollo, { ApolloMutation } from 'vue-apollo';
import { useFakeDate } from 'helpers/fake_date';
import createMockApollo from 'helpers/mock_apollo_helper';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import waitForPromises from 'helpers/wait_for_promises';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import GetSnippetQuery from 'shared_queries/snippet/snippet.query.graphql';
@@ -96,6 +97,8 @@ describe('Snippet Edit app', () => {
const originalRelativeUrlRoot = gon.relative_url_root;
beforeEach(() => {
+ stubPerformanceWebAPI();
+
getSpy = jest.fn().mockResolvedValue(createQueryResponse());
// See `mutateSpy` declaration comment for why we send a key
diff --git a/spec/frontend/snippets/components/show_spec.js b/spec/frontend/snippets/components/show_spec.js
index c73bf8f80a2..b29ed97099f 100644
--- a/spec/frontend/snippets/components/show_spec.js
+++ b/spec/frontend/snippets/components/show_spec.js
@@ -12,6 +12,7 @@ import {
SNIPPET_VISIBILITY_PUBLIC,
} from '~/snippets/constants';
import CloneDropdownButton from '~/vue_shared/components/clone_dropdown.vue';
+import { stubPerformanceWebAPI } from 'helpers/performance';
describe('Snippet view app', () => {
let wrapper;
@@ -45,6 +46,10 @@ describe('Snippet view app', () => {
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findEmbedDropdown = () => wrapper.findComponent(EmbedDropdown);
+ beforeEach(() => {
+ stubPerformanceWebAPI();
+ });
+
afterEach(() => {
wrapper.destroy();
});
diff --git a/spec/frontend/surveys/merge_request_performance/app_spec.js b/spec/frontend/surveys/merge_request_performance/app_spec.js
new file mode 100644
index 00000000000..6e8cc660b1d
--- /dev/null
+++ b/spec/frontend/surveys/merge_request_performance/app_spec.js
@@ -0,0 +1,143 @@
+import { nextTick } from 'vue';
+import { GlButton, GlSprintf } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { mockTracking } from 'helpers/tracking_helper';
+import { makeMockUserCalloutDismisser } from 'helpers/mock_user_callout_dismisser';
+import MergeRequestExperienceSurveyApp from '~/surveys/merge_request_experience/app.vue';
+import SatisfactionRate from '~/surveys/components/satisfaction_rate.vue';
+
+describe('MergeRequestExperienceSurveyApp', () => {
+ let trackingSpy;
+ let wrapper;
+ let dismiss;
+ let dismisserComponent;
+
+ const findCloseButton = () =>
+ wrapper
+ .findAllComponents(GlButton)
+ .filter((button) => button.attributes('aria-label') === 'Close')
+ .at(0);
+
+ const createWrapper = ({ shouldShowCallout = true } = {}) => {
+ dismiss = jest.fn();
+ dismisserComponent = makeMockUserCalloutDismisser({
+ dismiss,
+ shouldShowCallout,
+ });
+ wrapper = shallowMountExtended(MergeRequestExperienceSurveyApp, {
+ stubs: {
+ UserCalloutDismisser: dismisserComponent,
+ GlSprintf,
+ },
+ });
+ };
+
+ describe('when user callout is visible', () => {
+ beforeEach(() => {
+ createWrapper();
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
+
+ it('shows survey', async () => {
+ expect(wrapper.html()).toContain('Overall, how satisfied are you with merge requests?');
+ expect(wrapper.findComponent(SatisfactionRate).exists()).toBe(true);
+ expect(wrapper.emitted().close).toBe(undefined);
+ });
+
+ it('triggers user callout on close', async () => {
+ findCloseButton().vm.$emit('click');
+ expect(dismiss).toHaveBeenCalledTimes(1);
+ });
+
+ it('emits close event on close button click', async () => {
+ findCloseButton().vm.$emit('click');
+ expect(wrapper.emitted()).toMatchObject({ close: [[]] });
+ });
+
+ it('applies correct feature name for user callout', () => {
+ expect(wrapper.findComponent(dismisserComponent).props('featureName')).toBe(
+ 'mr_experience_survey',
+ );
+ });
+
+ it('dismisses user callout on survey rate', async () => {
+ const rate = wrapper.findComponent(SatisfactionRate);
+ expect(dismiss).not.toHaveBeenCalled();
+ rate.vm.$emit('rate', 5);
+ expect(dismiss).toHaveBeenCalledTimes(1);
+ });
+
+ it('steps through survey steps', async () => {
+ const rate = wrapper.findComponent(SatisfactionRate);
+ rate.vm.$emit('rate', 5);
+ await nextTick();
+ expect(wrapper.text()).toContain(
+ 'How satisfied are you with speed/performance of merge requests?',
+ );
+ });
+
+ it('tracks survey rates', async () => {
+ const rate = wrapper.findComponent(SatisfactionRate);
+ rate.vm.$emit('rate', 5);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'survey:mr_experience', {
+ value: 5,
+ label: 'overall',
+ });
+ rate.vm.$emit('rate', 4);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'survey:mr_experience', {
+ value: 4,
+ label: 'performance',
+ });
+ });
+
+ it('shows legal note', async () => {
+ expect(wrapper.text()).toContain(
+ 'By continuing, you acknowledge that responses will be used to improve GitLab and in accordance with the GitLab Privacy Policy.',
+ );
+ });
+
+ it('hides legal note after first step', async () => {
+ const rate = wrapper.findComponent(SatisfactionRate);
+ rate.vm.$emit('rate', 5);
+ await nextTick();
+ expect(wrapper.text()).not.toContain(
+ 'By continuing, you acknowledge that responses will be used to improve GitLab and in accordance with the GitLab Privacy Policy.',
+ );
+ });
+
+ it('shows disappearing thanks message', async () => {
+ const rate = wrapper.findComponent(SatisfactionRate);
+ rate.vm.$emit('rate', 5);
+ await nextTick();
+ rate.vm.$emit('rate', 5);
+ await nextTick();
+ expect(wrapper.text()).toContain('Thank you for your feedback!');
+ expect(wrapper.emitted()).toMatchObject({});
+ jest.runOnlyPendingTimers();
+ expect(wrapper.emitted()).toMatchObject({ close: [[]] });
+ });
+ });
+
+ describe('when user callout is hidden', () => {
+ beforeEach(() => {
+ createWrapper({ shouldShowCallout: false });
+ });
+
+ it('emits close event', async () => {
+ expect(wrapper.emitted()).toMatchObject({ close: [[]] });
+ });
+ });
+
+ describe('when Escape key is pressed', () => {
+ beforeEach(() => {
+ createWrapper();
+ const event = new KeyboardEvent('keyup', { key: 'Escape' });
+ document.dispatchEvent(event);
+ });
+
+ it('emits close event', async () => {
+ expect(wrapper.emitted()).toMatchObject({ close: [[]] });
+ expect(dismiss).toHaveBeenCalledTimes(1);
+ });
+ });
+});
diff --git a/spec/frontend/tabs/index_spec.js b/spec/frontend/tabs/index_spec.js
index 67e3d707adb..1d61d38a488 100644
--- a/spec/frontend/tabs/index_spec.js
+++ b/spec/frontend/tabs/index_spec.js
@@ -1,9 +1,16 @@
-import { GlTabsBehavior, TAB_SHOWN_EVENT } from '~/tabs';
+import { GlTabsBehavior, TAB_SHOWN_EVENT, HISTORY_TYPE_HASH } from '~/tabs';
import { ACTIVE_PANEL_CLASS, ACTIVE_TAB_CLASSES } from '~/tabs/constants';
+import { getLocationHash } from '~/lib/utils/url_utility';
+import { NO_SCROLL_TO_HASH_CLASS } from '~/lib/utils/common_utils';
import { getFixture, setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import setWindowLocation from 'helpers/set_window_location_helper';
const tabsFixture = getFixture('tabs/tabs.html');
+global.CSS = {
+ escape: (val) => val,
+};
+
describe('GlTabsBehavior', () => {
let glTabs;
let tabShownEventSpy;
@@ -41,6 +48,7 @@ describe('GlTabsBehavior', () => {
});
expect(panel.classList.contains(ACTIVE_PANEL_CLASS)).toBe(true);
+ expect(panel.classList.contains(NO_SCROLL_TO_HASH_CLASS)).toBe(true);
};
const expectInactiveTabAndPanel = (name) => {
@@ -67,6 +75,7 @@ describe('GlTabsBehavior', () => {
});
expect(panel.classList.contains(ACTIVE_PANEL_CLASS)).toBe(false);
+ expect(panel.classList.contains(NO_SCROLL_TO_HASH_CLASS)).toBe(true);
};
const expectGlTabShownEvent = (name) => {
@@ -263,4 +272,98 @@ describe('GlTabsBehavior', () => {
expectInactiveTabAndPanel('foo');
});
});
+
+ describe('using history=hash', () => {
+ const defaultTab = 'foo';
+ let tab;
+ let tabsEl;
+
+ beforeEach(() => {
+ setHTMLFixture(tabsFixture);
+ tabsEl = findByTestId('tabs');
+ });
+
+ afterEach(() => {
+ glTabs.destroy();
+ resetHTMLFixture();
+ });
+
+ describe('when a hash exists onInit', () => {
+ beforeEach(() => {
+ tab = 'bar';
+ setWindowLocation(`http://foo.com/index#${tab}`);
+ glTabs = new GlTabsBehavior(tabsEl, { history: HISTORY_TYPE_HASH });
+ });
+
+ it('sets the active tab to the hash and preserves hash', () => {
+ expectActiveTabAndPanel(tab);
+ expect(getLocationHash()).toBe(tab);
+ });
+ });
+
+ describe('when a hash does not exist onInit', () => {
+ beforeEach(() => {
+ setWindowLocation(`http://foo.com/index`);
+ glTabs = new GlTabsBehavior(tabsEl, { history: HISTORY_TYPE_HASH });
+ });
+
+ it('sets the active tab to the first tab and sets hash', () => {
+ expectActiveTabAndPanel(defaultTab);
+ expect(getLocationHash()).toBe(defaultTab);
+ });
+ });
+
+ describe('clicking on an inactive tab', () => {
+ beforeEach(() => {
+ tab = 'qux';
+ setWindowLocation(`http://foo.com/index`);
+ glTabs = new GlTabsBehavior(tabsEl, { history: HISTORY_TYPE_HASH });
+
+ findTab(tab).click();
+ });
+
+ it('changes the tabs and updates the hash', () => {
+ expectInactiveTabAndPanel(defaultTab);
+ expectActiveTabAndPanel(tab);
+ expect(getLocationHash()).toBe(tab);
+ });
+ });
+
+ describe('keyboard navigation', () => {
+ const secondTab = 'bar';
+
+ beforeEach(() => {
+ setWindowLocation(`http://foo.com/index`);
+ glTabs = new GlTabsBehavior(tabsEl, { history: HISTORY_TYPE_HASH });
+ });
+
+ it.each(['ArrowRight', 'ArrowDown'])(
+ 'pressing %s moves to next tab and updates hash',
+ (code) => {
+ expectActiveTabAndPanel(defaultTab);
+
+ triggerKeyDown(code, glTabs.activeTab);
+
+ expectInactiveTabAndPanel(defaultTab);
+ expectActiveTabAndPanel(secondTab);
+ expect(getLocationHash()).toBe(secondTab);
+ },
+ );
+
+ it.each(['ArrowLeft', 'ArrowUp'])(
+ 'pressing %s moves to previous tab and updates hash',
+ (code) => {
+ // First, make the 2nd tab active
+ findTab(secondTab).click();
+ expectActiveTabAndPanel(secondTab);
+
+ triggerKeyDown(code, glTabs.activeTab);
+
+ expectInactiveTabAndPanel(secondTab);
+ expectActiveTabAndPanel(defaultTab);
+ expect(getLocationHash()).toBe(defaultTab);
+ },
+ );
+ });
+ });
});
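The history=hash cases above describe the contract rather than the implementation: on init the behavior activates the tab named in the location hash (falling back to the first tab and writing its name back), and every click or arrow-key change keeps the hash in sync. The `global.CSS.escape` shim suggests the hash is fed into a selector at some point, since jsdom does not always provide `CSS.escape`. A hypothetical, dependency-free sketch of that contract, not GlTabsBehavior's actual code:

    // Hypothetical helper: pick the active tab from the hash, defaulting to the
    // first tab name, and keep location.hash in sync without adding history entries.
    const syncActiveTabWithHash = (tabNames) => {
      const current = window.location.hash.replace(/^#/, '');
      const active = tabNames.includes(current) ? current : tabNames[0];
      window.history.replaceState(null, '', `#${active}`);
      return active;
    };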
diff --git a/spec/frontend/test_setup.js b/spec/frontend/test_setup.js
index 6c336152e9a..b4626625f31 100644
--- a/spec/frontend/test_setup.js
+++ b/spec/frontend/test_setup.js
@@ -2,8 +2,6 @@
import 'helpers/shared_test_setup';
import { initializeTestTimeout } from 'helpers/timeout';
-jest.mock('~/lib/utils/axios_utils', () => jest.requireActual('helpers/mocks/axios_utils'));
-
initializeTestTimeout(process.env.CI ? 6000 : 500);
afterEach(() =>
diff --git a/spec/frontend/tracking/tracking_spec.js b/spec/frontend/tracking/tracking_spec.js
index 08da3a9a465..4871644d99f 100644
--- a/spec/frontend/tracking/tracking_spec.js
+++ b/spec/frontend/tracking/tracking_spec.js
@@ -412,7 +412,7 @@ describe('Tracking', () => {
Tracking.setAnonymousUrls();
expect(snowplowSpy).not.toHaveBeenCalledWith('setReferrerUrl', testUrl);
- expect(localStorage.getItem(URLS_CACHE_STORAGE_KEY)).not.toContain(oldTimestamp);
+ expect(localStorage.getItem(URLS_CACHE_STORAGE_KEY)).not.toContain(oldTimestamp.toString());
});
});
});
diff --git a/spec/frontend/user_lists/store/edit/mutations_spec.js b/spec/frontend/user_lists/store/edit/mutations_spec.js
index 7971906429b..e07d9c0a1b5 100644
--- a/spec/frontend/user_lists/store/edit/mutations_spec.js
+++ b/spec/frontend/user_lists/store/edit/mutations_spec.js
@@ -35,7 +35,7 @@ describe('User List Edit Mutations', () => {
});
});
- describe(types.RECIEVE_USER_LIST_ERROR, () => {
+ describe(types.RECEIVE_USER_LIST_ERROR, () => {
beforeEach(() => {
mutations[types.RECEIVE_USER_LIST_ERROR](state, ['network error']);
});
@@ -44,7 +44,7 @@ describe('User List Edit Mutations', () => {
expect(state.status).toBe(statuses.ERROR);
});
- it('sets the error message to the recieved one', () => {
+ it('sets the error message to the received one', () => {
expect(state.errorMessage).toEqual(['network error']);
});
});
diff --git a/spec/frontend/user_lists/store/new/mutations_spec.js b/spec/frontend/user_lists/store/new/mutations_spec.js
index a928849e941..647ddd9c062 100644
--- a/spec/frontend/user_lists/store/new/mutations_spec.js
+++ b/spec/frontend/user_lists/store/new/mutations_spec.js
@@ -9,7 +9,7 @@ describe('User List Edit Mutations', () => {
state = createState({ projectId: '1' });
});
- describe(types.RECIEVE_USER_LIST_ERROR, () => {
+ describe(types.RECEIVE_USER_LIST_ERROR, () => {
beforeEach(() => {
mutations[types.RECEIVE_CREATE_USER_LIST_ERROR](state, ['network error']);
});
diff --git a/spec/frontend/user_popovers_spec.js b/spec/frontend/user_popovers_spec.js
index 1544fed5240..b171c8fc9ed 100644
--- a/spec/frontend/user_popovers_spec.js
+++ b/spec/frontend/user_popovers_spec.js
@@ -12,12 +12,8 @@ jest.mock('~/api/user_api', () => ({
describe('User Popovers', () => {
const fixtureTemplate = 'merge_requests/merge_request_with_mentions.html';
- const selector = '.js-user-link, .gfm-project_member';
- const findFixtureLinks = () => {
- return Array.from(document.querySelectorAll(selector)).filter(
- ({ dataset }) => dataset.user || dataset.userId,
- );
- };
+ const selector = '.js-user-link[data-user], .js-user-link[data-user-id]';
+ const findFixtureLinks = () => Array.from(document.querySelectorAll(selector));
const createUserLink = () => {
const link = document.createElement('a');
@@ -95,6 +91,24 @@ describe('User Popovers', () => {
});
});
+ it('does not initialize the popovers for group references', async () => {
+ const [groupLink] = Array.from(document.querySelectorAll('.js-user-link[data-group]'));
+
+ triggerEvent('mouseover', groupLink);
+ jest.runOnlyPendingTimers();
+
+ expect(findPopovers().length).toBe(0);
+ });
+
+ it('does not initialize the popovers for @all references', async () => {
+ const [projectLink] = Array.from(document.querySelectorAll('.js-user-link[data-project]'));
+
+ triggerEvent('mouseover', projectLink);
+ jest.runOnlyPendingTimers();
+
+ expect(findPopovers().length).toBe(0);
+ });
+
it('does not initialize the user popovers twice for the same element', async () => {
const [firstUserLink] = findFixtureLinks();
triggerEvent('mouseover', firstUserLink);
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
index 6386746aee4..6db82cedd80 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
@@ -2,6 +2,9 @@ import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import WidgetRebase from '~/vue_merge_request_widget/components/states/mr_widget_rebase.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
+import toast from '~/vue_shared/plugins/global_toast';
+
+jest.mock('~/vue_shared/plugins/global_toast');
let wrapper;
@@ -261,6 +264,7 @@ describe('Merge request widget rebase component', () => {
return Promise.resolve({
data: {
rebase_in_progress: false,
+ should_be_rebased: false,
merge_error: null,
},
});
@@ -280,6 +284,7 @@ describe('Merge request widget rebase component', () => {
await nextTick();
expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetRebaseSuccess');
+ expect(toast).toHaveBeenCalledWith('Rebase completed');
});
});
});
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_action_button_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_action_button_spec.js
index 8c5036e35f6..7e7438bcc0f 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_action_button_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_action_button_spec.js
@@ -60,7 +60,7 @@ describe('Deployment action button', () => {
it('renders slot and icon prop correctly', () => {
expect(wrapper.find(GlIcon).exists()).toBe(true);
- expect(wrapper.text()).toContain(actionButtonMocks[DEPLOYING]);
+ expect(wrapper.text()).toContain(actionButtonMocks[DEPLOYING].toString());
});
});
diff --git a/spec/frontend/vue_mr_widget/extensions/test_report/index_spec.js b/spec/frontend/vue_mr_widget/extensions/test_report/index_spec.js
index da4b990c078..5c1d3c8e8e8 100644
--- a/spec/frontend/vue_mr_widget/extensions/test_report/index_spec.js
+++ b/spec/frontend/vue_mr_widget/extensions/test_report/index_spec.js
@@ -1,4 +1,4 @@
-import { GlButton } from '@gitlab/ui';
+import { nextTick } from 'vue';
import MockAdapter from 'axios-mock-adapter';
import testReportExtension from '~/vue_merge_request_widget/extensions/test_report';
import { i18n } from '~/vue_merge_request_widget/extensions/test_report/constants';
@@ -38,7 +38,8 @@ describe('Test report extension', () => {
};
const findToggleCollapsedButton = () => wrapper.findByTestId('toggle-button');
- const findTertiaryButton = () => wrapper.find(GlButton);
+ const findFullReportLink = () => wrapper.findByTestId('full-report-link');
+ const findCopyFailedSpecsBtn = () => wrapper.findByTestId('copy-failed-specs-btn');
const findAllExtensionListItems = () => wrapper.findAllByTestId('extension-list-item');
const findModal = () => wrapper.find(TestCaseDetails);
@@ -72,14 +73,23 @@ describe('Test report extension', () => {
});
describe('summary', () => {
- it('displays loading text', () => {
+ it('displays loading state initially', () => {
mockApi(httpStatusCodes.OK);
createComponent();
expect(wrapper.text()).toContain(i18n.loading);
});
- it('displays failed loading text', async () => {
+ it('with a 204 response, continues to display loading state', async () => {
+ mockApi(httpStatusCodes.NO_CONTENT, '');
+ createComponent();
+
+ await waitForPromises();
+
+ expect(wrapper.text()).toContain(i18n.loading);
+ });
+
+ it('with an error response, displays failed to load text', async () => {
mockApi(httpStatusCodes.INTERNAL_SERVER_ERROR);
createComponent();
@@ -121,8 +131,57 @@ describe('Test report extension', () => {
await waitForPromises();
- expect(findTertiaryButton().text()).toBe('Full report');
- expect(findTertiaryButton().attributes('href')).toBe('pipeline/path/test_report');
+ expect(findFullReportLink().text()).toBe('Full report');
+ expect(findFullReportLink().attributes('href')).toBe('pipeline/path/test_report');
+ });
+
+ it('hides copy failed tests button when there are no failing tests', async () => {
+ mockApi(httpStatusCodes.OK);
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findCopyFailedSpecsBtn().exists()).toBe(false);
+ });
+
+ it('displays copy failed tests button when there are failing tests', async () => {
+ mockApi(httpStatusCodes.OK, newFailedTestReports);
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findCopyFailedSpecsBtn().exists()).toBe(true);
+ expect(findCopyFailedSpecsBtn().text()).toBe(i18n.copyFailedSpecs);
+ expect(findCopyFailedSpecsBtn().attributes('data-clipboard-text')).toBe(
+ 'spec/file_1.rb spec/file_2.rb',
+ );
+ });
+
+ it('copy failed tests button updates tooltip text when clicked', async () => {
+ mockApi(httpStatusCodes.OK, newFailedTestReports);
+ createComponent();
+
+ await waitForPromises();
+
+ // original tooltip shows up
+ expect(findCopyFailedSpecsBtn().attributes()).toMatchObject({
+ title: i18n.copyFailedSpecsTooltip,
+ });
+
+ await findCopyFailedSpecsBtn().trigger('click');
+
+ // tooltip text is replaced for 1 second
+ expect(findCopyFailedSpecsBtn().attributes()).toMatchObject({
+ title: 'Copied',
+ });
+
+ jest.runAllTimers();
+ await nextTick();
+
+ // tooltip reverts back to original string
+ expect(findCopyFailedSpecsBtn().attributes()).toMatchObject({
+ title: i18n.copyFailedSpecsTooltip,
+ });
});
it('shows an error when a suite has a parsing error', async () => {
diff --git a/spec/frontend/vue_mr_widget/extensions/test_report/utils_spec.js b/spec/frontend/vue_mr_widget/extensions/test_report/utils_spec.js
new file mode 100644
index 00000000000..69ea70549fe
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/extensions/test_report/utils_spec.js
@@ -0,0 +1,242 @@
+import * as utils from '~/vue_merge_request_widget/extensions/test_report/utils';
+
+describe('test report widget extension utils', () => {
+  describe('summaryTextBuilder', () => {
+ it('should render text for no changed results in multiple tests', () => {
+ const name = 'Test summary';
+ const data = { total: 10 };
+ const result = utils.summaryTextBuilder(name, data);
+
+ expect(result).toBe(
+ 'Test summary: %{strong_start}no%{strong_end} changed test results, %{strong_start}10%{strong_end} total tests',
+ );
+ });
+
+ it('should render text for no changed results in one test', () => {
+ const name = 'Test summary';
+ const data = { total: 1 };
+ const result = utils.summaryTextBuilder(name, data);
+
+ expect(result).toBe(
+ 'Test summary: %{strong_start}no%{strong_end} changed test results, %{strong_start}1%{strong_end} total test',
+ );
+ });
+
+ it('should render text for multiple failed results', () => {
+ const name = 'Test summary';
+ const data = { failed: 3, total: 10 };
+ const result = utils.summaryTextBuilder(name, data);
+
+ expect(result).toBe(
+ 'Test summary: %{strong_start}3%{strong_end} failed, %{strong_start}10%{strong_end} total tests',
+ );
+ });
+
+ it('should render text for multiple errored results', () => {
+ const name = 'Test summary';
+ const data = { errored: 7, total: 10 };
+ const result = utils.summaryTextBuilder(name, data);
+
+ expect(result).toBe(
+ 'Test summary: %{strong_start}7%{strong_end} errors, %{strong_start}10%{strong_end} total tests',
+ );
+ });
+
+ it('should render text for multiple fixed results', () => {
+ const name = 'Test summary';
+ const data = { resolved: 4, total: 10 };
+ const result = utils.summaryTextBuilder(name, data);
+
+ expect(result).toBe(
+ 'Test summary: %{strong_start}4%{strong_end} fixed test results, %{strong_start}10%{strong_end} total tests',
+ );
+ });
+
+ it('should render text for multiple fixed, and multiple failed results', () => {
+ const name = 'Test summary';
+ const data = { failed: 3, resolved: 4, total: 10 };
+ const result = utils.summaryTextBuilder(name, data);
+
+ expect(result).toBe(
+ 'Test summary: %{strong_start}3%{strong_end} failed and %{strong_start}4%{strong_end} fixed test results, %{strong_start}10%{strong_end} total tests',
+ );
+ });
+
+ it('should render text for a singular fixed, and a singular failed result', () => {
+ const name = 'Test summary';
+ const data = { failed: 1, resolved: 1, total: 10 };
+ const result = utils.summaryTextBuilder(name, data);
+
+ expect(result).toBe(
+ 'Test summary: %{strong_start}1%{strong_end} failed and %{strong_start}1%{strong_end} fixed test result, %{strong_start}10%{strong_end} total tests',
+ );
+ });
+
+ it('should render text for singular failed, errored, and fixed results', () => {
+ const name = 'Test summary';
+ const data = { failed: 1, errored: 1, resolved: 1, total: 10 };
+ const result = utils.summaryTextBuilder(name, data);
+
+ expect(result).toBe(
+ 'Test summary: %{strong_start}1%{strong_end} failed, %{strong_start}1%{strong_end} error and %{strong_start}1%{strong_end} fixed test result, %{strong_start}10%{strong_end} total tests',
+ );
+ });
+
+ it('should render text for multiple failed, errored, and fixed results', () => {
+ const name = 'Test summary';
+ const data = { failed: 2, errored: 3, resolved: 4, total: 10 };
+ const result = utils.summaryTextBuilder(name, data);
+
+ expect(result).toBe(
+ 'Test summary: %{strong_start}2%{strong_end} failed, %{strong_start}3%{strong_end} errors and %{strong_start}4%{strong_end} fixed test results, %{strong_start}10%{strong_end} total tests',
+ );
+ });
+ });
+
+ describe('reportTextBuilder', () => {
+ const name = 'Rspec';
+
+ it('should render text for no changed results in multiple tests', () => {
+ const data = { name, summary: { total: 10 } };
+ const result = utils.reportTextBuilder(data);
+
+ expect(result).toBe('Rspec: no changed test results, 10 total tests');
+ });
+
+ it('should render text for no changed results in one test', () => {
+ const data = { name, summary: { total: 1 } };
+ const result = utils.reportTextBuilder(data);
+
+ expect(result).toBe('Rspec: no changed test results, 1 total test');
+ });
+
+ it('should render text for multiple failed results', () => {
+ const data = { name, summary: { failed: 3, total: 10 } };
+ const result = utils.reportTextBuilder(data);
+
+ expect(result).toBe('Rspec: 3 failed, 10 total tests');
+ });
+
+ it('should render text for multiple errored results', () => {
+ const data = { name, summary: { errored: 7, total: 10 } };
+ const result = utils.reportTextBuilder(data);
+
+ expect(result).toBe('Rspec: 7 errors, 10 total tests');
+ });
+
+ it('should render text for multiple fixed results', () => {
+ const data = { name, summary: { resolved: 4, total: 10 } };
+ const result = utils.reportTextBuilder(data);
+
+ expect(result).toBe('Rspec: 4 fixed test results, 10 total tests');
+ });
+
+ it('should render text for multiple fixed, and multiple failed results', () => {
+ const data = { name, summary: { failed: 3, resolved: 4, total: 10 } };
+ const result = utils.reportTextBuilder(data);
+
+ expect(result).toBe('Rspec: 3 failed and 4 fixed test results, 10 total tests');
+ });
+
+ it('should render text for a singular fixed, and a singular failed result', () => {
+ const data = { name, summary: { failed: 1, resolved: 1, total: 10 } };
+ const result = utils.reportTextBuilder(data);
+
+ expect(result).toBe('Rspec: 1 failed and 1 fixed test result, 10 total tests');
+ });
+
+ it('should render text for singular failed, errored, and fixed results', () => {
+ const data = { name, summary: { failed: 1, errored: 1, resolved: 1, total: 10 } };
+ const result = utils.reportTextBuilder(data);
+
+ expect(result).toBe('Rspec: 1 failed, 1 error and 1 fixed test result, 10 total tests');
+ });
+
+ it('should render text for multiple failed, errored, and fixed results', () => {
+ const data = { name, summary: { failed: 2, errored: 3, resolved: 4, total: 10 } };
+ const result = utils.reportTextBuilder(data);
+
+ expect(result).toBe('Rspec: 2 failed, 3 errors and 4 fixed test results, 10 total tests');
+ });
+ });
+
+ describe('recentFailuresTextBuilder', () => {
+ it.each`
+ recentlyFailed | failed | expected
+ ${0} | ${1} | ${''}
+ ${1} | ${1} | ${'1 out of 1 failed test has failed more than once in the last 14 days'}
+ ${1} | ${2} | ${'1 out of 2 failed tests has failed more than once in the last 14 days'}
+ ${2} | ${3} | ${'2 out of 3 failed tests have failed more than once in the last 14 days'}
+ `(
+ 'should render summary for $recentlyFailed out of $failed failures',
+ ({ recentlyFailed, failed, expected }) => {
+ const result = utils.recentFailuresTextBuilder({ recentlyFailed, failed });
+
+ expect(result).toBe(expected);
+ },
+ );
+ });
+
+ describe('countRecentlyFailedTests', () => {
+ it('counts tests with more than one recent failure in a report', () => {
+ const report = {
+ new_failures: [{ recent_failures: { count: 2 } }],
+ existing_failures: [{ recent_failures: { count: 1 } }],
+ resolved_failures: [{ recent_failures: { count: 20 } }, { recent_failures: { count: 5 } }],
+ };
+ const result = utils.countRecentlyFailedTests(report);
+
+ expect(result).toBe(3);
+ });
+
+ it('counts tests with more than one recent failure in an array of reports', () => {
+ const reports = [
+ {
+ new_failures: [{ recent_failures: { count: 2 } }],
+ existing_failures: [
+ { recent_failures: { count: 20 } },
+ { recent_failures: { count: 5 } },
+ ],
+ resolved_failures: [{ recent_failures: { count: 2 } }],
+ },
+ {
+ new_failures: [{ recent_failures: { count: 8 } }, { recent_failures: { count: 14 } }],
+ existing_failures: [{ recent_failures: { count: 1 } }],
+ resolved_failures: [{ recent_failures: { count: 7 } }, { recent_failures: { count: 5 } }],
+ },
+ ];
+ const result = utils.countRecentlyFailedTests(reports);
+
+ expect(result).toBe(8);
+ });
+
+ it.each([
+ [],
+ {},
+ null,
+ undefined,
+ { new_failures: undefined },
+ [{ existing_failures: null }],
+ { resolved_failures: [{}] },
+ [{ new_failures: [{ recent_failures: {} }] }],
+ ])('returns 0 when subject is %s', (subject) => {
+ const result = utils.countRecentlyFailedTests(subject);
+
+ expect(result).toBe(0);
+ });
+ });
+
+ describe('formatFilePath', () => {
+ it.each`
+ file | expected
+ ${'./test.js'} | ${'test.js'}
+ ${'/test.js'} | ${'test.js'}
+ ${'.//////////////test.js'} | ${'test.js'}
+ ${'test.js'} | ${'test.js'}
+ ${'mock/path./test.js'} | ${'mock/path./test.js'}
+ ${'./mock/path./test.js'} | ${'mock/path./test.js'}
+ `('should format $file to be $expected', ({ file, expected }) => {
+ expect(utils.formatFilePath(file)).toBe(expected);
+ });
+ });
+});
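The tables above pin the helpers down tightly enough to read an implementation off them: `formatFilePath` strips any leading './' or '/' run while leaving interior dots alone, and `countRecentlyFailedTests` counts tests whose `recent_failures.count` exceeds one across the three failure buckets, accepting either a single report or an array and tolerating malformed input. Sketches consistent with those cases; the real helpers in ~/vue_merge_request_widget/extensions/test_report/utils may well be written differently:

    // Strip a leading './' or '/' (including repeated slashes); keep everything else.
    export const formatFilePath = (file) => file.replace(/^\.?\/+/, '');

    // Count tests that failed more than once recently, across one report or many.
    export const countRecentlyFailedTests = (subject) =>
      [].concat(subject || []).reduce(
        (total, report) =>
          total +
          ['new_failures', 'existing_failures', 'resolved_failures']
            .flatMap((key) => report?.[key] || [])
            .filter((test) => (test?.recent_failures?.count || 0) > 1).length,
        0,
      );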
diff --git a/spec/frontend/vue_mr_widget/extentions/accessibility/index_spec.js b/spec/frontend/vue_mr_widget/extentions/accessibility/index_spec.js
index 6d1b3bb34a5..a06ad930abe 100644
--- a/spec/frontend/vue_mr_widget/extentions/accessibility/index_spec.js
+++ b/spec/frontend/vue_mr_widget/extentions/accessibility/index_spec.js
@@ -62,7 +62,7 @@ describe('Accessibility extension', () => {
expect(wrapper.text()).toBe('Accessibility scanning failed loading results');
});
- it('displays detected errors', async () => {
+ it('displays detected errors and is expandable', async () => {
mockApi(httpStatusCodes.OK, accessibilityReportResponseErrors);
createComponent();
@@ -72,9 +72,10 @@ describe('Accessibility extension', () => {
expect(wrapper.text()).toBe(
'Accessibility scanning detected 8 issues for the source branch only',
);
+ expect(findToggleCollapsedButton().exists()).toBe(true);
});
- it('displays no detected errors', async () => {
+ it('displays no detected errors and is not expandable', async () => {
mockApi(httpStatusCodes.OK, accessibilityReportResponseSuccess);
createComponent();
@@ -84,6 +85,7 @@ describe('Accessibility extension', () => {
expect(wrapper.text()).toBe(
'Accessibility scanning detected no issues for the source branch only',
);
+ expect(findToggleCollapsedButton().exists()).toBe(false);
});
});
diff --git a/spec/frontend/vue_mr_widget/extentions/terraform/index_spec.js b/spec/frontend/vue_mr_widget/extentions/terraform/index_spec.js
index 77b3576a3d3..d9faa7b2d25 100644
--- a/spec/frontend/vue_mr_widget/extentions/terraform/index_spec.js
+++ b/spec/frontend/vue_mr_widget/extentions/terraform/index_spec.js
@@ -142,11 +142,11 @@ describe('Terraform extension', () => {
expect(api.trackRedisHllUserEvent).toHaveBeenCalledTimes(1);
expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
- 'i_merge_request_widget_terraform_click_full_report',
+ 'i_code_review_merge_request_widget_terraform_click_full_report',
);
expect(api.trackRedisCounterEvent).toHaveBeenCalledTimes(1);
expect(api.trackRedisCounterEvent).toHaveBeenCalledWith(
- 'i_merge_request_widget_terraform_count_click_full_report',
+ 'i_code_review_merge_request_widget_terraform_count_click_full_report',
);
});
});
diff --git a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
index 6abbb052aef..b3af5eba364 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
@@ -32,6 +32,7 @@ import {
fullReportExtension,
noTelemetryExtension,
pollingExtension,
+ pollingFullDataExtension,
pollingErrorExtension,
multiPollingExtension,
} from './test_extensions';
@@ -42,6 +43,13 @@ jest.mock('~/smart_interval');
jest.mock('~/lib/utils/favicon');
+jest.mock('@sentry/browser', () => ({
+ setExtra: jest.fn(),
+ setExtras: jest.fn(),
+ captureMessage: jest.fn(),
+ captureException: jest.fn(),
+}));
+
Vue.use(VueApollo);
describe('MrWidgetOptions', () => {
@@ -66,24 +74,16 @@ describe('MrWidgetOptions', () => {
afterEach(() => {
mock.restore();
wrapper.destroy();
- wrapper = null;
gl.mrWidgetData = {};
gon.features = {};
});
- const createComponent = (mrData = mockData, options = {}, glFeatures = {}) => {
- if (wrapper) {
- wrapper.destroy();
- }
-
+ const createComponent = (mrData = mockData, options = {}) => {
wrapper = mount(MrWidgetOptions, {
propsData: {
mrData: { ...mrData },
},
- provide: {
- glFeatures,
- },
...options,
});
@@ -521,7 +521,7 @@ describe('MrWidgetOptions', () => {
describe('rendering relatedLinks', () => {
beforeEach(() => {
- createComponent({
+ return createComponent({
...mockData,
issues_links: {
closing: `
@@ -531,8 +531,10 @@ describe('MrWidgetOptions', () => {
`,
},
});
+ });
- return nextTick();
+ afterEach(() => {
+ wrapper.destroy();
});
it('renders if there are relatedLinks', () => {
@@ -875,8 +877,8 @@ describe('MrWidgetOptions', () => {
});
describe('given feature flag is enabled', () => {
- beforeEach(() => {
- createComponent();
+ beforeEach(async () => {
+ await createComponent();
wrapper.vm.mr.hasCI = false;
});
@@ -905,6 +907,19 @@ describe('MrWidgetOptions', () => {
});
});
+ describe('merge error', () => {
+ it.each`
+ state | show | showText
+ ${'closed'} | ${false} | ${'hides'}
+ ${'merged'} | ${true} | ${'shows'}
+ ${'open'} | ${true} | ${'shows'}
+    `('$showText merge error when state is $state', ({ state, show }) => {
+ createComponent({ ...mockData, state, merge_error: 'Error!' });
+
+ expect(wrapper.find('[data-testid="merge_error"]').exists()).toBe(show);
+ });
+ });
+
describe('mock extension', () => {
let pollRequest;
@@ -917,8 +932,6 @@ describe('MrWidgetOptions', () => {
});
afterEach(() => {
- pollRequest.mockRestore();
-
registeredExtensions.extensions = [];
});
@@ -970,16 +983,14 @@ describe('MrWidgetOptions', () => {
describe('expansion', () => {
it('hides collapse button', async () => {
registerExtension(workingExtension(false));
- createComponent();
- await waitForPromises();
+ await createComponent();
expect(findExtensionToggleButton().exists()).toBe(false);
});
it('shows collapse button', async () => {
registerExtension(workingExtension(true));
- createComponent();
- await waitForPromises();
+ await createComponent();
expect(findExtensionToggleButton().exists()).toBe(true);
});
@@ -997,17 +1008,7 @@ describe('MrWidgetOptions', () => {
});
afterEach(() => {
- pollRequest.mockRestore();
-
registeredExtensions.extensions = [];
-
- // Clear all left-over timeouts that may be registered in the poll class
- let id = window.setTimeout(() => {}, 0);
-
- while (id > 0) {
- window.clearTimeout(id);
- id -= 1;
- }
});
describe('success - multi polling', () => {
@@ -1058,87 +1059,81 @@ describe('MrWidgetOptions', () => {
describe('success', () => {
it('does not make additional requests after poll is successful', async () => {
registerExtension(pollingExtension);
+
await createComponent();
- // called two times due to parent component polling (mount) and extension polling
- expect(pollRequest).toHaveBeenCalledTimes(2);
+
+ expect(pollRequest).toHaveBeenCalledTimes(6);
});
+ });
+
+ describe('success - full data polling', () => {
+ it('sets data when polling is complete', async () => {
+ registerExtension(pollingFullDataExtension);
- it('keeps polling when poll-interval header is provided', async () => {
- registerExtension({
- ...pollingExtension,
- methods: {
- ...pollingExtension.methods,
- fetchCollapsedData() {
- return Promise.resolve({
- data: {},
- headers: { 'poll-interval': 1 },
- status: 204,
- });
- },
- },
- });
await createComponent();
- expect(findWidgetTestExtension().html()).toContain('Test extension loading...');
+
+ api.trackRedisHllUserEvent.mockClear();
+ api.trackRedisCounterEvent.mockClear();
+
+ findExtensionToggleButton().trigger('click');
+
+ // The default working extension is a "warning" type, which generates a second - more specific - telemetry event for expansions
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledTimes(2);
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
+ 'i_code_review_merge_request_widget_test_extension_expand',
+ );
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
+ 'i_code_review_merge_request_widget_test_extension_expand_warning',
+ );
+ expect(api.trackRedisCounterEvent).toHaveBeenCalledTimes(2);
+ expect(api.trackRedisCounterEvent).toHaveBeenCalledWith(
+ 'i_code_review_merge_request_widget_test_extension_count_expand',
+ );
+ expect(api.trackRedisCounterEvent).toHaveBeenCalledWith(
+ 'i_code_review_merge_request_widget_test_extension_count_expand_warning',
+ );
});
});
describe('error', () => {
- let captureException;
-
- beforeEach(() => {
- captureException = jest.spyOn(Sentry, 'captureException');
-
+ it('does not make additional requests after poll has failed', async () => {
registerExtension(pollingErrorExtension);
+ await createComponent();
- createComponent();
+ expect(pollRequest).toHaveBeenCalledTimes(6);
});
- it('does not make additional requests after poll has failed', () => {
- // called two times due to parent component polling (mount) and extension polling
- expect(pollRequest).toHaveBeenCalledTimes(2);
- });
+ it('captures sentry error and displays error when poll has failed', async () => {
+ registerExtension(pollingErrorExtension);
+ await createComponent();
- it('captures sentry error and displays error when poll has failed', () => {
- expect(captureException).toHaveBeenCalledTimes(1);
- expect(captureException).toHaveBeenCalledWith(new Error('Fetch error'));
+ expect(Sentry.captureException).toHaveBeenCalledTimes(5);
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error('Fetch error'));
expect(wrapper.findComponent(StatusIcon).props('iconName')).toBe('failed');
});
});
});
describe('mock extension errors', () => {
- let captureException;
-
- const itHandlesTheException = () => {
- expect(captureException).toHaveBeenCalledTimes(1);
- expect(captureException).toHaveBeenCalledWith(new Error('Fetch error'));
- expect(wrapper.findComponent(StatusIcon).props('iconName')).toBe('failed');
- };
-
- beforeEach(() => {
- captureException = jest.spyOn(Sentry, 'captureException');
- });
-
afterEach(() => {
registeredExtensions.extensions = [];
- captureException = null;
});
it('handles collapsed data fetch errors', async () => {
registerExtension(collapsedDataErrorExtension);
- createComponent();
- await waitForPromises();
+ await createComponent();
expect(
wrapper.find('[data-testid="widget-extension"] [data-testid="toggle-button"]').exists(),
).toBe(false);
- itHandlesTheException();
+ expect(Sentry.captureException).toHaveBeenCalledTimes(5);
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error('Fetch error'));
+ expect(wrapper.findComponent(StatusIcon).props('iconName')).toBe('failed');
});
it('handles full data fetch errors', async () => {
registerExtension(fullDataErrorExtension);
- createComponent();
- await waitForPromises();
+ await createComponent();
expect(wrapper.findComponent(StatusIcon).props('iconName')).not.toBe('error');
wrapper
@@ -1148,7 +1143,9 @@ describe('MrWidgetOptions', () => {
await nextTick();
await waitForPromises();
- itHandlesTheException();
+ expect(Sentry.captureException).toHaveBeenCalledTimes(1);
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error('Fetch error'));
+ expect(wrapper.findComponent(StatusIcon).props('iconName')).toBe('failed');
});
});
@@ -1163,11 +1160,11 @@ describe('MrWidgetOptions', () => {
expect(api.trackRedisHllUserEvent).toHaveBeenCalledTimes(1);
expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
- 'i_merge_request_widget_test_extension_view',
+ 'i_code_review_merge_request_widget_test_extension_view',
);
expect(api.trackRedisCounterEvent).toHaveBeenCalledTimes(1);
expect(api.trackRedisCounterEvent).toHaveBeenCalledWith(
- 'i_merge_request_widget_test_extension_count_view',
+ 'i_code_review_merge_request_widget_test_extension_count_view',
);
});
@@ -1186,17 +1183,17 @@ describe('MrWidgetOptions', () => {
// The default working extension is a "warning" type, which generates a second - more specific - telemetry event for expansions
expect(api.trackRedisHllUserEvent).toHaveBeenCalledTimes(2);
expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
- 'i_merge_request_widget_test_extension_expand',
+ 'i_code_review_merge_request_widget_test_extension_expand',
);
expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
- 'i_merge_request_widget_test_extension_expand_warning',
+ 'i_code_review_merge_request_widget_test_extension_expand_warning',
);
expect(api.trackRedisCounterEvent).toHaveBeenCalledTimes(2);
expect(api.trackRedisCounterEvent).toHaveBeenCalledWith(
- 'i_merge_request_widget_test_extension_count_expand',
+ 'i_code_review_merge_request_widget_test_extension_count_expand',
);
expect(api.trackRedisCounterEvent).toHaveBeenCalledWith(
- 'i_merge_request_widget_test_extension_count_expand_warning',
+ 'i_code_review_merge_request_widget_test_extension_count_expand_warning',
);
});
@@ -1239,11 +1236,11 @@ describe('MrWidgetOptions', () => {
expect(api.trackRedisHllUserEvent).toHaveBeenCalledTimes(1);
expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
- 'i_merge_request_widget_test_extension_click_full_report',
+ 'i_code_review_merge_request_widget_test_extension_click_full_report',
);
expect(api.trackRedisCounterEvent).toHaveBeenCalledTimes(1);
expect(api.trackRedisCounterEvent).toHaveBeenCalledWith(
- 'i_merge_request_widget_test_extension_count_click_full_report',
+ 'i_code_review_merge_request_widget_test_extension_count_click_full_report',
);
});
diff --git a/spec/frontend/vue_mr_widget/test_extensions.js b/spec/frontend/vue_mr_widget/test_extensions.js
index 76644e0be77..1977f550577 100644
--- a/spec/frontend/vue_mr_widget/test_extensions.js
+++ b/spec/frontend/vue_mr_widget/test_extensions.js
@@ -109,6 +109,39 @@ export const pollingExtension = {
enablePolling: true,
};
+export const pollingFullDataExtension = {
+ ...workingExtension(),
+ enableExpandedPolling: true,
+ methods: {
+ fetchCollapsedData({ targetProjectFullPath }) {
+ return Promise.resolve({ targetProjectFullPath, count: 1 });
+ },
+ fetchFullData() {
+ return Promise.resolve([
+ {
+ headers: { 'poll-interval': 0 },
+ status: 200,
+ data: {
+ id: 1,
+ text: 'Hello world',
+ icon: {
+ name: EXTENSION_ICONS.failed,
+ },
+ badge: {
+ text: 'Closed',
+ },
+ link: {
+ href: 'https://gitlab.com',
+ text: 'GitLab.com',
+ },
+ actions: [{ text: 'Full report', href: 'https://gitlab.com', target: '_blank' }],
+ },
+ },
+ ]);
+ },
+ },
+};
+
export const fullReportExtension = {
...workingExtension(),
computed: {
diff --git a/spec/frontend/vue_shared/components/color_select_dropdown/color_select_root_spec.js b/spec/frontend/vue_shared/components/color_select_dropdown/color_select_root_spec.js
index 93b59800c27..441e21ee905 100644
--- a/spec/frontend/vue_shared/components/color_select_dropdown/color_select_root_spec.js
+++ b/spec/frontend/vue_shared/components/color_select_dropdown/color_select_root_spec.js
@@ -84,15 +84,15 @@ describe('LabelsSelectRoot', () => {
});
describe('if the variant is `sidebar`', () => {
- beforeEach(() => {
+ it('renders SidebarEditableItem component', () => {
createComponent();
- });
- it('renders SidebarEditableItem component', () => {
expect(findSidebarEditableItem().exists()).toBe(true);
});
it('renders correct props for the SidebarEditableItem component', () => {
+ createComponent();
+
expect(findSidebarEditableItem().props()).toMatchObject({
title: wrapper.vm.$options.i18n.widgetTitle,
canEdit: defaultProps.allowEdit,
@@ -135,7 +135,7 @@ describe('LabelsSelectRoot', () => {
it('handles DropdownContents setColor', () => {
findDropdownContents().vm.$emit('setColor', color);
- expect(wrapper.emitted('updateSelectedColor')).toEqual([[color]]);
+ expect(wrapper.emitted('updateSelectedColor')).toEqual([[{ color }]]);
});
});
@@ -157,20 +157,24 @@ describe('LabelsSelectRoot', () => {
createComponent({ propsData: { iid: undefined } });
findDropdownContents().vm.$emit('setColor', color);
- expect(wrapper.emitted('updateSelectedColor')).toEqual([[color]]);
+ expect(wrapper.emitted('updateSelectedColor')).toEqual([[{ color }]]);
});
describe('when updating color for epic', () => {
- beforeEach(() => {
+ const setup = () => {
createComponent();
findDropdownContents().vm.$emit('setColor', color);
- });
+ };
it('sets the loading state', () => {
+ setup();
+
expect(findSidebarEditableItem().props('loading')).toBe(true);
});
it('updates color correctly after successful mutation', async () => {
+ setup();
+
await waitForPromises();
expect(findDropdownValue().props('selectedColor').color).toEqual(
updateColorMutationResponse.data.updateIssuableColor.issuable.color,
diff --git a/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_contents_spec.js b/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_contents_spec.js
index 74f50b878e2..ee4d3a2630a 100644
--- a/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_contents_spec.js
+++ b/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_contents_spec.js
@@ -1,57 +1,30 @@
-import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import { GlDropdown } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import { DROPDOWN_VARIANT } from '~/vue_shared/components/color_select_dropdown/constants';
import DropdownContents from '~/vue_shared/components/color_select_dropdown/dropdown_contents.vue';
import DropdownContentsColorView from '~/vue_shared/components/color_select_dropdown/dropdown_contents_color_view.vue';
+import DropdownHeader from '~/vue_shared/components/color_select_dropdown/dropdown_header.vue';
import { color } from './mock_data';
-const showDropdown = jest.fn();
-const focusInput = jest.fn();
-
const defaultProps = {
dropdownTitle: '',
selectedColor: color,
- dropdownButtonText: '',
+ dropdownButtonText: 'Pick a color',
variant: '',
isVisible: false,
};
-const GlDropdownStub = {
- template: `
- <div>
- <slot name="header"></slot>
- <slot></slot>
- </div>
- `,
- methods: {
- show: showDropdown,
- hide: jest.fn(),
- },
-};
-
-const DropdownHeaderStub = {
- template: `
- <div>Hello, I am a header</div>
- `,
- methods: {
- focusInput,
- },
-};
-
describe('DropdownContent', () => {
let wrapper;
const createComponent = ({ propsData = {} } = {}) => {
- wrapper = shallowMount(DropdownContents, {
+ wrapper = mountExtended(DropdownContents, {
propsData: {
...defaultProps,
...propsData,
},
- stubs: {
- GlDropdown: GlDropdownStub,
- DropdownHeader: DropdownHeaderStub,
- },
});
};
@@ -60,16 +33,17 @@ describe('DropdownContent', () => {
});
const findColorView = () => wrapper.findComponent(DropdownContentsColorView);
- const findDropdownHeader = () => wrapper.findComponent(DropdownHeaderStub);
- const findDropdown = () => wrapper.findComponent(GlDropdownStub);
+ const findDropdownHeader = () => wrapper.findComponent(DropdownHeader);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
it('calls dropdown `show` method on `isVisible` prop change', async () => {
createComponent();
+ const spy = jest.spyOn(wrapper.vm.$refs.dropdown, 'show');
await wrapper.setProps({
isVisible: true,
});
- expect(showDropdown).toHaveBeenCalledTimes(1);
+ expect(spy).toHaveBeenCalledTimes(1);
});
it('does not emit `setColor` event on dropdown hide if color did not change', () => {
@@ -110,4 +84,12 @@ describe('DropdownContent', () => {
expect(findDropdownHeader().exists()).toBe(true);
});
+
+ it('handles no selected color', () => {
+ createComponent({ propsData: { selectedColor: {} } });
+
+ expect(wrapper.findByTestId('fallback-button-text').text()).toEqual(
+ defaultProps.dropdownButtonText,
+ );
+ });
});
diff --git a/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_value_spec.js b/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_value_spec.js
index f22592dd604..5bbdb136353 100644
--- a/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_value_spec.js
+++ b/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_value_spec.js
@@ -33,7 +33,7 @@ describe('DropdownValue', () => {
it.each`
index | cssClass
- ${0} | ${['gl-font-base', 'gl-line-height-24']}
+ ${0} | ${[]}
${1} | ${['hide-collapsed']}
`(
'passes correct props to the ColorItem with CSS class `$cssClass`',
diff --git a/spec/frontend/vue_shared/components/deployment_instance/deployment_instance_spec.js b/spec/frontend/vue_shared/components/deployment_instance/deployment_instance_spec.js
index e3d8bfd22ca..79001b9282f 100644
--- a/spec/frontend/vue_shared/components/deployment_instance/deployment_instance_spec.js
+++ b/spec/frontend/vue_shared/components/deployment_instance/deployment_instance_spec.js
@@ -1,7 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import DeployBoardInstance from '~/vue_shared/components/deployment_instance.vue';
-import { folder } from './mock_data';
describe('Deploy Board Instance', () => {
let wrapper;
@@ -13,7 +12,6 @@ describe('Deploy Board Instance', () => {
...props,
},
provide: {
- glFeatures: { monitorLogging: true },
...provide,
},
});
@@ -25,7 +23,6 @@ describe('Deploy Board Instance', () => {
it('should render a div with the correct css status and tooltip data', () => {
wrapper = createComponent({
- logsPath: folder.logs_path,
tooltipText: 'This is a pod',
});
@@ -43,17 +40,6 @@ describe('Deploy Board Instance', () => {
expect(wrapper.classes('deployment-instance-deploying')).toBe(true);
expect(wrapper.attributes('title')).toEqual('');
});
-
- it('should have a log path computed with a pod name as a parameter', () => {
- wrapper = createComponent({
- logsPath: folder.logs_path,
- podName: 'tanuki-1',
- });
-
- expect(wrapper.vm.computedLogPath).toEqual(
- '/root/review-app/-/logs?environment_name=foo&pod_name=tanuki-1',
- );
- });
});
describe('as a canary deployment', () => {
@@ -76,46 +62,10 @@ describe('Deploy Board Instance', () => {
wrapper.destroy();
});
- it('should not be a link without a logsPath prop', async () => {
- wrapper = createComponent({
- stable: false,
- logsPath: '',
- });
-
- await nextTick();
- expect(wrapper.vm.computedLogPath).toBeNull();
- expect(wrapper.vm.isLink).toBeFalsy();
- });
-
- it('should render a link without href if path is not passed', () => {
- wrapper = createComponent();
-
- expect(wrapper.attributes('href')).toBeUndefined();
- });
-
it('should not have a tooltip', () => {
wrapper = createComponent();
expect(wrapper.attributes('title')).toEqual('');
});
});
-
- describe(':monitor_logging feature flag', () => {
- afterEach(() => {
- wrapper.destroy();
- });
-
- it.each`
- flagState | logsState | expected
- ${true} | ${'shows'} | ${'/root/review-app/-/logs?environment_name=foo&pod_name=tanuki-1'}
- ${false} | ${'hides'} | ${undefined}
- `('$logsState log link when flag state is $flagState', async ({ flagState, expected }) => {
- wrapper = createComponent(
- { logsPath: folder.logs_path, podName: 'tanuki-1' },
- { glFeatures: { monitorLogging: flagState } },
- );
-
- expect(wrapper.attributes('href')).toEqual(expected);
- });
- });
});
diff --git a/spec/frontend/vue_shared/components/deployment_instance/mock_data.js b/spec/frontend/vue_shared/components/deployment_instance/mock_data.js
index 6618c57948c..098787cd1b4 100644
--- a/spec/frontend/vue_shared/components/deployment_instance/mock_data.js
+++ b/spec/frontend/vue_shared/components/deployment_instance/mock_data.js
@@ -140,5 +140,4 @@ export const folder = {
created_at: '2017-02-01T19:42:18.400Z',
updated_at: '2017-02-01T19:42:18.400Z',
rollout_status: {},
- logs_path: '/root/review-app/-/logs?environment_name=foo',
};
diff --git a/spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js b/spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
index d0fa8b8dacb..16f924b44d8 100644
--- a/spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
@@ -1,11 +1,9 @@
import { mount } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
-import { compileToFunctions } from 'vue-template-compiler';
-
+import { nextTick } from 'vue';
import { GREEN_BOX_IMAGE_URL, RED_BOX_IMAGE_URL } from 'spec/test_constants';
-import imageDiffViewer from '~/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue';
+import ImageDiffViewer from '~/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue';
-describe('ImageDiffViewer', () => {
+describe('ImageDiffViewer component', () => {
const requiredProps = {
diffMode: 'replaced',
newPath: GREEN_BOX_IMAGE_URL,
@@ -17,15 +15,12 @@ describe('ImageDiffViewer', () => {
newSize: 1024,
};
let wrapper;
- let vm;
- function createComponent(props) {
- const ImageDiffViewer = Vue.extend(imageDiffViewer);
- wrapper = mount(ImageDiffViewer, { propsData: props });
- vm = wrapper.vm;
- }
+ const createComponent = (props, slots) => {
+ wrapper = mount(ImageDiffViewer, { propsData: props, slots });
+ };
- const triggerEvent = (eventName, el = vm.$el, clientX = 0) => {
+  const triggerEvent = (eventName, el = wrapper.element, clientX = 0) => {
const event = new MouseEvent(eventName, {
bubbles: true,
cancelable: true,
@@ -51,128 +46,76 @@ describe('ImageDiffViewer', () => {
wrapper.destroy();
});
- it('renders image diff for replaced', async () => {
- createComponent({ ...allProps });
-
- await nextTick();
- const metaInfoElements = vm.$el.querySelectorAll('.image-info');
-
- expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
-
- expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(RED_BOX_IMAGE_URL);
-
- expect(vm.$el.querySelector('.view-modes-menu li.active').textContent.trim()).toBe('2-up');
- expect(vm.$el.querySelector('.view-modes-menu li:nth-child(2)').textContent.trim()).toBe(
- 'Swipe',
- );
-
- expect(vm.$el.querySelector('.view-modes-menu li:nth-child(3)').textContent.trim()).toBe(
- 'Onion skin',
- );
-
- expect(metaInfoElements.length).toBe(2);
- expect(metaInfoElements[0]).toHaveText('2.00 KiB');
- expect(metaInfoElements[1]).toHaveText('1.00 KiB');
+ it('renders image diff for replaced', () => {
+ createComponent(allProps);
+ const metaInfoElements = wrapper.findAll('.image-info');
+
+ expect(wrapper.find('.added img').attributes('src')).toBe(GREEN_BOX_IMAGE_URL);
+ expect(wrapper.find('.deleted img').attributes('src')).toBe(RED_BOX_IMAGE_URL);
+ expect(wrapper.find('.view-modes-menu li.active').text()).toBe('2-up');
+ expect(wrapper.find('.view-modes-menu li:nth-child(2)').text()).toBe('Swipe');
+ expect(wrapper.find('.view-modes-menu li:nth-child(3)').text()).toBe('Onion skin');
+ expect(metaInfoElements).toHaveLength(2);
+ expect(metaInfoElements.at(0).text()).toBe('2.00 KiB');
+ expect(metaInfoElements.at(1).text()).toBe('1.00 KiB');
});
- it('renders image diff for new', async () => {
+ it('renders image diff for new', () => {
createComponent({ ...allProps, diffMode: 'new', oldPath: '' });
- await nextTick();
-
- const metaInfoElement = vm.$el.querySelector('.image-info');
-
- expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
- expect(metaInfoElement).toHaveText('1.00 KiB');
+ expect(wrapper.find('.added img').attributes('src')).toBe(GREEN_BOX_IMAGE_URL);
+ expect(wrapper.find('.image-info').text()).toBe('1.00 KiB');
});
- it('renders image diff for deleted', async () => {
+ it('renders image diff for deleted', () => {
createComponent({ ...allProps, diffMode: 'deleted', newPath: '' });
- await nextTick();
-
- const metaInfoElement = vm.$el.querySelector('.image-info');
-
- expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(RED_BOX_IMAGE_URL);
- expect(metaInfoElement).toHaveText('2.00 KiB');
+ expect(wrapper.find('.deleted img').attributes('src')).toBe(RED_BOX_IMAGE_URL);
+ expect(wrapper.find('.image-info').text()).toBe('2.00 KiB');
});
- it('renders image diff for renamed', async () => {
- vm = new Vue({
- components: {
- imageDiffViewer,
- },
- data() {
- return {
- ...allProps,
- diffMode: 'renamed',
- };
- },
- ...compileToFunctions(`
- <image-diff-viewer
- :diff-mode="diffMode"
- :new-path="newPath"
- :old-path="oldPath"
- :new-size="newSize"
- :old-size="oldSize"
- >
- <template #image-overlay>
- <span class="overlay">test</span>
- </template>
- </image-diff-viewer>
- `),
- }).$mount();
-
- await nextTick();
-
- const metaInfoElement = vm.$el.querySelector('.image-info');
-
- expect(vm.$el.querySelector('img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
- expect(vm.$el.querySelector('.overlay')).not.toBe(null);
-
- expect(metaInfoElement).toHaveText('2.00 KiB');
+ it('renders image diff for renamed', () => {
+ createComponent(
+ { ...allProps, diffMode: 'renamed' },
+ { 'image-overlay': '<span class="overlay">test</span>' },
+ );
+
+ expect(wrapper.find('img').attributes('src')).toBe(GREEN_BOX_IMAGE_URL);
+ expect(wrapper.find('.overlay').exists()).toBe(true);
+ expect(wrapper.find('.image-info').text()).toBe('2.00 KiB');
});
describe('swipeMode', () => {
beforeEach(() => {
- createComponent({ ...requiredProps });
-
- return nextTick();
+ createComponent(requiredProps);
});
it('switches to Swipe Mode', async () => {
- vm.$el.querySelector('.view-modes-menu li:nth-child(2)').click();
+ await wrapper.find('.view-modes-menu li:nth-child(2)').trigger('click');
- await nextTick();
- expect(vm.$el.querySelector('.view-modes-menu li.active').textContent.trim()).toBe('Swipe');
+ expect(wrapper.find('.view-modes-menu li.active').text()).toBe('Swipe');
});
});
describe('onionSkin', () => {
beforeEach(() => {
createComponent({ ...requiredProps });
-
- return nextTick();
});
it('switches to Onion Skin Mode', async () => {
- vm.$el.querySelector('.view-modes-menu li:nth-child(3)').click();
+ await wrapper.find('.view-modes-menu li:nth-child(3)').trigger('click');
- await nextTick();
- expect(vm.$el.querySelector('.view-modes-menu li.active').textContent.trim()).toBe(
- 'Onion skin',
- );
+ expect(wrapper.find('.view-modes-menu li.active').text()).toBe('Onion skin');
});
it('has working drag handler', async () => {
- vm.$el.querySelector('.view-modes-menu li:nth-child(3)').click();
+ await wrapper.find('.view-modes-menu li:nth-child(3)').trigger('click');
+ dragSlider(wrapper.find('.dragger').element, document, 20);
await nextTick();
- dragSlider(vm.$el.querySelector('.dragger'), document, 20);
- await nextTick();
- expect(vm.$el.querySelector('.dragger').style.left).toBe('20px');
- expect(vm.$el.querySelector('.added.frame').style.opacity).toBe('0.2');
+ expect(wrapper.find('.dragger').attributes('style')).toBe('left: 20px;');
+ expect(wrapper.find('.added.frame').attributes('style')).toBe('opacity: 0.2;');
});
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
index e3e2ef5610d..86d1f21fd04 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
@@ -8,6 +8,8 @@ import EmojiToken from '~/vue_shared/components/filtered_search_bar/tokens/emoji
import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
import ReleaseToken from '~/vue_shared/components/filtered_search_bar/tokens/release_token.vue';
+import CrmContactToken from '~/vue_shared/components/filtered_search_bar/tokens/crm_contact_token.vue';
+import CrmOrganizationToken from '~/vue_shared/components/filtered_search_bar/tokens/crm_organization_token.vue';
export const mockAuthor1 = {
id: 1,
@@ -62,6 +64,128 @@ export const mockMilestones = [
mockEscapedMilestone,
];
+export const mockCrmContacts = [
+ {
+ id: 'gid://gitlab/CustomerRelations::Contact/1',
+ firstName: 'John',
+ lastName: 'Smith',
+ email: 'john@smith.com',
+ },
+ {
+ id: 'gid://gitlab/CustomerRelations::Contact/2',
+ firstName: 'Andy',
+ lastName: 'Green',
+ email: 'andy@green.net',
+ },
+];
+
+export const mockCrmOrganizations = [
+ {
+ id: 'gid://gitlab/CustomerRelations::Organization/1',
+ name: 'First Org Ltd.',
+ },
+ {
+ id: 'gid://gitlab/CustomerRelations::Organization/2',
+ name: 'Organizer S.p.a.',
+ },
+];
+
+export const mockProjectCrmContactsQueryResponse = {
+ data: {
+ project: {
+ __typename: 'Project',
+ id: 1,
+ group: {
+ __typename: 'Group',
+ id: 1,
+ contacts: {
+ __typename: 'CustomerRelationsContactConnection',
+ nodes: [
+ {
+ __typename: 'CustomerRelationsContact',
+ ...mockCrmContacts[0],
+ },
+ {
+ __typename: 'CustomerRelationsContact',
+ ...mockCrmContacts[1],
+ },
+ ],
+ },
+ },
+ },
+ },
+};
+
+export const mockProjectCrmOrganizationsQueryResponse = {
+ data: {
+ project: {
+ __typename: 'Project',
+ id: 1,
+ group: {
+ __typename: 'Group',
+ id: 1,
+ organizations: {
+ __typename: 'CustomerRelationsOrganizationConnection',
+ nodes: [
+ {
+ __typename: 'CustomerRelationsOrganization',
+ ...mockCrmOrganizations[0],
+ },
+ {
+ __typename: 'CustomerRelationsOrganization',
+ ...mockCrmOrganizations[1],
+ },
+ ],
+ },
+ },
+ },
+ },
+};
+
+export const mockGroupCrmContactsQueryResponse = {
+ data: {
+ group: {
+ __typename: 'Group',
+ id: 1,
+ contacts: {
+ __typename: 'CustomerRelationsContactConnection',
+ nodes: [
+ {
+ __typename: 'CustomerRelationsContact',
+ ...mockCrmContacts[0],
+ },
+ {
+ __typename: 'CustomerRelationsContact',
+ ...mockCrmContacts[1],
+ },
+ ],
+ },
+ },
+ },
+};
+
+export const mockGroupCrmOrganizationsQueryResponse = {
+ data: {
+ group: {
+ __typename: 'Group',
+ id: 1,
+ organizations: {
+ __typename: 'CustomerRelationsOrganizationConnection',
+ nodes: [
+ {
+ __typename: 'CustomerRelationsOrganization',
+ ...mockCrmOrganizations[0],
+ },
+ {
+ __typename: 'CustomerRelationsOrganization',
+ ...mockCrmOrganizations[1],
+ },
+ ],
+ },
+ },
+ },
+};
+
export const mockEmoji1 = {
name: 'thumbsup',
};
@@ -134,6 +258,28 @@ export const mockReactionEmojiToken = {
fetchEmojis: () => Promise.resolve(mockEmojis),
};
+export const mockCrmContactToken = {
+ type: 'crm_contact',
+ title: 'Contact',
+ icon: 'user',
+ token: CrmContactToken,
+ isProject: false,
+ fullPath: 'group',
+ operators: OPERATOR_IS_ONLY,
+ unique: true,
+};
+
+export const mockCrmOrganizationToken = {
+  type: 'crm_organization',
+ title: 'Organization',
+ icon: 'user',
+ token: CrmOrganizationToken,
+ isProject: false,
+ fullPath: 'group',
+ operators: OPERATOR_IS_ONLY,
+ unique: true,
+};
+
export const mockMembershipToken = {
type: 'with_inherited_permissions',
icon: 'group',
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
index ca8cd419d87..a0126c2bd63 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
@@ -418,8 +418,6 @@ describe('BaseToken', () => {
});
it('does not emit `fetch-suggestions` event on component after a delay when component emits `input` event', async () => {
- jest.useFakeTimers();
-
findGlFilteredSearchToken().vm.$emit('input', { data: 'foo' });
await nextTick();
@@ -437,8 +435,6 @@ describe('BaseToken', () => {
});
it('emits `fetch-suggestions` event on component after a delay when component emits `input` event', async () => {
- jest.useFakeTimers();
-
findGlFilteredSearchToken().vm.$emit('input', { data: 'foo' });
await nextTick();
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_contact_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_contact_token_spec.js
new file mode 100644
index 00000000000..157e021fc60
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_contact_token_spec.js
@@ -0,0 +1,283 @@
+import {
+ GlFilteredSearchSuggestion,
+ GlFilteredSearchTokenSegment,
+ GlDropdownDivider,
+} from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import { DEFAULT_NONE_ANY } from '~/vue_shared/components/filtered_search_bar/constants';
+import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
+import CrmContactToken from '~/vue_shared/components/filtered_search_bar/tokens/crm_contact_token.vue';
+import searchCrmContactsQuery from '~/vue_shared/components/filtered_search_bar/queries/search_crm_contacts.query.graphql';
+
+import {
+ mockCrmContacts,
+ mockCrmContactToken,
+ mockGroupCrmContactsQueryResponse,
+ mockProjectCrmContactsQueryResponse,
+} from '../mock_data';
+
+jest.mock('~/flash');
+
+const defaultStubs = {
+ Portal: true,
+ BaseToken,
+ GlFilteredSearchSuggestionList: {
+ template: '<div></div>',
+ methods: {
+ getValue: () => '=',
+ },
+ },
+};
+
+describe('CrmContactToken', () => {
+ Vue.use(VueApollo);
+
+ let wrapper;
+ let fakeApollo;
+
+ const getBaseToken = () => wrapper.findComponent(BaseToken);
+
+ const searchGroupCrmContactsQueryHandler = jest
+ .fn()
+ .mockResolvedValue(mockGroupCrmContactsQueryResponse);
+ const searchProjectCrmContactsQueryHandler = jest
+ .fn()
+ .mockResolvedValue(mockProjectCrmContactsQueryResponse);
+
+ const mountComponent = ({
+ config = mockCrmContactToken,
+ value = { data: '' },
+ active = false,
+ stubs = defaultStubs,
+ listeners = {},
+ queryHandler = searchGroupCrmContactsQueryHandler,
+ } = {}) => {
+ fakeApollo = createMockApollo([[searchCrmContactsQuery, queryHandler]]);
+
+ wrapper = mount(CrmContactToken, {
+ propsData: {
+ config,
+ value,
+ active,
+ cursorPosition: 'start',
+ },
+ provide: {
+ portalName: 'fake target',
+ alignSuggestions: function fakeAlignSuggestions() {},
+ suggestionsListClass: () => 'custom-class',
+ },
+ stubs,
+ listeners,
+ apolloProvider: fakeApollo,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ fakeApollo = null;
+ });
+
+ describe('methods', () => {
+ describe('fetchContacts', () => {
+ describe('for groups', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('calls the apollo query providing the searchString when search term is a string', async () => {
+ getBaseToken().vm.$emit('fetch-suggestions', 'foo');
+ await waitForPromises();
+
+ expect(createFlash).not.toHaveBeenCalled();
+ expect(searchGroupCrmContactsQueryHandler).toHaveBeenCalledWith({
+ fullPath: 'group',
+ isProject: false,
+ searchString: 'foo',
+ searchIds: null,
+ });
+ expect(getBaseToken().props('suggestions')).toEqual(mockCrmContacts);
+ });
+
+ it('calls the apollo query providing the searchId when search term is a number', async () => {
+ getBaseToken().vm.$emit('fetch-suggestions', '5');
+ await waitForPromises();
+
+ expect(createFlash).not.toHaveBeenCalled();
+ expect(searchGroupCrmContactsQueryHandler).toHaveBeenCalledWith({
+ fullPath: 'group',
+ isProject: false,
+ searchString: null,
+ searchIds: ['gid://gitlab/CustomerRelations::Contact/5'],
+ });
+ expect(getBaseToken().props('suggestions')).toEqual(mockCrmContacts);
+ });
+ });
+
+ describe('for projects', () => {
+ beforeEach(() => {
+ mountComponent({
+ config: {
+ fullPath: 'project',
+ isProject: true,
+ },
+ queryHandler: searchProjectCrmContactsQueryHandler,
+ });
+ });
+
+ it('calls the apollo query providing the searchString when search term is a string', async () => {
+ getBaseToken().vm.$emit('fetch-suggestions', 'foo');
+ await waitForPromises();
+
+ expect(createFlash).not.toHaveBeenCalled();
+ expect(searchProjectCrmContactsQueryHandler).toHaveBeenCalledWith({
+ fullPath: 'project',
+ isProject: true,
+ searchString: 'foo',
+ searchIds: null,
+ });
+ expect(getBaseToken().props('suggestions')).toEqual(mockCrmContacts);
+ });
+
+ it('calls the apollo query providing the searchId when search term is a number', async () => {
+ getBaseToken().vm.$emit('fetch-suggestions', '5');
+ await waitForPromises();
+
+ expect(createFlash).not.toHaveBeenCalled();
+ expect(searchProjectCrmContactsQueryHandler).toHaveBeenCalledWith({
+ fullPath: 'project',
+ isProject: true,
+ searchString: null,
+ searchIds: ['gid://gitlab/CustomerRelations::Contact/5'],
+ });
+ expect(getBaseToken().props('suggestions')).toEqual(mockCrmContacts);
+ });
+ });
+
+ it('calls `createFlash` with flash error message when request fails', async () => {
+ mountComponent();
+
+ jest.spyOn(wrapper.vm.$apollo, 'query').mockRejectedValue({});
+
+ getBaseToken().vm.$emit('fetch-suggestions');
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'There was a problem fetching CRM contacts.',
+ });
+ });
+
+ it('sets `loading` to false when request completes', async () => {
+ mountComponent();
+
+ jest.spyOn(wrapper.vm.$apollo, 'query').mockRejectedValue({});
+
+ getBaseToken().vm.$emit('fetch-suggestions');
+
+ await waitForPromises();
+
+ expect(getBaseToken().props('suggestionsLoading')).toBe(false);
+ });
+ });
+ });
+
+ describe('template', () => {
+ const defaultContacts = DEFAULT_NONE_ANY;
+
+ it('renders base-token component', () => {
+ mountComponent({
+ config: { ...mockCrmContactToken, initialContacts: mockCrmContacts },
+ value: { data: '1' },
+ });
+
+ const baseTokenEl = wrapper.find(BaseToken);
+
+ expect(baseTokenEl.exists()).toBe(true);
+ expect(baseTokenEl.props()).toMatchObject({
+ suggestions: mockCrmContacts,
+ getActiveTokenValue: wrapper.vm.getActiveContact,
+ });
+ });
+
+ it.each(mockCrmContacts)('renders token item when value is selected', (contact) => {
+ mountComponent({
+ config: { ...mockCrmContactToken, initialContacts: mockCrmContacts },
+ value: { data: `${getIdFromGraphQLId(contact.id)}` },
+ });
+
+ const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+
+ expect(tokenSegments).toHaveLength(3); // Contact, =, Contact name
+ expect(tokenSegments.at(2).text()).toBe(`${contact.firstName} ${contact.lastName}`); // Contact name
+ });
+
+ it('renders provided defaultContacts as suggestions', async () => {
+ mountComponent({
+ active: true,
+ config: { ...mockCrmContactToken, defaultContacts },
+ stubs: { Portal: true },
+ });
+ const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const suggestionsSegment = tokenSegments.at(2);
+ suggestionsSegment.vm.$emit('activate');
+ await nextTick();
+
+ const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+
+ expect(suggestions).toHaveLength(defaultContacts.length);
+ defaultContacts.forEach((contact, index) => {
+ expect(suggestions.at(index).text()).toBe(contact.text);
+ });
+ });
+
+ it('does not render divider when no defaultContacts', async () => {
+ mountComponent({
+ active: true,
+ config: { ...mockCrmContactToken, defaultContacts: [] },
+ stubs: { Portal: true },
+ });
+ const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const suggestionsSegment = tokenSegments.at(2);
+ suggestionsSegment.vm.$emit('activate');
+ await nextTick();
+
+ expect(wrapper.find(GlFilteredSearchSuggestion).exists()).toBe(false);
+ expect(wrapper.find(GlDropdownDivider).exists()).toBe(false);
+ });
+
+ it('renders `DEFAULT_NONE_ANY` as default suggestions', () => {
+ mountComponent({
+ active: true,
+ config: { ...mockCrmContactToken },
+ stubs: { Portal: true },
+ });
+ const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const suggestionsSegment = tokenSegments.at(2);
+ suggestionsSegment.vm.$emit('activate');
+
+ const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+
+ expect(suggestions).toHaveLength(DEFAULT_NONE_ANY.length);
+ DEFAULT_NONE_ANY.forEach((contact, index) => {
+ expect(suggestions.at(index).text()).toBe(contact.text);
+ });
+ });
+
+ it('emits listeners in the base-token', () => {
+ const mockInput = jest.fn();
+ mountComponent({
+ listeners: {
+ input: mockInput,
+ },
+ });
+ wrapper.findComponent(BaseToken).vm.$emit('input', [{ data: 'mockData', operator: '=' }]);
+
+ expect(mockInput).toHaveBeenLastCalledWith([{ data: 'mockData', operator: '=' }]);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_organization_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_organization_token_spec.js
new file mode 100644
index 00000000000..977f8bbef61
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_organization_token_spec.js
@@ -0,0 +1,282 @@
+import {
+ GlFilteredSearchSuggestion,
+ GlFilteredSearchTokenSegment,
+ GlDropdownDivider,
+} from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import { DEFAULT_NONE_ANY } from '~/vue_shared/components/filtered_search_bar/constants';
+import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
+import CrmOrganizationToken from '~/vue_shared/components/filtered_search_bar/tokens/crm_organization_token.vue';
+import searchCrmOrganizationsQuery from '~/vue_shared/components/filtered_search_bar/queries/search_crm_organizations.query.graphql';
+
+import {
+ mockCrmOrganizations,
+ mockCrmOrganizationToken,
+ mockGroupCrmOrganizationsQueryResponse,
+ mockProjectCrmOrganizationsQueryResponse,
+} from '../mock_data';
+
+jest.mock('~/flash');
+
+const defaultStubs = {
+ Portal: true,
+ BaseToken,
+ GlFilteredSearchSuggestionList: {
+ template: '<div></div>',
+ methods: {
+ getValue: () => '=',
+ },
+ },
+};
+
+describe('CrmOrganizationToken', () => {
+ Vue.use(VueApollo);
+
+ let wrapper;
+ let fakeApollo;
+
+ const getBaseToken = () => wrapper.findComponent(BaseToken);
+
+ const searchGroupCrmOrganizationsQueryHandler = jest
+ .fn()
+ .mockResolvedValue(mockGroupCrmOrganizationsQueryResponse);
+ const searchProjectCrmOrganizationsQueryHandler = jest
+ .fn()
+ .mockResolvedValue(mockProjectCrmOrganizationsQueryResponse);
+
+ const mountComponent = ({
+ config = mockCrmOrganizationToken,
+ value = { data: '' },
+ active = false,
+ stubs = defaultStubs,
+ listeners = {},
+ queryHandler = searchGroupCrmOrganizationsQueryHandler,
+ } = {}) => {
+ fakeApollo = createMockApollo([[searchCrmOrganizationsQuery, queryHandler]]);
+ wrapper = mount(CrmOrganizationToken, {
+ propsData: {
+ config,
+ value,
+ active,
+ cursorPosition: 'start',
+ },
+ provide: {
+ portalName: 'fake target',
+ alignSuggestions: function fakeAlignSuggestions() {},
+ suggestionsListClass: () => 'custom-class',
+ },
+ stubs,
+ listeners,
+ apolloProvider: fakeApollo,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ fakeApollo = null;
+ });
+
+ describe('methods', () => {
+ describe('fetchOrganizations', () => {
+ describe('for groups', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('calls the apollo query providing the searchString when search term is a string', async () => {
+ getBaseToken().vm.$emit('fetch-suggestions', 'foo');
+ await waitForPromises();
+
+ expect(createFlash).not.toHaveBeenCalled();
+ expect(searchGroupCrmOrganizationsQueryHandler).toHaveBeenCalledWith({
+ fullPath: 'group',
+ isProject: false,
+ searchString: 'foo',
+ searchIds: null,
+ });
+ expect(getBaseToken().props('suggestions')).toEqual(mockCrmOrganizations);
+ });
+
+ it('calls the apollo query providing the searchId when search term is a number', async () => {
+ getBaseToken().vm.$emit('fetch-suggestions', '5');
+ await waitForPromises();
+
+ expect(createFlash).not.toHaveBeenCalled();
+ expect(searchGroupCrmOrganizationsQueryHandler).toHaveBeenCalledWith({
+ fullPath: 'group',
+ isProject: false,
+ searchString: null,
+ searchIds: ['gid://gitlab/CustomerRelations::Organization/5'],
+ });
+ expect(getBaseToken().props('suggestions')).toEqual(mockCrmOrganizations);
+ });
+ });
+
+ describe('for projects', () => {
+ beforeEach(() => {
+ mountComponent({
+ config: {
+ fullPath: 'project',
+ isProject: true,
+ },
+ queryHandler: searchProjectCrmOrganizationsQueryHandler,
+ });
+ });
+
+ it('calls the apollo query providing the searchString when search term is a string', async () => {
+ getBaseToken().vm.$emit('fetch-suggestions', 'foo');
+ await waitForPromises();
+
+ expect(createFlash).not.toHaveBeenCalled();
+ expect(searchProjectCrmOrganizationsQueryHandler).toHaveBeenCalledWith({
+ fullPath: 'project',
+ isProject: true,
+ searchString: 'foo',
+ searchIds: null,
+ });
+ expect(getBaseToken().props('suggestions')).toEqual(mockCrmOrganizations);
+ });
+
+ it('calls the apollo query providing the searchId when search term is a number', async () => {
+ getBaseToken().vm.$emit('fetch-suggestions', '5');
+ await waitForPromises();
+
+ expect(createFlash).not.toHaveBeenCalled();
+ expect(searchProjectCrmOrganizationsQueryHandler).toHaveBeenCalledWith({
+ fullPath: 'project',
+ isProject: true,
+ searchString: null,
+ searchIds: ['gid://gitlab/CustomerRelations::Organization/5'],
+ });
+ expect(getBaseToken().props('suggestions')).toEqual(mockCrmOrganizations);
+ });
+ });
+
+ it('calls `createFlash` with flash error message when request fails', async () => {
+ mountComponent();
+
+ jest.spyOn(wrapper.vm.$apollo, 'query').mockRejectedValue({});
+
+ getBaseToken().vm.$emit('fetch-suggestions');
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'There was a problem fetching CRM organizations.',
+ });
+ });
+
+ it('sets `loading` to false when request completes', async () => {
+ mountComponent();
+
+ jest.spyOn(wrapper.vm.$apollo, 'query').mockRejectedValue({});
+
+ getBaseToken().vm.$emit('fetch-suggestions');
+
+ await waitForPromises();
+
+ expect(getBaseToken().props('suggestionsLoading')).toBe(false);
+ });
+ });
+ });
+
+ describe('template', () => {
+ const defaultOrganizations = DEFAULT_NONE_ANY;
+
+ it('renders base-token component', () => {
+ mountComponent({
+ config: { ...mockCrmOrganizationToken, initialOrganizations: mockCrmOrganizations },
+ value: { data: '1' },
+ });
+
+ const baseTokenEl = wrapper.find(BaseToken);
+
+ expect(baseTokenEl.exists()).toBe(true);
+ expect(baseTokenEl.props()).toMatchObject({
+ suggestions: mockCrmOrganizations,
+ getActiveTokenValue: wrapper.vm.getActiveOrganization,
+ });
+ });
+
+ it.each(mockCrmOrganizations)('renders token item when value is selected', (organization) => {
+ mountComponent({
+ config: { ...mockCrmOrganizationToken, initialOrganizations: mockCrmOrganizations },
+ value: { data: `${getIdFromGraphQLId(organization.id)}` },
+ });
+
+ const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+
+ expect(tokenSegments).toHaveLength(3); // Organization, =, Organization name
+ expect(tokenSegments.at(2).text()).toBe(organization.name); // Organization name
+ });
+
+ it('renders provided defaultOrganizations as suggestions', async () => {
+ mountComponent({
+ active: true,
+ config: { ...mockCrmOrganizationToken, defaultOrganizations },
+ stubs: { Portal: true },
+ });
+ const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const suggestionsSegment = tokenSegments.at(2);
+ suggestionsSegment.vm.$emit('activate');
+ await nextTick();
+
+ const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+
+ expect(suggestions).toHaveLength(defaultOrganizations.length);
+ defaultOrganizations.forEach((organization, index) => {
+ expect(suggestions.at(index).text()).toBe(organization.text);
+ });
+ });
+
+ it('does not render divider when no defaultOrganizations', async () => {
+ mountComponent({
+ active: true,
+ config: { ...mockCrmOrganizationToken, defaultOrganizations: [] },
+ stubs: { Portal: true },
+ });
+ const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const suggestionsSegment = tokenSegments.at(2);
+ suggestionsSegment.vm.$emit('activate');
+ await nextTick();
+
+ expect(wrapper.find(GlFilteredSearchSuggestion).exists()).toBe(false);
+ expect(wrapper.find(GlDropdownDivider).exists()).toBe(false);
+ });
+
+ it('renders `DEFAULT_NONE_ANY` as default suggestions', () => {
+ mountComponent({
+ active: true,
+ config: { ...mockCrmOrganizationToken },
+ stubs: { Portal: true },
+ });
+ const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const suggestionsSegment = tokenSegments.at(2);
+ suggestionsSegment.vm.$emit('activate');
+
+ const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+
+ expect(suggestions).toHaveLength(DEFAULT_NONE_ANY.length);
+ DEFAULT_NONE_ANY.forEach((organization, index) => {
+ expect(suggestions.at(index).text()).toBe(organization.text);
+ });
+ });
+
+ it('emits listeners in the base-token', () => {
+ const mockInput = jest.fn();
+ mountComponent({
+ listeners: {
+ input: mockInput,
+ },
+ });
+ wrapper.findComponent(BaseToken).vm.$emit('input', [{ data: 'mockData', operator: '=' }]);
+
+ expect(mockInput).toHaveBeenLastCalledWith([{ data: 'mockData', operator: '=' }]);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/markdown/field_spec.js b/spec/frontend/vue_shared/components/markdown/field_spec.js
index b3376f26a25..85a135d2b89 100644
--- a/spec/frontend/vue_shared/components/markdown/field_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/field_spec.js
@@ -67,11 +67,6 @@ describe('Markdown field component', () => {
enablePreview,
restrictedToolBarItems,
},
- provide: {
- glFeatures: {
- contactsAutocomplete: true,
- },
- },
},
);
}
diff --git a/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap b/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap
index 5e956d66b6a..bf6c8e8c704 100644
--- a/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap
+++ b/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap
@@ -7,16 +7,19 @@ exports[`Issue placeholder note component matches snapshot 1`] = `
<div
class="timeline-icon"
>
- <user-avatar-link-stub
- imgalt=""
- imgcssclasses=""
- imgsize="40"
- imgsrc="mock_path"
- linkhref="/root"
- tooltipplacement="top"
- tooltiptext=""
- username=""
- />
+ <gl-avatar-link-stub
+ class="gl-mr-3"
+ href="/root"
+ >
+ <gl-avatar-stub
+ alt="Root"
+ entityid="0"
+ entityname="root"
+ shape="circle"
+ size="[object Object]"
+ src="mock_path"
+ />
+ </gl-avatar-link-stub>
</div>
<div
diff --git a/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js b/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
index 6881cb79740..f951cfd5cd9 100644
--- a/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
+++ b/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
+import { GlAvatar } from '@gitlab/ui';
import Vue from 'vue';
import Vuex from 'vuex';
import IssuePlaceholderNote from '~/vue_shared/components/notes/placeholder_note.vue';
-import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
import { userDataMock } from 'jest/notes/mock_data';
Vue.use(Vuex);
@@ -56,14 +56,14 @@ describe('Issue placeholder note component', () => {
describe('avatar size', () => {
it.each`
- size | line | isOverviewTab
- ${40} | ${null} | ${false}
- ${24} | ${{ line_code: '123' }} | ${false}
- ${40} | ${{ line_code: '123' }} | ${true}
+ size | line | isOverviewTab
+ ${{ default: 24, md: 32 }} | ${null} | ${false}
+ ${24} | ${{ line_code: '123' }} | ${false}
+ ${{ default: 24, md: 32 }} | ${{ line_code: '123' }} | ${true}
`('renders avatar $size for $line and $isOverviewTab', ({ size, line, isOverviewTab }) => {
createComponent(false, { line, isOverviewTab });
- expect(wrapper.findComponent(UserAvatarLink).props('imgSize')).toBe(size);
+ expect(wrapper.findComponent(GlAvatar).props('size')).toEqual(size);
});
});
});
diff --git a/spec/frontend/vue_shared/components/page_size_selector_spec.js b/spec/frontend/vue_shared/components/page_size_selector_spec.js
new file mode 100644
index 00000000000..5ec0b863afd
--- /dev/null
+++ b/spec/frontend/vue_shared/components/page_size_selector_spec.js
@@ -0,0 +1,44 @@
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import PageSizeSelector, { PAGE_SIZES } from '~/vue_shared/components/page_size_selector.vue';
+
+describe('Page size selector component', () => {
+ let wrapper;
+
+ const createWrapper = ({ pageSize = 20 } = {}) => {
+ wrapper = shallowMount(PageSizeSelector, {
+ propsData: { value: pageSize },
+ });
+ };
+
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it.each(PAGE_SIZES)('shows expected text in the dropdown button for page size %s', (pageSize) => {
+ createWrapper({ pageSize });
+
+ expect(findDropdown().props('text')).toBe(`Show ${pageSize} items`);
+ });
+
+ it('shows the expected dropdown items', () => {
+ createWrapper();
+
+ PAGE_SIZES.forEach((pageSize, index) => {
+ expect(findDropdownItems().at(index).text()).toBe(`Show ${pageSize} items`);
+ });
+ });
+
+ it('will emit the new page size when a dropdown item is clicked', () => {
+ createWrapper();
+
+ findDropdownItems().wrappers.forEach((itemWrapper, index) => {
+ itemWrapper.vm.$emit('click');
+
+ expect(wrapper.emitted('input')[index][0]).toBe(PAGE_SIZES[index]);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
index 8270ff31574..51a936c0509 100644
--- a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
+++ b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
@@ -195,7 +195,7 @@ describe('AlertManagementEmptyState', () => {
tabs.forEach((tab, i) => {
const status = ITEMS_STATUS_TABS[i].status.toLowerCase();
expect(tab.attributes('data-testid')).toContain(ITEMS_STATUS_TABS[i].status);
- expect(badges.at(i).text()).toContain(itemsCount[status]);
+ expect(badges.at(i).text()).toContain(itemsCount[status].toString());
});
});
});
diff --git a/spec/frontend/vue_shared/components/runner_aws_deployments/__snapshots__/runner_aws_deployments_modal_spec.js.snap b/spec/frontend/vue_shared/components/runner_aws_deployments/__snapshots__/runner_aws_deployments_modal_spec.js.snap
index 8ff49271eb5..2ea8985b16a 100644
--- a/spec/frontend/vue_shared/components/runner_aws_deployments/__snapshots__/runner_aws_deployments_modal_spec.js.snap
+++ b/spec/frontend/vue_shared/components/runner_aws_deployments/__snapshots__/runner_aws_deployments_modal_spec.js.snap
@@ -42,6 +42,7 @@ exports[`RunnerAwsDeploymentsModal renders the modal 1`] = `
>
<gl-accordion-item-stub
class="gl-font-weight-normal"
+ headerclass=""
title="More Details"
titlevisible="Less Details"
>
@@ -76,6 +77,7 @@ exports[`RunnerAwsDeploymentsModal renders the modal 1`] = `
>
<gl-accordion-item-stub
class="gl-font-weight-normal"
+ headerclass=""
title="More Details"
titlevisible="Less Details"
>
@@ -110,6 +112,7 @@ exports[`RunnerAwsDeploymentsModal renders the modal 1`] = `
>
<gl-accordion-item-stub
class="gl-font-weight-normal"
+ headerclass=""
title="More Details"
titlevisible="Less Details"
>
@@ -144,6 +147,7 @@ exports[`RunnerAwsDeploymentsModal renders the modal 1`] = `
>
<gl-accordion-item-stub
class="gl-font-weight-normal"
+ headerclass=""
title="More Details"
titlevisible="Less Details"
>
diff --git a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
index 7173abe1316..a38dcd626f4 100644
--- a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
+++ b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
@@ -79,7 +79,7 @@ describe('RunnerInstructionsModal component', () => {
}
};
- beforeEach(async () => {
+ beforeEach(() => {
runnerPlatformsHandler = jest.fn().mockResolvedValue(mockGraphqlRunnerPlatforms);
runnerSetupInstructionsHandler = jest.fn().mockResolvedValue(mockGraphqlInstructions);
});
@@ -259,11 +259,11 @@ describe('RunnerInstructionsModal component', () => {
});
describe('when apollo is loading', () => {
- beforeEach(() => {
+ it('should show a skeleton loader', async () => {
createComponent();
- });
+ await nextTick();
+ await nextTick();
- it('should show a skeleton loader', async () => {
expect(findSkeletonLoader().exists()).toBe(true);
expect(findGlLoadingIcon().exists()).toBe(false);
@@ -275,6 +275,8 @@ describe('RunnerInstructionsModal component', () => {
});
it('once loaded, should not show a loading state', async () => {
+ createComponent();
+
await waitForPromises();
expect(findSkeletonLoader().exists()).toBe(false);
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/link_dependencies_spec.js b/spec/frontend/vue_shared/components/source_viewer/plugins/link_dependencies_spec.js
new file mode 100644
index 00000000000..3036ce43888
--- /dev/null
+++ b/spec/frontend/vue_shared/components/source_viewer/plugins/link_dependencies_spec.js
@@ -0,0 +1,14 @@
+import packageJsonLinker from '~/vue_shared/components/source_viewer/plugins/utils/package_json_linker';
+import linkDependencies from '~/vue_shared/components/source_viewer/plugins/link_dependencies';
+import { PACKAGE_JSON_FILE_TYPE, PACKAGE_JSON_CONTENT } from './mock_data';
+
+jest.mock('~/vue_shared/components/source_viewer/plugins/utils/package_json_linker');
+
+describe('Highlight.js plugin for linking dependencies', () => {
+ const hljsResultMock = { value: 'test' };
+
+ it('calls packageJsonLinker for package_json file types', () => {
+ linkDependencies(hljsResultMock, PACKAGE_JSON_FILE_TYPE, PACKAGE_JSON_CONTENT);
+ expect(packageJsonLinker).toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/mock_data.js b/spec/frontend/vue_shared/components/source_viewer/plugins/mock_data.js
new file mode 100644
index 00000000000..75659770e2c
--- /dev/null
+++ b/spec/frontend/vue_shared/components/source_viewer/plugins/mock_data.js
@@ -0,0 +1,2 @@
+export const PACKAGE_JSON_FILE_TYPE = 'package_json';
+export const PACKAGE_JSON_CONTENT = '{ "dependencies": { "@babel/core": "^7.18.5" } }';
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/utils/dependency_linker_util_spec.js b/spec/frontend/vue_shared/components/source_viewer/plugins/utils/dependency_linker_util_spec.js
new file mode 100644
index 00000000000..ee200747af9
--- /dev/null
+++ b/spec/frontend/vue_shared/components/source_viewer/plugins/utils/dependency_linker_util_spec.js
@@ -0,0 +1,33 @@
+import {
+ createLink,
+ generateHLJSOpenTag,
+} from '~/vue_shared/components/source_viewer/plugins/utils/dependency_linker_util';
+
+describe('createLink', () => {
+ it('generates a link with the correct attributes', () => {
+ const href = 'http://test.com';
+ const innerText = 'testing';
+ const result = `<a href="${href}" rel="nofollow noreferrer noopener">${innerText}</a>`;
+
+ expect(createLink(href, innerText)).toBe(result);
+ });
+
+ it('escapes the user-controlled content', () => {
+ const unescapedXSS = '<script>XSS</script>';
+ const escapedXSS = '&amp;lt;script&amp;gt;XSS&amp;lt;/script&amp;gt;';
+ const href = `http://test.com/${unescapedXSS}`;
+ const innerText = `testing${unescapedXSS}`;
+ const result = `<a href="http://test.com/${escapedXSS}" rel="nofollow noreferrer noopener">testing${escapedXSS}</a>`;
+
+ expect(createLink(href, innerText)).toBe(result);
+ });
+});
+
+describe('generateHLJSOpenTag', () => {
+ it('generates an open tag with the correct selector', () => {
+ const type = 'string';
+ const result = `<span class="hljs-${type}">&quot;`;
+
+ expect(generateHLJSOpenTag(type)).toBe(result);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/utils/package_json_linker_spec.js b/spec/frontend/vue_shared/components/source_viewer/plugins/utils/package_json_linker_spec.js
new file mode 100644
index 00000000000..e83c129818c
--- /dev/null
+++ b/spec/frontend/vue_shared/components/source_viewer/plugins/utils/package_json_linker_spec.js
@@ -0,0 +1,15 @@
+import packageJsonLinker from '~/vue_shared/components/source_viewer/plugins/utils/package_json_linker';
+import { PACKAGE_JSON_CONTENT } from '../mock_data';
+
+describe('Highlight.js plugin for linking package.json dependencies', () => {
+ it('mutates the input value by wrapping dependency names and versions in anchors', () => {
+ const inputValue =
+ '<span class="hljs-attr">&quot;@babel/core&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;^7.18.5&quot;</span>';
+ const outputValue =
+ '<span class="hljs-attr">&quot;<a href="https://npmjs.com/package/@babel/core" rel="nofollow noreferrer noopener">@babel/core</a>&quot;</span>: <span class="hljs-attr">&quot;<a href="https://npmjs.com/package/@babel/core" rel="nofollow noreferrer noopener">^7.18.5</a>&quot;</span>';
+ const hljsResultMock = { value: inputValue };
+
+ const output = packageJsonLinker(hljsResultMock, PACKAGE_JSON_CONTENT);
+ expect(output).toBe(outputValue);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
index bb0945a1f3e..2c03b7aa7d3 100644
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
@@ -5,10 +5,16 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer.vue';
import { registerPlugins } from '~/vue_shared/components/source_viewer/plugins/index';
import Chunk from '~/vue_shared/components/source_viewer/components/chunk.vue';
-import { ROUGE_TO_HLJS_LANGUAGE_MAP } from '~/vue_shared/components/source_viewer/constants';
+import {
+ EVENT_ACTION,
+ EVENT_LABEL_VIEWER,
+ EVENT_LABEL_FALLBACK,
+ ROUGE_TO_HLJS_LANGUAGE_MAP,
+} from '~/vue_shared/components/source_viewer/constants';
import waitForPromises from 'helpers/wait_for_promises';
import LineHighlighter from '~/blob/line_highlighter';
import eventHub from '~/notes/event_hub';
+import Tracking from '~/tracking';
jest.mock('~/blob/line_highlighter');
jest.mock('highlight.js/lib/core');
@@ -34,7 +40,8 @@ describe('Source Viewer component', () => {
const chunk2 = generateContent('// Some source code 2', 70);
const content = chunk1 + chunk2;
const path = 'some/path.js';
- const DEFAULT_BLOB_DATA = { language, rawTextBlob: content, path };
+ const fileType = 'javascript';
+ const DEFAULT_BLOB_DATA = { language, rawTextBlob: content, path, fileType };
const highlightedContent = `<span data-testid='test-highlighted' id='LC1'>${content}</span><span id='LC2'></span>`;
const createComponent = async (blob = {}) => {
@@ -52,17 +59,38 @@ describe('Source Viewer component', () => {
hljs.highlightAuto.mockImplementation(() => ({ value: highlightedContent }));
jest.spyOn(window, 'requestIdleCallback').mockImplementation(execImmediately);
jest.spyOn(eventHub, '$emit');
+ jest.spyOn(Tracking, 'event');
return createComponent();
});
afterEach(() => wrapper.destroy());
+ describe('event tracking', () => {
+ it('fires a tracking event when the component is created', () => {
+ const eventData = { label: EVENT_LABEL_VIEWER, property: language };
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
+ });
+
+ it('does not emit an error event when the language is supported', () => {
+ expect(wrapper.emitted('error')).toBeUndefined();
+ });
+
+ it('fires a tracking event and emits an error when the language is not supported', () => {
+ const unsupportedLanguage = 'apex';
+ const eventData = { label: EVENT_LABEL_FALLBACK, property: unsupportedLanguage };
+ createComponent({ language: unsupportedLanguage });
+
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
+ expect(wrapper.emitted('error')).toHaveLength(1);
+ });
+ });
+
describe('highlight.js', () => {
beforeEach(() => createComponent({ language: mappedLanguage }));
it('registers our plugins for Highlight.js', () => {
- expect(registerPlugins).toHaveBeenCalledWith(hljs);
+ expect(registerPlugins).toHaveBeenCalledWith(hljs, fileType, content);
});
it('registers the language definition', async () => {
@@ -74,6 +102,13 @@ describe('Source Viewer component', () => {
);
});
+ it('registers json language definition if fileType is package_json', async () => {
+ await createComponent({ language: 'json', fileType: 'package_json' });
+ const languageDefinition = await import(`highlight.js/lib/languages/json`);
+
+ expect(hljs.registerLanguage).toHaveBeenCalledWith('json', languageDefinition.default);
+ });
+
it('highlights the first chunk', () => {
expect(hljs.highlight).toHaveBeenCalledWith(chunk1.trim(), { language: mappedLanguage });
});
diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
index a54f3450633..9550368eefc 100644
--- a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
+++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
@@ -2,7 +2,6 @@ import { GlSkeletonLoader, GlIcon } from '@gitlab/ui';
import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { AVAILABILITY_STATUS } from '~/set_status_modal/utils';
-import UserNameWithStatus from '~/sidebar/components/assignees/user_name_with_status.vue';
import UserPopover from '~/vue_shared/components/user_popover/user_popover.vue';
import axios from '~/lib/utils/axios_utils';
import createFlash from '~/flash';
@@ -48,7 +47,6 @@ describe('User Popover Component', () => {
const findUserStatus = () => wrapper.findByTestId('user-popover-status');
const findTarget = () => document.querySelector('.js-user-link');
- const findUserName = () => wrapper.find(UserNameWithStatus);
const findSecurityBotDocsLink = () => wrapper.findByTestId('user-popover-bot-docs-link');
const findUserLocalTime = () => wrapper.findByTestId('user-popover-local-time');
const findToggleFollowButton = () => wrapper.findByTestId('toggle-follow-button');
@@ -245,9 +243,7 @@ describe('User Popover Component', () => {
createWrapper({ user });
- expect(findUserName().exists()).toBe(true);
- expect(wrapper.text()).toContain(user.name);
- expect(wrapper.text()).toContain('(Busy)');
+ expect(wrapper.findByText('(Busy)').exists()).toBe(true);
});
it('should hide the busy status for any other status', () => {
@@ -258,13 +254,32 @@ describe('User Popover Component', () => {
createWrapper({ user });
- expect(wrapper.text()).not.toContain('(Busy)');
+ expect(wrapper.findByText('(Busy)').exists()).toBe(false);
});
- it('passes `pronouns` prop to `UserNameWithStatus` component', () => {
+ it('shows pronouns when user has them set', () => {
createWrapper();
- expect(findUserName().props('pronouns')).toBe('they/them');
+ expect(wrapper.findByText('(they/them)').exists()).toBe(true);
+ });
+
+ describe.each`
+ pronouns
+ ${undefined}
+ ${null}
+ ${''}
+ ${' '}
+ `('when pronouns are set to $pronouns', ({ pronouns }) => {
+ it('does not render pronouns', () => {
+ const user = {
+ ...DEFAULT_PROPS.user,
+ pronouns,
+ };
+
+ createWrapper({ user });
+
+ expect(wrapper.findByTestId('user-popover-pronouns').exists()).toBe(false);
+ });
});
});
diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
index 70017903079..80f14dffd08 100644
--- a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
+++ b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
@@ -39,6 +39,8 @@ describe('IssuableItem', () => {
const originalUrl = gon.gitlab_url;
let wrapper;
+ const findTimestampWrapper = () => wrapper.find('[data-testid="issuable-timestamp"]');
+
beforeEach(() => {
gon.gitlab_url = MOCK_GITLAB_URL;
});
@@ -150,12 +152,54 @@ describe('IssuableItem', () => {
});
});
- describe('updatedAt', () => {
- it('returns string containing timeago string based on `issuable.updatedAt`', () => {
+ describe('timestamp', () => {
+ it('returns timestamp based on `issuable.updatedAt` when the issue is open', () => {
wrapper = createComponent();
- expect(wrapper.vm.updatedAt).toContain('updated');
- expect(wrapper.vm.updatedAt).toContain('ago');
+ expect(findTimestampWrapper().attributes('title')).toBe('Sep 10, 2020 11:41am UTC');
+ });
+
+ it('returns timestamp based on `issuable.closedAt` when the issue is closed', () => {
+ wrapper = createComponent({
+ issuable: { ...mockIssuable, closedAt: '2020-06-18T11:30:00Z', state: 'closed' },
+ });
+
+ expect(findTimestampWrapper().attributes('title')).toBe('Jun 18, 2020 11:30am UTC');
+ });
+
+ it('returns timestamp based on `issuable.updatedAt` when the issue is closed but `issuable.closedAt` is undefined', () => {
+ wrapper = createComponent({
+ issuable: { ...mockIssuable, closedAt: undefined, state: 'closed' },
+ });
+
+ expect(findTimestampWrapper().attributes('title')).toBe('Sep 10, 2020 11:41am UTC');
+ });
+ });
+
+ describe('formattedTimestamp', () => {
+ it('returns timeago string based on `issuable.updatedAt` when the issue is open', () => {
+ wrapper = createComponent();
+
+ expect(findTimestampWrapper().text()).toContain('updated');
+ expect(findTimestampWrapper().text()).toContain('ago');
+ });
+
+ it('returns timeago string based on `issuable.closedAt` when the issue is closed', () => {
+ wrapper = createComponent({
+ issuable: { ...mockIssuable, closedAt: '2020-06-18T11:30:00Z', state: 'closed' },
+ });
+
+ expect(findTimestampWrapper().text()).toContain('closed');
+ expect(findTimestampWrapper().text()).toContain('ago');
+ });
+
+ it('returns timeago string based on `issuable.updatedAt` when the issue is closed but `issuable.closedAt` is undefined', () => {
+ wrapper = createComponent({
+ issuable: { ...mockIssuable, closedAt: undefined, state: 'closed' },
+ });
+
+ expect(findTimestampWrapper().text()).toContain('updated');
+ expect(findTimestampWrapper().text()).toContain('ago');
});
});
@@ -456,18 +500,31 @@ describe('IssuableItem', () => {
it('renders issuable updatedAt info', () => {
wrapper = createComponent();
- const updatedAtEl = wrapper.find('[data-testid="issuable-updated-at"]');
+ const timestampEl = wrapper.find('[data-testid="issuable-timestamp"]');
- expect(updatedAtEl.attributes('title')).toBe('Sep 10, 2020 11:41am UTC');
- expect(updatedAtEl.text()).toBe(wrapper.vm.updatedAt);
+ expect(timestampEl.attributes('title')).toBe('Sep 10, 2020 11:41am UTC');
+ expect(timestampEl.text()).toBe(wrapper.vm.formattedTimestamp);
});
describe('when issuable is closed', () => {
it('renders issuable card with a closed style', () => {
- wrapper = createComponent({ issuable: { ...mockIssuable, closedAt: '2020-12-10' } });
+ wrapper = createComponent({
+ issuable: { ...mockIssuable, closedAt: '2020-12-10', state: 'closed' },
+ });
expect(wrapper.classes()).toContain('closed');
});
+
+ it('renders issuable closedAt info and does not render updatedAt info', () => {
+ wrapper = createComponent({
+ issuable: { ...mockIssuable, closedAt: '2022-06-18T11:30:00Z', state: 'closed' },
+ });
+
+ const timestampEl = wrapper.find('[data-testid="issuable-timestamp"]');
+
+ expect(timestampEl.attributes('title')).toBe('Jun 18, 2022 11:30am UTC');
+ expect(timestampEl.text()).toBe(wrapper.vm.formattedTimestamp);
+ });
});
describe('when issuable was created within the past 24 hours', () => {
diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
index 66f71c0b028..50e79dbe589 100644
--- a/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
+++ b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
@@ -9,6 +9,7 @@ import IssuableItem from '~/vue_shared/issuable/list/components/issuable_item.vu
import IssuableListRoot from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
import IssuableTabs from '~/vue_shared/issuable/list/components/issuable_tabs.vue';
import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
+import PageSizeSelector from '~/vue_shared/components/page_size_selector.vue';
import { mockIssuableListProps, mockIssuables } from '../mock_data';
@@ -44,6 +45,7 @@ describe('IssuableListRoot', () => {
const findIssuableItem = () => wrapper.findComponent(IssuableItem);
const findIssuableTabs = () => wrapper.findComponent(IssuableTabs);
const findVueDraggable = () => wrapper.findComponent(VueDraggable);
+ const findPageSizeSelector = () => wrapper.findComponent(PageSizeSelector);
afterEach(() => {
wrapper.destroy();
@@ -292,6 +294,7 @@ describe('IssuableListRoot', () => {
});
expect(findGlKeysetPagination().exists()).toBe(false);
+ expect(findPageSizeSelector().exists()).toBe(false);
expect(findGlPagination().props()).toMatchObject({
perPage: 20,
value: 1,
@@ -483,4 +486,24 @@ describe('IssuableListRoot', () => {
});
});
});
+
+ describe('page size selector', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ props: {
+ showPageSizeChangeControls: true,
+ },
+ });
+ });
+
+ it('has the page size change component', async () => {
+ expect(findPageSizeSelector().exists()).toBe(true);
+ });
+
+ it('emits "page-size-change" event when its input is changed', () => {
+ const pageSize = 123;
+ findPageSizeSelector().vm.$emit('input', pageSize);
+ expect(wrapper.emitted('page-size-change')).toEqual([[pageSize]]);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js
index 5aa67667033..6f62fb77353 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js
@@ -70,7 +70,7 @@ describe('IssuableTitle', () => {
expect(titleEl.exists()).toBe(true);
expect(titleEl.html()).toBe(
- '<h1 dir="auto" data-testid="title" class="title qa-title gl-font-size-h-display"><b>Sample</b> title</h1>',
+ '<h1 dir="auto" data-qa-selector="title_content" data-testid="title" class="title gl-font-size-h-display"><b>Sample</b> title</h1>',
);
wrapperWithTitle.destroy();
diff --git a/spec/frontend/work_items/components/item_title_spec.js b/spec/frontend/work_items/components/item_title_spec.js
index 2c3f6ef8634..a55f448c9a2 100644
--- a/spec/frontend/work_items/components/item_title_spec.js
+++ b/spec/frontend/work_items/components/item_title_spec.js
@@ -1,5 +1,4 @@
import { shallowMount } from '@vue/test-utils';
-import { escape } from 'lodash';
import ItemTitle from '~/work_items/components/item_title.vue';
jest.mock('lodash/escape', () => jest.fn((fn) => fn));
@@ -51,6 +50,5 @@ describe('ItemTitle', () => {
await findInputEl().trigger(sourceEvent);
expect(wrapper.emitted(eventName)).toBeTruthy();
- expect(escape).toHaveBeenCalledWith(mockUpdatedTitle);
});
});
diff --git a/spec/frontend/work_items/components/work_item_assignees_spec.js b/spec/frontend/work_items/components/work_item_assignees_spec.js
index 0552fe5050e..299949a4baa 100644
--- a/spec/frontend/work_items/components/work_item_assignees_spec.js
+++ b/spec/frontend/work_items/components/work_item_assignees_spec.js
@@ -1,52 +1,90 @@
-import { GlLink, GlTokenSelector } from '@gitlab/ui';
-import { nextTick } from 'vue';
+import { GlLink, GlTokenSelector, GlSkeletonLoader } from '@gitlab/ui';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { mockTracking } from 'helpers/tracking_helper';
+import { stripTypenames } from 'helpers/graphql_helpers';
+import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
+import userSearchQuery from '~/graphql_shared/queries/users_search.query.graphql';
+import currentUserQuery from '~/graphql_shared/queries/current_user.query.graphql';
+import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
import WorkItemAssignees from '~/work_items/components/work_item_assignees.vue';
-import localUpdateWorkItemMutation from '~/work_items/graphql/local_update_work_item.mutation.graphql';
-
-const mockAssignees = [
- {
- __typename: 'UserCore',
- id: 'gid://gitlab/User/1',
- avatarUrl: '',
- webUrl: '',
- name: 'John Doe',
- username: 'doe_I',
- },
- {
- __typename: 'UserCore',
- id: 'gid://gitlab/User/2',
- avatarUrl: '',
- webUrl: '',
- name: 'Marcus Rutherford',
- username: 'ruthfull',
- },
-];
+import { i18n, TASK_TYPE_NAME, TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
+import { temporaryConfig, resolvers } from '~/work_items/graphql/provider';
+import {
+ projectMembersResponseWithCurrentUser,
+ mockAssignees,
+ workItemQueryResponse,
+ currentUserResponse,
+ currentUserNullResponse,
+ projectMembersResponseWithoutCurrentUser,
+} from '../mock_data';
-const workItemId = 'gid://gitlab/WorkItem/1';
+Vue.use(VueApollo);
-const mutate = jest.fn();
+const workItemId = 'gid://gitlab/WorkItem/1';
+const dropdownItems = projectMembersResponseWithCurrentUser.data.workspace.users.nodes;
describe('WorkItemAssignees component', () => {
let wrapper;
const findAssigneeLinks = () => wrapper.findAllComponents(GlLink);
const findTokenSelector = () => wrapper.findComponent(GlTokenSelector);
+ const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
const findEmptyState = () => wrapper.findByTestId('empty-state');
+ const findAssignSelfButton = () => wrapper.findByTestId('assign-self');
+ const findAssigneesTitle = () => wrapper.findByTestId('assignees-title');
+
+ const successSearchQueryHandler = jest
+ .fn()
+ .mockResolvedValue(projectMembersResponseWithCurrentUser);
+ const successCurrentUserQueryHandler = jest.fn().mockResolvedValue(currentUserResponse);
+ const noCurrentUserQueryHandler = jest.fn().mockResolvedValue(currentUserNullResponse);
+
+ const errorHandler = jest.fn().mockRejectedValue('Houston, we have a problem');
+
+ const createComponent = ({
+ assignees = mockAssignees,
+ searchQueryHandler = successSearchQueryHandler,
+ currentUserQueryHandler = successCurrentUserQueryHandler,
+ allowsMultipleAssignees = true,
+ canUpdate = true,
+ } = {}) => {
+ const apolloProvider = createMockApollo(
+ [
+ [userSearchQuery, searchQueryHandler],
+ [currentUserQuery, currentUserQueryHandler],
+ ],
+ resolvers,
+ {
+ typePolicies: temporaryConfig.cacheConfig.typePolicies,
+ },
+ );
+
+ apolloProvider.clients.defaultClient.writeQuery({
+ query: workItemQuery,
+ variables: {
+ id: workItemId,
+ },
+ data: workItemQueryResponse.data,
+ });
- const createComponent = ({ assignees = mockAssignees } = {}) => {
wrapper = mountExtended(WorkItemAssignees, {
+ provide: {
+ fullPath: 'test-project-path',
+ },
propsData: {
assignees,
workItemId,
- },
- mocks: {
- $apollo: {
- mutate,
- },
+ allowsMultipleAssignees,
+ workItemType: TASK_TYPE_NAME,
+ canUpdate,
},
attachTo: document.body,
+ apolloProvider,
});
};
@@ -54,39 +92,316 @@ describe('WorkItemAssignees component', () => {
wrapper.destroy();
});
- it('should pass the correct data-user-id attribute', () => {
+ it('passes the correct data-user-id attribute', () => {
createComponent();
expect(findAssigneeLinks().at(0).attributes('data-user-id')).toBe('1');
});
- describe('when there are assignees', () => {
+ it('container does not have shadow by default', () => {
+ createComponent();
+ expect(findTokenSelector().props('containerClass')).toBe('gl-shadow-none!');
+ });
+
+ it('container has shadow after focusing token selector', async () => {
+ createComponent();
+ findTokenSelector().vm.$emit('focus');
+ await nextTick();
+
+ expect(findTokenSelector().props('containerClass')).toBe('');
+ });
+
+ it('focuses token selector on token selector input event', async () => {
+ createComponent();
+ findTokenSelector().vm.$emit('input', [mockAssignees[0]]);
+ await nextTick();
+
+ expect(findEmptyState().exists()).toBe(false);
+ expect(findTokenSelector().element.contains(document.activeElement)).toBe(true);
+ });
+
+ it('calls a mutation on clicking outside the token selector', async () => {
+ createComponent();
+ findTokenSelector().vm.$emit('input', [mockAssignees[0]]);
+ findTokenSelector().vm.$emit('blur', new FocusEvent({ relatedTarget: null }));
+ await waitForPromises();
+
+ expect(findTokenSelector().props('selectedTokens')).toEqual([mockAssignees[0]]);
+ });
+
+ it('passes `false` to `viewOnly` token selector prop if user can update assignees', () => {
+ createComponent();
+
+ expect(findTokenSelector().props('viewOnly')).toBe(false);
+ });
+
+ it('passes `true` to `viewOnly` token selector prop if user can not update assignees', () => {
+ createComponent({ canUpdate: false });
+
+ expect(findTokenSelector().props('viewOnly')).toBe(true);
+ });
+
+ describe('when searching for users', () => {
beforeEach(() => {
createComponent();
});
- it('should focus token selector on token removal', async () => {
- findTokenSelector().vm.$emit('token-remove', mockAssignees[0].id);
+ it('does not start user search by default', () => {
+ expect(findTokenSelector().props('loading')).toBe(false);
+ expect(findTokenSelector().props('dropdownItems')).toEqual([]);
+ });
+
+ it('starts user search on hovering for more than 250ms', async () => {
+ findTokenSelector().trigger('mouseover');
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
await nextTick();
- expect(findEmptyState().exists()).toBe(false);
- expect(findTokenSelector().element.contains(document.activeElement)).toBe(true);
+ expect(findTokenSelector().props('loading')).toBe(true);
});
- it('should call a mutation on clicking outside the token selector', async () => {
- findTokenSelector().vm.$emit('input', [mockAssignees[0]]);
- findTokenSelector().vm.$emit('token-remove');
+ it('starts user search on focusing token selector', async () => {
+ findTokenSelector().vm.$emit('focus');
await nextTick();
- expect(mutate).not.toHaveBeenCalled();
- findTokenSelector().vm.$emit('blur', new FocusEvent({ relatedTarget: null }));
+ expect(findTokenSelector().props('loading')).toBe(true);
+ });
+
+ it('does not start searching if token-selector was hovered for less than 250ms', async () => {
+ findTokenSelector().trigger('mouseover');
+ jest.advanceTimersByTime(100);
+ await nextTick();
+
+ expect(findTokenSelector().props('loading')).toBe(false);
+ });
+
+ it('does not start searching if cursor was moved out from token selector before 250ms passed', async () => {
+ findTokenSelector().trigger('mouseover');
+ jest.advanceTimersByTime(100);
+
+ findTokenSelector().trigger('mouseout');
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
+ await nextTick();
+
+ expect(findTokenSelector().props('loading')).toBe(false);
+ });
+
+ it('shows skeleton loader on dropdown when loading users', async () => {
+ findTokenSelector().vm.$emit('focus');
await nextTick();
- expect(mutate).toHaveBeenCalledWith({
- mutation: localUpdateWorkItemMutation,
- variables: {
- input: { id: workItemId, assigneeIds: [mockAssignees[0].id] },
- },
+ expect(findSkeletonLoader().exists()).toBe(true);
+ });
+
+ it('shows correct users list in dropdown when loaded', async () => {
+ findTokenSelector().vm.$emit('focus');
+ await nextTick();
+
+ expect(findSkeletonLoader().exists()).toBe(true);
+
+ await waitForPromises();
+
+ expect(findSkeletonLoader().exists()).toBe(false);
+ expect(findTokenSelector().props('dropdownItems')).toHaveLength(2);
+ });
+
+ it('should search for users with correct key after text input', async () => {
+ const searchKey = 'Hello';
+
+ findTokenSelector().vm.$emit('focus');
+ findTokenSelector().vm.$emit('text-input', searchKey);
+ await waitForPromises();
+
+ expect(successSearchQueryHandler).toHaveBeenCalledWith(
+ expect.objectContaining({ search: searchKey }),
+ );
+ });
+ });
+
+ it('emits error event if search users query fails', async () => {
+ createComponent({ searchQueryHandler: errorHandler });
+ findTokenSelector().vm.$emit('focus');
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[i18n.fetchError]]);
+ });
+
+ describe('when assigning to current user', () => {
+ it('does not show `Assign myself` button if current user is loading', () => {
+ createComponent();
+ findTokenSelector().trigger('mouseover');
+
+ expect(findAssignSelfButton().exists()).toBe(false);
+ });
+
+ it('does not show `Assign myself` button if work item has assignees', async () => {
+ createComponent();
+ await waitForPromises();
+ findTokenSelector().trigger('mouseover');
+
+ expect(findAssignSelfButton().exists()).toBe(false);
+ });
+
+ it('does not show `Assign myself` button if user is not logged in', async () => {
+ createComponent({ currentUserQueryHandler: noCurrentUserQueryHandler, assignees: [] });
+ await waitForPromises();
+ findTokenSelector().trigger('mouseover');
+
+ expect(findAssignSelfButton().exists()).toBe(false);
+ });
+ });
+
+ describe('when user is logged in and there are no assignees', () => {
+ beforeEach(() => {
+ createComponent({ assignees: [] });
+ return waitForPromises();
+ });
+
+ it('renders `Assign myself` button', async () => {
+ findTokenSelector().trigger('mouseover');
+ expect(findAssignSelfButton().exists()).toBe(true);
+ });
+
+ it('calls update work item assignees mutation with current user as a variable on button click', () => {
+ // TODO: replace this test as soon as we have a real mutation implemented
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockImplementation(jest.fn());
+
+ findTokenSelector().trigger('mouseover');
+ findAssignSelfButton().vm.$emit('click', new MouseEvent('click'));
+
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith(
+ expect.objectContaining({
+ variables: {
+ input: {
+ assignees: [stripTypenames(currentUserResponse.data.currentUser)],
+ id: workItemId,
+ },
+ },
+ }),
+ );
+ });
+ });
+
+ it('moves current user to the top of dropdown items if user is a project member', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(findTokenSelector().props('dropdownItems')[0]).toEqual(
+ expect.objectContaining({
+ ...stripTypenames(currentUserResponse.data.currentUser),
+ }),
+ );
+ });
+
+ describe('when current user is not in the list of project members', () => {
+ const searchQueryHandler = jest
+ .fn()
+ .mockResolvedValue(projectMembersResponseWithoutCurrentUser);
+
+ beforeEach(() => {
+ createComponent({ searchQueryHandler });
+ return waitForPromises();
+ });
+
+ it('adds current user to the top of dropdown items', () => {
+ expect(findTokenSelector().props('dropdownItems')[0]).toEqual(
+ stripTypenames(currentUserResponse.data.currentUser),
+ );
+ });
+
+ it('does not add current user if search is not empty', async () => {
+ findTokenSelector().vm.$emit('text-input', 'test');
+ await waitForPromises();
+
+ expect(findTokenSelector().props('dropdownItems')[0]).not.toEqual(
+ stripTypenames(currentUserResponse.data.currentUser),
+ );
+ });
+ });
+
+ it('has `Assignee` label when only one assignee is present', () => {
+ createComponent({ assignees: [mockAssignees[0]] });
+
+ expect(findAssigneesTitle().text()).toBe('Assignee');
+ });
+
+ it('has `Assignees` label if more than one assignee is present', () => {
+ createComponent();
+
+ expect(findAssigneesTitle().text()).toBe('Assignees');
+ });
+
+ describe('when multiple assignees are allowed', () => {
+ beforeEach(() => {
+ createComponent({ allowsMultipleAssignees: true, assignees: [] });
+ return waitForPromises();
+ });
+
+ it('has `Add assignees` text on placeholder', () => {
+ expect(findEmptyState().text()).toContain('Add assignees');
+ });
+
+ it('adds multiple assignees when token-selector provides multiple values', async () => {
+ findTokenSelector().vm.$emit('input', dropdownItems);
+ await nextTick();
+
+ expect(findTokenSelector().props('selectedTokens')).toHaveLength(2);
+ });
+ });
+
+ describe('when multiple assignees are not allowed', () => {
+ beforeEach(() => {
+ createComponent({ allowsMultipleAssignees: false, assignees: [] });
+ return waitForPromises();
+ });
+
+ it('has `Add assignee` text on placeholder', () => {
+ expect(findEmptyState().text()).toContain('Add assignee');
+ expect(findEmptyState().text()).not.toContain('Add assignees');
+ });
+
+ it('adds a single assignee when token-selector provides multiple values', async () => {
+ findTokenSelector().vm.$emit('input', dropdownItems);
+ await nextTick();
+
+ expect(findTokenSelector().props('selectedTokens')).toHaveLength(1);
+ });
+
+ it('removes shadow after token-selector input', async () => {
+ findTokenSelector().vm.$emit('input', dropdownItems);
+ await nextTick();
+
+ expect(findTokenSelector().props('containerClass')).toBe('gl-shadow-none!');
+ });
+ });
+
+ describe('tracking', () => {
+ let trackingSpy;
+
+ beforeEach(() => {
+ createComponent();
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
+
+ afterEach(() => {
+ trackingSpy = null;
+ });
+
+ it('does not track updating assignees until token selector blur event', async () => {
+ findTokenSelector().vm.$emit('input', [mockAssignees[0]]);
+ await waitForPromises();
+
+ expect(trackingSpy).not.toHaveBeenCalled();
+ });
+
+ it('tracks editing the assignees on token selector blur', async () => {
+ findTokenSelector().vm.$emit('input', [mockAssignees[0]]);
+ findTokenSelector().vm.$emit('blur', new FocusEvent({ relatedTarget: null }));
+ await waitForPromises();
+
+ expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'updated_assignees', {
+ category: TRACKING_CATEGORY_SHOW,
+ label: 'item_assignees',
+ property: 'type_Task',
});
});
});
diff --git a/spec/frontend/work_items/components/work_item_detail_modal_spec.js b/spec/frontend/work_items/components/work_item_detail_modal_spec.js
index d55ba318e46..70b1261bdb7 100644
--- a/spec/frontend/work_items/components/work_item_detail_modal_spec.js
+++ b/spec/frontend/work_items/components/work_item_detail_modal_spec.js
@@ -66,6 +66,7 @@ describe('WorkItemDetailModal component', () => {
createComponent();
expect(findWorkItemDetail().props()).toEqual({
+ isModal: true,
workItemId: '1',
workItemParentId: '2',
});
@@ -98,6 +99,15 @@ describe('WorkItemDetailModal component', () => {
expect(wrapper.emitted('close')).toBeTruthy();
});
+ it('hides the modal when WorkItemDetail emits `close` event', () => {
+ createComponent();
+ const closeSpy = jest.spyOn(wrapper.vm.$refs.modal, 'hide');
+
+ findWorkItemDetail().vm.$emit('close');
+
+ expect(closeSpy).toHaveBeenCalled();
+ });
+
describe('delete work item', () => {
it('emits workItemDeleted and closes modal', async () => {
createComponent();
diff --git a/spec/frontend/work_items/components/work_item_information_spec.js b/spec/frontend/work_items/components/work_item_information_spec.js
new file mode 100644
index 00000000000..d5f6921c2bc
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_information_spec.js
@@ -0,0 +1,48 @@
+import { mount } from '@vue/test-utils';
+import { GlAlert, GlLink } from '@gitlab/ui';
+import WorkItemInformation from '~/work_items/components/work_item_information.vue';
+import { helpPagePath } from '~/helpers/help_page_helper';
+
+const createComponent = () => mount(WorkItemInformation);
+
+describe('Work item information alert', () => {
+ let wrapper;
+ const tasksHelpPath = helpPagePath('user/tasks');
+ const workItemsHelpPath = helpPagePath('development/work_items');
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findHelpLink = () => wrapper.findComponent(GlLink);
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should be visible', () => {
+ expect(findAlert().exists()).toBe(true);
+ });
+
+ it('should emit `work-item-banner-dismissed` event when the dismiss icon is clicked', () => {
+ findAlert().vm.$emit('dismiss');
+ expect(wrapper.emitted('work-item-banner-dismissed').length).toBe(1);
+ });
+
+ it('should have the tip variant', () => {
+ expect(findAlert().props('variant')).toBe('tip');
+ });
+
+ it('should have the correct text for primary button and link', () => {
+ expect(findAlert().props('title')).toBe(WorkItemInformation.i18n.tasksInformationTitle);
+ expect(findAlert().props('primaryButtonText')).toBe(
+ WorkItemInformation.i18n.learnTasksButtonText,
+ );
+ expect(findAlert().props('primaryButtonLink')).toBe(tasksHelpPath);
+ });
+
+ it('should have the correct link to the work items help page', () => {
+ expect(findHelpLink().exists()).toBe(true);
+ expect(findHelpLink().attributes('href')).toBe(workItemsHelpPath);
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_labels_spec.js b/spec/frontend/work_items/components/work_item_labels_spec.js
new file mode 100644
index 00000000000..1734b901d1a
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_labels_spec.js
@@ -0,0 +1,171 @@
+import { GlTokenSelector, GlSkeletonLoader } from '@gitlab/ui';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
+import labelSearchQuery from '~/vue_shared/components/sidebar/labels_select_widget/graphql/project_labels.query.graphql';
+import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
+import WorkItemLabels from '~/work_items/components/work_item_labels.vue';
+import { i18n } from '~/work_items/constants';
+import { temporaryConfig, resolvers } from '~/work_items/graphql/provider';
+import { projectLabelsResponse, mockLabels, workItemQueryResponse } from '../mock_data';
+
+Vue.use(VueApollo);
+
+const workItemId = 'gid://gitlab/WorkItem/1';
+
+describe('WorkItemLabels component', () => {
+ let wrapper;
+
+ const findTokenSelector = () => wrapper.findComponent(GlTokenSelector);
+ const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
+
+ const findEmptyState = () => wrapper.findByTestId('empty-state');
+
+ const successSearchQueryHandler = jest.fn().mockResolvedValue(projectLabelsResponse);
+ const errorHandler = jest.fn().mockRejectedValue('Houston, we have a problem');
+
+ const createComponent = ({
+ labels = mockLabels,
+ canUpdate = true,
+ searchQueryHandler = successSearchQueryHandler,
+ } = {}) => {
+ const apolloProvider = createMockApollo([[labelSearchQuery, searchQueryHandler]], resolvers, {
+ typePolicies: temporaryConfig.cacheConfig.typePolicies,
+ });
+
+ apolloProvider.clients.defaultClient.writeQuery({
+ query: workItemQuery,
+ variables: {
+ id: workItemId,
+ },
+ data: workItemQueryResponse.data,
+ });
+
+ wrapper = mountExtended(WorkItemLabels, {
+ provide: {
+ fullPath: 'test-project-path',
+ },
+ propsData: {
+ labels,
+ workItemId,
+ canUpdate,
+ },
+ attachTo: document.body,
+ apolloProvider,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('focuses token selector on token selector input event', async () => {
+ createComponent();
+ findTokenSelector().vm.$emit('input', [mockLabels[0]]);
+ await nextTick();
+
+ expect(findEmptyState().exists()).toBe(false);
+ expect(findTokenSelector().element.contains(document.activeElement)).toBe(true);
+ });
+
+ it('does not start search by default', () => {
+ createComponent();
+
+ expect(findTokenSelector().props('loading')).toBe(false);
+ expect(findTokenSelector().props('dropdownItems')).toEqual([]);
+ });
+
+ it('starts search on hovering for more than 250ms', async () => {
+ createComponent();
+ findTokenSelector().trigger('mouseover');
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
+ await nextTick();
+
+ expect(findTokenSelector().props('loading')).toBe(true);
+ });
+
+ it('starts search on focusing token selector', async () => {
+ createComponent();
+ findTokenSelector().vm.$emit('focus');
+ await nextTick();
+
+ expect(findTokenSelector().props('loading')).toBe(true);
+ });
+
+ it('does not start searching if token-selector was hovered for less than 250ms', async () => {
+ createComponent();
+ findTokenSelector().trigger('mouseover');
+ jest.advanceTimersByTime(100);
+ await nextTick();
+
+ expect(findTokenSelector().props('loading')).toBe(false);
+ });
+
+ it('does not start searching if cursor was moved out from token selector before 250ms passed', async () => {
+ createComponent();
+ findTokenSelector().trigger('mouseover');
+ jest.advanceTimersByTime(100);
+
+ findTokenSelector().trigger('mouseout');
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
+ await nextTick();
+
+ expect(findTokenSelector().props('loading')).toBe(false);
+ });
+
+ it('shows skeleton loader on dropdown when loading', async () => {
+ createComponent();
+ findTokenSelector().vm.$emit('focus');
+ await nextTick();
+
+ expect(findSkeletonLoader().exists()).toBe(true);
+ });
+
+ it('shows the list of labels in the dropdown when loaded', async () => {
+ createComponent();
+ findTokenSelector().vm.$emit('focus');
+ await nextTick();
+
+ expect(findSkeletonLoader().exists()).toBe(true);
+
+ await waitForPromises();
+
+ expect(findSkeletonLoader().exists()).toBe(false);
+ expect(findTokenSelector().props('dropdownItems')).toHaveLength(2);
+ });
+
+ it.each([true, false])(
+ 'derives token selector viewOnly prop from canUpdate=%s',
+ async (canUpdate) => {
+ createComponent({ canUpdate });
+
+ await waitForPromises();
+
+ expect(findTokenSelector().props('viewOnly')).toBe(!canUpdate);
+ },
+ );
+
+ it('emits error event if search query fails', async () => {
+ createComponent({ searchQueryHandler: errorHandler });
+ findTokenSelector().vm.$emit('focus');
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[i18n.fetchError]]);
+ });
+
+ it('should search for labels with correct key after text input', async () => {
+ const searchKey = 'Hello';
+
+ createComponent();
+ findTokenSelector().vm.$emit('focus');
+ findTokenSelector().vm.$emit('text-input', searchKey);
+ await waitForPromises();
+
+ expect(successSearchQueryHandler).toHaveBeenCalledWith(
+ expect.objectContaining({ search: searchKey }),
+ );
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
new file mode 100644
index 00000000000..93bf7286aa7
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
@@ -0,0 +1,65 @@
+import Vue from 'vue';
+import { GlForm, GlFormCombobox } from '@gitlab/ui';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import WorkItemLinksForm from '~/work_items/components/work_item_links/work_item_links_form.vue';
+import projectWorkItemsQuery from '~/work_items/graphql/project_work_items.query.graphql';
+import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
+import { availableWorkItemsResponse, updateWorkItemMutationResponse } from '../../mock_data';
+
+Vue.use(VueApollo);
+
+describe('WorkItemLinksForm', () => {
+ let wrapper;
+
+ const updateMutationResolver = jest.fn().mockResolvedValue(updateWorkItemMutationResponse);
+
+ const createComponent = async ({ listResponse = availableWorkItemsResponse } = {}) => {
+ wrapper = shallowMountExtended(WorkItemLinksForm, {
+ apolloProvider: createMockApollo([
+ [projectWorkItemsQuery, jest.fn().mockResolvedValue(listResponse)],
+ [updateWorkItemMutation, updateMutationResolver],
+ ]),
+ propsData: { issuableGid: 'gid://gitlab/WorkItem/1' },
+ provide: {
+ projectPath: 'project/path',
+ },
+ });
+
+ await waitForPromises();
+ };
+
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findCombobox = () => wrapper.findComponent(GlFormCombobox);
+ const findAddChildButton = () => wrapper.findByTestId('add-child-button');
+
+ beforeEach(async () => {
+ await createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders form', () => {
+ expect(findForm().exists()).toBe(true);
+ });
+
+ it('passes available work items as prop when typing in combobox', async () => {
+ findCombobox().vm.$emit('input', 'Task');
+ await waitForPromises();
+
+ expect(findCombobox().exists()).toBe(true);
+ expect(findCombobox().props('tokenList').length).toBe(2);
+ });
+
+ it('selects and adds a child', async () => {
+ findCombobox().vm.$emit('input', availableWorkItemsResponse.data.workspace.workItems.edges[0]);
+
+ findAddChildButton().vm.$emit('click');
+ await waitForPromises();
+ expect(updateMutationResolver).toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js
new file mode 100644
index 00000000000..f8471b7f167
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js
@@ -0,0 +1,141 @@
+import Vue from 'vue';
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { cloneDeep } from 'lodash';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import WorkItemLinksMenu from '~/work_items/components/work_item_links/work_item_links_menu.vue';
+import changeWorkItemParentMutation from '~/work_items/graphql/change_work_item_parent_link.mutation.graphql';
+import getWorkItemLinksQuery from '~/work_items/graphql/work_item_links.query.graphql';
+import { WIDGET_TYPE_HIERARCHY } from '~/work_items/constants';
+import { workItemHierarchyResponse, changeWorkItemParentMutationResponse } from '../../mock_data';
+
+Vue.use(VueApollo);
+
+const PARENT_ID = 'gid://gitlab/WorkItem/1';
+const WORK_ITEM_ID = 'gid://gitlab/WorkItem/3';
+
+describe('WorkItemLinksMenu', () => {
+ let wrapper;
+ let mockApollo;
+
+ const $toast = {
+ show: jest.fn(),
+ };
+
+ const createComponent = async ({
+ data = {},
+ mutationHandler = jest.fn().mockResolvedValue(changeWorkItemParentMutationResponse),
+ } = {}) => {
+ mockApollo = createMockApollo([
+ [getWorkItemLinksQuery, jest.fn().mockResolvedValue(workItemHierarchyResponse)],
+ [changeWorkItemParentMutation, mutationHandler],
+ ]);
+
+ mockApollo.clients.defaultClient.cache.writeQuery({
+ query: getWorkItemLinksQuery,
+ variables: {
+ id: PARENT_ID,
+ },
+ data: workItemHierarchyResponse.data,
+ });
+
+ wrapper = shallowMountExtended(WorkItemLinksMenu, {
+ data() {
+ return {
+ ...data,
+ };
+ },
+ propsData: {
+ workItemId: WORK_ITEM_ID,
+ parentWorkItemId: PARENT_ID,
+ },
+ apolloProvider: mockApollo,
+ mocks: {
+ $toast,
+ },
+ });
+
+ await waitForPromises();
+ };
+
+ const findDropdown = () => wrapper.find(GlDropdown);
+ const findRemoveDropdownItem = () => wrapper.find(GlDropdownItem);
+
+ beforeEach(async () => {
+ await createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ mockApollo = null;
+ });
+
+ it('renders dropdown and dropdown items', () => {
+ expect(findDropdown().exists()).toBe(true);
+ expect(findRemoveDropdownItem().exists()).toBe(true);
+ });
+
+ it('calls correct mutation with correct variables', async () => {
+ const mutationHandler = jest.fn().mockResolvedValue(changeWorkItemParentMutationResponse);
+
+ createComponent({ mutationHandler });
+
+ findRemoveDropdownItem().vm.$emit('click');
+
+ await waitForPromises();
+
+ expect(mutationHandler).toHaveBeenCalledWith({
+ id: WORK_ITEM_ID,
+ parentId: null,
+ });
+ });
+
+ it('shows toast when mutation succeeds', async () => {
+ const mutationHandler = jest.fn().mockResolvedValue(changeWorkItemParentMutationResponse);
+
+ createComponent({ mutationHandler });
+
+ findRemoveDropdownItem().vm.$emit('click');
+
+ await waitForPromises();
+
+ expect($toast.show).toHaveBeenCalledWith('Child removed', {
+ action: { onClick: expect.anything(), text: 'Undo' },
+ });
+ });
+
+ it('updates the cache when mutation succeeds', async () => {
+ const mutationHandler = jest.fn().mockResolvedValue(changeWorkItemParentMutationResponse);
+
+ createComponent({ mutationHandler });
+
+ mockApollo.clients.defaultClient.cache.readQuery = jest.fn(
+ () => workItemHierarchyResponse.data,
+ );
+
+ mockApollo.clients.defaultClient.cache.writeQuery = jest.fn();
+
+ findRemoveDropdownItem().vm.$emit('click');
+
+ await waitForPromises();
+
+ // Remove the work item from parent's children
+ const resp = cloneDeep(workItemHierarchyResponse);
+ const index = resp.data.workItem.widgets
+ .find((widget) => widget.type === WIDGET_TYPE_HIERARCHY)
+ .children.nodes.findIndex((child) => child.id === WORK_ITEM_ID);
+ resp.data.workItem.widgets
+ .find((widget) => widget.type === WIDGET_TYPE_HIERARCHY)
+ .children.nodes.splice(index, 1);
+
+ expect(mockApollo.clients.defaultClient.cache.writeQuery).toHaveBeenCalledWith(
+ expect.objectContaining({
+ query: expect.anything(),
+ variables: { id: PARENT_ID },
+ data: resp.data,
+ }),
+ );
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
index 774e9198992..2ec9b1ec0ac 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
@@ -51,6 +51,20 @@ describe('WorkItemLinks', () => {
expect(findLinksBody().exists()).toBe(false);
});
+ describe('add link form', () => {
+ it('displays form on click add button and hides form on cancel', async () => {
+ findToggleAddFormButton().vm.$emit('click');
+ await nextTick();
+
+ expect(findAddLinksForm().exists()).toBe(true);
+
+ findAddLinksForm().vm.$emit('cancel');
+ await nextTick();
+
+ expect(findAddLinksForm().exists()).toBe(false);
+ });
+ });
+
describe('when no child links', () => {
beforeEach(async () => {
await createComponent({ response: workItemHierarchyEmptyResponse });
@@ -59,22 +73,6 @@ describe('WorkItemLinks', () => {
it('displays empty state if there are no children', () => {
expect(findEmptyState().exists()).toBe(true);
});
-
- describe('add link form', () => {
- it('displays form on click add button and hides form on cancel', async () => {
- expect(findEmptyState().exists()).toBe(true);
-
- findToggleAddFormButton().vm.$emit('click');
- await nextTick();
-
- expect(findAddLinksForm().exists()).toBe(true);
-
- findAddLinksForm().vm.$emit('cancel');
- await nextTick();
-
- expect(findAddLinksForm().exists()).toBe(false);
- });
- });
});
it('renders all hierarchy widget children', () => {
diff --git a/spec/frontend/work_items/components/work_item_weight_spec.js b/spec/frontend/work_items/components/work_item_weight_spec.js
index 80a1d032ad7..c3bbea26cda 100644
--- a/spec/frontend/work_items/components/work_item_weight_spec.js
+++ b/spec/frontend/work_items/components/work_item_weight_spec.js
@@ -1,21 +1,51 @@
-import { shallowMount } from '@vue/test-utils';
+import { GlForm, GlFormInput } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { mockTracking } from 'helpers/tracking_helper';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { __ } from '~/locale';
import WorkItemWeight from '~/work_items/components/work_item_weight.vue';
+import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
+import localUpdateWorkItemMutation from '~/work_items/graphql/local_update_work_item.mutation.graphql';
-describe('WorkItemAssignees component', () => {
+describe('WorkItemWeight component', () => {
let wrapper;
- const createComponent = ({ weight, hasIssueWeightsFeature = true } = {}) => {
- wrapper = shallowMount(WorkItemWeight, {
+ const mutateSpy = jest.fn();
+ const workItemId = 'gid://gitlab/WorkItem/1';
+ const workItemType = 'Task';
+
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findInput = () => wrapper.findComponent(GlFormInput);
+
+ const createComponent = ({
+ canUpdate = false,
+ hasIssueWeightsFeature = true,
+ isEditing = false,
+ weight,
+ } = {}) => {
+ wrapper = mountExtended(WorkItemWeight, {
propsData: {
+ canUpdate,
weight,
+ workItemId,
+ workItemType,
},
provide: {
hasIssueWeightsFeature,
},
+ mocks: {
+ $apollo: {
+ mutate: mutateSpy,
+ },
+ },
});
+
+ if (isEditing) {
+ findInput().vm.$emit('focus');
+ }
};
- describe('weight licensed feature', () => {
+ describe('`issue_weights` licensed feature', () => {
describe.each`
description | hasIssueWeightsFeature | exists
${'when available'} | ${true} | ${true}
@@ -24,23 +54,111 @@ describe('WorkItemAssignees component', () => {
it(hasIssueWeightsFeature ? 'renders component' : 'does not render component', () => {
createComponent({ hasIssueWeightsFeature });
- expect(wrapper.find('div').exists()).toBe(exists);
+ expect(findForm().exists()).toBe(exists);
});
});
});
- describe('weight text', () => {
- describe.each`
- description | weight | text
- ${'renders 1'} | ${1} | ${'1'}
- ${'renders 0'} | ${0} | ${'0'}
- ${'renders None'} | ${null} | ${'None'}
- ${'renders None'} | ${undefined} | ${'None'}
- `('when weight is $weight', ({ description, weight, text }) => {
- it(description, () => {
- createComponent({ weight });
-
- expect(wrapper.text()).toContain(text);
+ describe('weight input', () => {
+ it('has "Weight" label', () => {
+ createComponent();
+
+ expect(wrapper.findByLabelText(__('Weight')).exists()).toBe(true);
+ });
+
+ describe('placeholder attribute', () => {
+ describe.each`
+ description | isEditing | canUpdate | value
+ ${'when not editing and cannot update'} | ${false} | ${false} | ${__('None')}
+ ${'when editing and cannot update'} | ${true} | ${false} | ${__('None')}
+ ${'when not editing and can update'} | ${false} | ${true} | ${__('None')}
+ ${'when editing and can update'} | ${true} | ${true} | ${__('Enter a number')}
+ `('$description', ({ isEditing, canUpdate, value }) => {
+ it(`has a value of "${value}"`, async () => {
+ createComponent({ canUpdate, isEditing });
+ await nextTick();
+
+ expect(findInput().attributes('placeholder')).toBe(value);
+ });
+ });
+ });
+
+ describe('readonly attribute', () => {
+ describe.each`
+ description | canUpdate | value
+ ${'when cannot update'} | ${false} | ${'readonly'}
+ ${'when can update'} | ${true} | ${undefined}
+ `('$description', ({ canUpdate, value }) => {
+ it(`renders readonly=${value}`, () => {
+ createComponent({ canUpdate });
+
+ expect(findInput().attributes('readonly')).toBe(value);
+ });
+ });
+ });
+
+ describe('type attribute', () => {
+ describe.each`
+ description | isEditing | canUpdate | type
+ ${'when not editing and cannot update'} | ${false} | ${false} | ${'text'}
+ ${'when editing and cannot update'} | ${true} | ${false} | ${'text'}
+ ${'when not editing and can update'} | ${false} | ${true} | ${'text'}
+ ${'when editing and can update'} | ${true} | ${true} | ${'number'}
+ `('$description', ({ isEditing, canUpdate, type }) => {
+ it(`has a value of "${type}"`, async () => {
+ createComponent({ canUpdate, isEditing });
+ await nextTick();
+
+ expect(findInput().attributes('type')).toBe(type);
+ });
+ });
+ });
+
+ describe('value attribute', () => {
+ describe.each`
+ weight | value
+ ${1} | ${'1'}
+ ${0} | ${'0'}
+ ${null} | ${''}
+ ${undefined} | ${''}
+ `('when `weight` prop is "$weight"', ({ weight, value }) => {
+ it(`value is "${value}"`, () => {
+ createComponent({ weight });
+
+ expect(findInput().element.value).toBe(value);
+ });
+ });
+ });
+
+ describe('when blurred', () => {
+ it('calls a mutation to update the weight', () => {
+ const weight = 0;
+ createComponent({ isEditing: true, weight });
+
+ findInput().trigger('blur');
+
+ expect(mutateSpy).toHaveBeenCalledWith({
+ mutation: localUpdateWorkItemMutation,
+ variables: {
+ input: {
+ id: workItemId,
+ weight,
+ },
+ },
+ });
+ });
+
+ it('tracks updating the weight', () => {
+ const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ createComponent();
+
+ findInput().trigger('blur');
+
+ expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'updated_weight', {
+ category: TRACKING_CATEGORY_SHOW,
+ label: 'item_weight',
+ property: 'type_Task',
+ });
});
});
});
diff --git a/spec/frontend/work_items/mock_data.js b/spec/frontend/work_items/mock_data.js
index bf3f4e1364d..0359caf7116 100644
--- a/spec/frontend/work_items/mock_data.js
+++ b/spec/frontend/work_items/mock_data.js
@@ -1,3 +1,22 @@
+export const mockAssignees = [
+ {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/1',
+ avatarUrl: '',
+ webUrl: '',
+ name: 'John Doe',
+ username: 'doe_I',
+ },
+ {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/2',
+ avatarUrl: '',
+ webUrl: '',
+ name: 'Marcus Rutherford',
+ username: 'ruthfull',
+ },
+];
+
export const workItemQueryResponse = {
data: {
workItem: {
@@ -23,6 +42,32 @@ export const workItemQueryResponse = {
descriptionHtml:
'<p data-sourcepos="1:1-1:19" dir="auto">some <strong>great</strong> text</p>',
},
+ {
+ __typename: 'WorkItemWidgetAssignees',
+ type: 'ASSIGNEES',
+ allowsMultipleAssignees: true,
+ assignees: {
+ nodes: mockAssignees,
+ },
+ },
+ {
+ __typename: 'WorkItemWidgetHierarchy',
+ type: 'HIERARCHY',
+ parent: {
+ id: 'gid://gitlab/Issue/1',
+ iid: '5',
+ title: 'Parent title',
+ },
+ children: {
+ edges: [
+ {
+ node: {
+ id: 'gid://gitlab/WorkItem/444',
+ },
+ },
+ ],
+ },
+ },
],
},
},
@@ -47,13 +92,28 @@ export const updateWorkItemMutationResponse = {
deleteWorkItem: false,
updateWorkItem: false,
},
- widgets: [],
+ widgets: [
+ {
+ children: {
+ edges: [
+ {
+ node: 'gid://gitlab/WorkItem/444',
+ },
+ ],
+ },
+ },
+ ],
},
},
},
};
-export const workItemResponseFactory = ({ canUpdate } = {}) => ({
+export const workItemResponseFactory = ({
+ canUpdate = false,
+ allowsMultipleAssignees = true,
+ assigneesWidgetPresent = true,
+ parent = null,
+} = {}) => ({
data: {
workItem: {
__typename: 'WorkItem',
@@ -78,6 +138,30 @@ export const workItemResponseFactory = ({ canUpdate } = {}) => ({
descriptionHtml:
'<p data-sourcepos="1:1-1:19" dir="auto">some <strong>great</strong> text</p>',
},
+ assigneesWidgetPresent
+ ? {
+ __typename: 'WorkItemWidgetAssignees',
+ type: 'ASSIGNEES',
+ allowsMultipleAssignees,
+ assignees: {
+ nodes: mockAssignees,
+ },
+ }
+ : { type: 'MOCK TYPE' },
+ {
+ __typename: 'WorkItemWidgetHierarchy',
+ type: 'HIERARCHY',
+ children: {
+ edges: [
+ {
+ node: {
+ id: 'gid://gitlab/WorkItem/444',
+ },
+ },
+ ],
+ },
+ parent,
+ },
],
},
},
@@ -140,13 +224,45 @@ export const createWorkItemFromTaskMutationResponse = {
__typename: 'WorkItemCreateFromTaskPayload',
errors: [],
workItem: {
- descriptionHtml: '<p>New description</p>',
- id: 'gid://gitlab/WorkItem/13',
__typename: 'WorkItem',
+ description: 'New description',
+ id: 'gid://gitlab/WorkItem/1',
+ title: 'Updated title',
+ state: 'OPEN',
+ workItemType: {
+ __typename: 'WorkItemType',
+ id: 'gid://gitlab/WorkItems::Type/5',
+ name: 'Task',
+ },
userPermissions: {
deleteWorkItem: false,
updateWorkItem: false,
},
+ widgets: [
+ {
+ __typename: 'WorkItemWidgetDescription',
+ type: 'DESCRIPTION',
+ description: 'New description',
+ descriptionHtml: '<p>New description</p>',
+ },
+ ],
+ },
+ newWorkItem: {
+ __typename: 'WorkItem',
+ id: 'gid://gitlab/WorkItem/1000000',
+ title: 'Updated title',
+ state: 'OPEN',
+ description: '',
+ workItemType: {
+ __typename: 'WorkItemType',
+ id: 'gid://gitlab/WorkItems::Type/5',
+ name: 'Task',
+ },
+ userPermissions: {
+ deleteWorkItem: false,
+ updateWorkItem: false,
+ },
+ widgets: [],
},
},
},
@@ -275,3 +391,171 @@ export const workItemHierarchyResponse = {
},
},
};
+
+export const changeWorkItemParentMutationResponse = {
+ data: {
+ workItemUpdate: {
+ workItem: {
+ id: 'gid://gitlab/WorkItem/2',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/5',
+ __typename: 'WorkItemType',
+ },
+ title: 'Foo',
+ state: 'OPEN',
+ __typename: 'WorkItem',
+ },
+ errors: [],
+ __typename: 'WorkItemUpdatePayload',
+ },
+ },
+};
+
+export const availableWorkItemsResponse = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/2',
+ workItems: {
+ edges: [
+ {
+ node: {
+ id: 'gid://gitlab/WorkItem/458',
+ title: 'Task 1',
+ state: 'OPEN',
+ },
+ },
+ {
+ node: {
+ id: 'gid://gitlab/WorkItem/459',
+ title: 'Task 2',
+ state: 'OPEN',
+ },
+ },
+ ],
+ },
+ },
+ },
+};
+
+export const projectMembersResponseWithCurrentUser = {
+ data: {
+ workspace: {
+ id: '1',
+ __typename: 'Project',
+ users: {
+ nodes: [
+ {
+ id: 'user-2',
+ user: {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/5',
+ avatarUrl: '/avatar2',
+ name: 'rookie',
+ username: 'rookie',
+ webUrl: 'rookie',
+ status: null,
+ },
+ },
+ {
+ id: 'user-1',
+ user: {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: '/root',
+ status: null,
+ },
+ },
+ ],
+ },
+ },
+ },
+};
+
+export const projectMembersResponseWithoutCurrentUser = {
+ data: {
+ workspace: {
+ id: '1',
+ __typename: 'Project',
+ users: {
+ nodes: [
+ {
+ id: 'user-2',
+ user: {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/5',
+ avatarUrl: '/avatar2',
+ name: 'rookie',
+ username: 'rookie',
+ webUrl: 'rookie',
+ status: null,
+ },
+ },
+ ],
+ },
+ },
+ },
+};
+
+export const currentUserResponse = {
+ data: {
+ currentUser: {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: '/root',
+ },
+ },
+};
+
+export const currentUserNullResponse = {
+ data: {
+ currentUser: null,
+ },
+};
+
+export const mockLabels = [
+ {
+ __typename: 'Label',
+ id: 'gid://gitlab/Label/1',
+ title: 'Label 1',
+ description: '',
+ color: '#f00',
+ textColor: '#00f',
+ },
+ {
+ __typename: 'Label',
+ id: 'gid://gitlab/Label/2',
+ title: 'Label 2',
+ description: '',
+ color: '#b00',
+ textColor: '#00b',
+ },
+];
+
+export const projectLabelsResponse = {
+ data: {
+ workspace: {
+ id: '1',
+ __typename: 'Project',
+ labels: {
+ nodes: mockLabels,
+ },
+ },
+ },
+};
+
+export const mockParent = {
+ parent: {
+ id: 'gid://gitlab/Issue/1',
+ iid: '5',
+ title: 'Parent title',
+ },
+};
diff --git a/spec/frontend/work_items/pages/create_work_item_spec.js b/spec/frontend/work_items/pages/create_work_item_spec.js
index e89477ed599..fed8be3783a 100644
--- a/spec/frontend/work_items/pages/create_work_item_spec.js
+++ b/spec/frontend/work_items/pages/create_work_item_spec.js
@@ -9,11 +9,7 @@ import ItemTitle from '~/work_items/components/item_title.vue';
import projectWorkItemTypesQuery from '~/work_items/graphql/project_work_item_types.query.graphql';
import createWorkItemMutation from '~/work_items/graphql/create_work_item.mutation.graphql';
import createWorkItemFromTaskMutation from '~/work_items/graphql/create_work_item_from_task.mutation.graphql';
-import {
- projectWorkItemTypesQueryResponse,
- createWorkItemMutationResponse,
- createWorkItemFromTaskMutationResponse,
-} from '../mock_data';
+import { projectWorkItemTypesQueryResponse, createWorkItemMutationResponse } from '../mock_data';
jest.mock('~/lib/utils/uuids', () => ({ uuids: () => ['testuuid'] }));
@@ -25,9 +21,6 @@ describe('Create work item component', () => {
const querySuccessHandler = jest.fn().mockResolvedValue(projectWorkItemTypesQueryResponse);
const createWorkItemSuccessHandler = jest.fn().mockResolvedValue(createWorkItemMutationResponse);
- const createWorkItemFromTaskSuccessHandler = jest
- .fn()
- .mockResolvedValue(createWorkItemFromTaskMutationResponse);
const errorHandler = jest.fn().mockRejectedValue('Houston, we have a problem');
const findAlert = () => wrapper.findComponent(GlAlert);
@@ -122,49 +115,6 @@ describe('Create work item component', () => {
});
});
- describe('when displayed in a modal', () => {
- beforeEach(() => {
- createComponent({
- props: {
- isModal: true,
- },
- mutationHandler: createWorkItemFromTaskSuccessHandler,
- });
- });
-
- it('emits `closeModal` event on Cancel button click', () => {
- findCancelButton().vm.$emit('click');
-
- expect(wrapper.emitted('closeModal')).toEqual([[]]);
- });
-
- it('emits `onCreate` on successful mutation', async () => {
- findTitleInput().vm.$emit('title-input', 'Test title');
-
- wrapper.find('form').trigger('submit');
- await waitForPromises();
-
- expect(wrapper.emitted('onCreate')).toEqual([['<p>New description</p>']]);
- });
-
- it('does not right margin for create button', () => {
- expect(findCreateButton().classes()).not.toContain('gl-mr-3');
- });
-
- it('adds right margin for cancel button', () => {
- expect(findCancelButton().classes()).toContain('gl-mr-3');
- });
-
- it('adds padding for content', () => {
- expect(findContent().classes('gl-px-5')).toBe(true);
- });
-
- it('defaults type to `Task`', async () => {
- await waitForPromises();
- expect(findSelect().attributes('value')).toBe('gid://gitlab/WorkItems::Type/3');
- });
- });
-
it('displays a loading icon inside dropdown when work items query is loading', () => {
createComponent();
diff --git a/spec/frontend/work_items/pages/work_item_detail_spec.js b/spec/frontend/work_items/pages/work_item_detail_spec.js
index b9724034cb4..43869468ad0 100644
--- a/spec/frontend/work_items/pages/work_item_detail_spec.js
+++ b/spec/frontend/work_items/pages/work_item_detail_spec.js
@@ -1,26 +1,36 @@
-import { GlAlert, GlSkeletonLoader } from '@gitlab/ui';
+import { GlAlert, GlSkeletonLoader, GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import WorkItemDetail from '~/work_items/components/work_item_detail.vue';
import WorkItemDescription from '~/work_items/components/work_item_description.vue';
import WorkItemState from '~/work_items/components/work_item_state.vue';
import WorkItemTitle from '~/work_items/components/work_item_title.vue';
import WorkItemAssignees from '~/work_items/components/work_item_assignees.vue';
+import WorkItemLabels from '~/work_items/components/work_item_labels.vue';
import WorkItemWeight from '~/work_items/components/work_item_weight.vue';
+import WorkItemInformation from '~/work_items/components/work_item_information.vue';
import { i18n } from '~/work_items/constants';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
import workItemTitleSubscription from '~/work_items/graphql/work_item_title.subscription.graphql';
import { temporaryConfig } from '~/work_items/graphql/provider';
-import { workItemTitleSubscriptionResponse, workItemQueryResponse } from '../mock_data';
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+import {
+ workItemTitleSubscriptionResponse,
+ workItemResponseFactory,
+ mockParent,
+} from '../mock_data';
describe('WorkItemDetail component', () => {
let wrapper;
+ useLocalStorageSpy();
Vue.use(VueApollo);
+ const workItemQueryResponse = workItemResponseFactory();
const successHandler = jest.fn().mockResolvedValue(workItemQueryResponse);
const initialSubscriptionHandler = jest.fn().mockResolvedValue(workItemTitleSubscriptionResponse);
@@ -30,9 +40,17 @@ describe('WorkItemDetail component', () => {
const findWorkItemState = () => wrapper.findComponent(WorkItemState);
const findWorkItemDescription = () => wrapper.findComponent(WorkItemDescription);
const findWorkItemAssignees = () => wrapper.findComponent(WorkItemAssignees);
+ const findWorkItemLabels = () => wrapper.findComponent(WorkItemLabels);
const findWorkItemWeight = () => wrapper.findComponent(WorkItemWeight);
+ const findParent = () => wrapper.find('[data-testid="work-item-parent"]');
+ const findParentButton = () => findParent().findComponent(GlButton);
+ const findCloseButton = () => wrapper.find('[data-testid="work-item-close"]');
+ const findWorkItemType = () => wrapper.find('[data-testid="work-item-type"]');
+ const findWorkItemInformationAlert = () => wrapper.findComponent(WorkItemInformation);
+ const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
const createComponent = ({
+ isModal = false,
workItemId = workItemQueryResponse.data.workItem.id,
handler = successHandler,
subscriptionHandler = initialSubscriptionHandler,
@@ -50,7 +68,7 @@ describe('WorkItemDetail component', () => {
typePolicies: includeWidgets ? temporaryConfig.cacheConfig.typePolicies : {},
},
),
- propsData: { workItemId },
+ propsData: { isModal, workItemId },
provide: {
glFeatures: {
workItemsMvc2: workItemsMvc2Enabled,
@@ -98,6 +116,36 @@ describe('WorkItemDetail component', () => {
});
});
+ describe('close button', () => {
+ describe('when isModal prop is false', () => {
+ it('does not render', async () => {
+ createComponent({ isModal: false });
+ await waitForPromises();
+
+ expect(findCloseButton().exists()).toBe(false);
+ });
+ });
+
+ describe('when isModal prop is true', () => {
+ it('renders', async () => {
+ createComponent({ isModal: true });
+ await waitForPromises();
+
+ expect(findCloseButton().props('icon')).toBe('close');
+ expect(findCloseButton().attributes('aria-label')).toBe('Close');
+ });
+
+ it('emits `close` event when clicked', async () => {
+ createComponent({ isModal: true });
+ await waitForPromises();
+
+ findCloseButton().vm.$emit('click');
+
+ expect(wrapper.emitted('close')).toEqual([[]]);
+ });
+ });
+ });
+
describe('description', () => {
it('does not show description widget if loading description fails', () => {
createComponent();
@@ -107,13 +155,56 @@ describe('WorkItemDetail component', () => {
it('shows description widget if description loads', async () => {
createComponent();
-
await waitForPromises();
expect(findWorkItemDescription().exists()).toBe(true);
});
});
+ describe('secondary breadcrumbs', () => {
+ it('does not show secondary breadcrumbs by default', () => {
+ createComponent();
+
+ expect(findParent().exists()).toBe(false);
+ });
+
+ it('does not show secondary breadcrumbs if there is no parent', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findParent().exists()).toBe(false);
+ });
+
+ it('shows the work item type if there is no parent', async () => {
+ createComponent();
+
+ await waitForPromises();
+ expect(findWorkItemType().exists()).toBe(true);
+ });
+
+ describe('with parent', () => {
+ beforeEach(() => {
+ const parentResponse = workItemResponseFactory(mockParent);
+ createComponent({ handler: jest.fn().mockResolvedValue(parentResponse) });
+
+ return waitForPromises();
+ });
+
+ it('shows secondary breadcrumbs if there is a parent', () => {
+ expect(findParent().exists()).toBe(true);
+ });
+
+ it('does not show work item type', async () => {
+ expect(findWorkItemType().exists()).toBe(false);
+ });
+
+ it('sets the parent breadcrumb URL', () => {
+ expect(findParentButton().attributes().href).toBe('../../issues/5');
+ });
+ });
+ });
+
it('shows an error message when the work item query was unsuccessful', async () => {
const errorHandler = jest.fn().mockRejectedValue('Oops');
createComponent({ handler: errorHandler });
@@ -145,7 +236,6 @@ describe('WorkItemDetail component', () => {
it('renders assignees component when assignees widget is returned from the API', async () => {
createComponent({
workItemsMvc2Enabled: true,
- includeWidgets: true,
});
await waitForPromises();
@@ -155,7 +245,9 @@ describe('WorkItemDetail component', () => {
it('does not render assignees component when assignees widget is not returned from the API', async () => {
createComponent({
workItemsMvc2Enabled: true,
- includeWidgets: false,
+ handler: jest
+ .fn()
+ .mockResolvedValue(workItemResponseFactory({ assigneesWidgetPresent: false })),
});
await waitForPromises();
@@ -170,6 +262,19 @@ describe('WorkItemDetail component', () => {
expect(findWorkItemAssignees().exists()).toBe(false);
});
+ describe('labels widget', () => {
+ it.each`
+ description | includeWidgets | exists
+ ${'renders when widget is returned from API'} | ${true} | ${true}
+ ${'does not render when widget is not returned from API'} | ${false} | ${false}
+ `('$description', async ({ includeWidgets, exists }) => {
+ createComponent({ includeWidgets, workItemsMvc2Enabled: true });
+ await waitForPromises();
+
+ expect(findWorkItemLabels().exists()).toBe(exists);
+ });
+ });
+
describe('weight widget', () => {
describe('when work_items_mvc_2 feature flag is enabled', () => {
describe.each`
@@ -201,4 +306,22 @@ describe('WorkItemDetail component', () => {
});
});
});
+
+ describe('work item information', () => {
+ beforeEach(() => {
+ createComponent();
+ return waitForPromises();
+ });
+
+ it('is visible when viewed for the first time and sets localStorage value', async () => {
+ localStorage.clear();
+ expect(findWorkItemInformationAlert().exists()).toBe(true);
+ expect(findLocalStorageSync().props('value')).toBe(true);
+ });
+
+ it('is not visible after reading local storage input', async () => {
+ await findLocalStorageSync().vm.$emit('input', false);
+ expect(findWorkItemInformationAlert().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/work_items/pages/work_item_root_spec.js b/spec/frontend/work_items/pages/work_item_root_spec.js
index 3c5da94114e..d9372f2bcf0 100644
--- a/spec/frontend/work_items/pages/work_item_root_spec.js
+++ b/spec/frontend/work_items/pages/work_item_root_spec.js
@@ -52,6 +52,7 @@ describe('Work items root component', () => {
createComponent();
expect(findWorkItemDetail().props()).toEqual({
+ isModal: false,
workItemId: 'gid://gitlab/WorkItem/1',
workItemParentId: null,
});
diff --git a/spec/frontend_integration/diffs/diffs_interopability_spec.js b/spec/frontend_integration/diffs/diffs_interopability_spec.js
index 064e3d21180..8e9bc4f0a5f 100644
--- a/spec/frontend_integration/diffs/diffs_interopability_spec.js
+++ b/spec/frontend_integration/diffs/diffs_interopability_spec.js
@@ -1,6 +1,7 @@
import { waitFor } from '@testing-library/dom';
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import initDiffsApp from '~/diffs';
import { createStore } from '~/mr_notes/stores';
import {
@@ -74,6 +75,10 @@ const startDiffsApp = () => {
describe('diffs third party interoperability', () => {
let vm;
+ beforeEach(() => {
+ stubPerformanceWebAPI();
+ });
+
afterEach(() => {
vm.$destroy();
document.body.innerHTML = '';
diff --git a/spec/frontend_integration/ide/ide_integration_spec.js b/spec/frontend_integration/ide/ide_integration_spec.js
index a002ce91deb..da48c600764 100644
--- a/spec/frontend_integration/ide/ide_integration_spec.js
+++ b/spec/frontend_integration/ide/ide_integration_spec.js
@@ -3,8 +3,9 @@ import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { setTestTimeout } from 'helpers/timeout';
import waitForPromises from 'helpers/wait_for_promises';
import { waitForText } from 'helpers/wait_for_text';
-import { createCommitId } from 'test_helpers/factories/commit_id';
import { useOverclockTimers } from 'test_helpers/utils/overclock_timers';
+import { createCommitId } from 'test_helpers/factories/commit_id';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import * as ideHelper from './helpers/ide_helper';
import startWebIDE from './helpers/start';
@@ -15,6 +16,7 @@ describe('WebIDE', () => {
let container;
beforeEach(() => {
+ stubPerformanceWebAPI();
// For some reason these tests were timing out in CI.
// We will investigate in https://gitlab.com/gitlab-org/gitlab/-/issues/298714
setTestTimeout(20000);
diff --git a/spec/frontend_integration/ide/user_opens_file_spec.js b/spec/frontend_integration/ide/user_opens_file_spec.js
index c3131f6ad45..af6e2f3b44b 100644
--- a/spec/frontend_integration/ide/user_opens_file_spec.js
+++ b/spec/frontend_integration/ide/user_opens_file_spec.js
@@ -1,6 +1,7 @@
import { screen } from '@testing-library/dom';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { useOverclockTimers } from 'test_helpers/utils/overclock_timers';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import * as ideHelper from './helpers/ide_helper';
import startWebIDE from './helpers/start';
@@ -11,6 +12,7 @@ describe('IDE: User opens a file in the Web IDE', () => {
let container;
beforeEach(async () => {
+ stubPerformanceWebAPI();
setHTMLFixture('<div class="webide-container"></div>');
container = document.querySelector('.webide-container');
diff --git a/spec/frontend_integration/ide/user_opens_ide_spec.js b/spec/frontend_integration/ide/user_opens_ide_spec.js
index b2b85452451..552888f04a5 100644
--- a/spec/frontend_integration/ide/user_opens_ide_spec.js
+++ b/spec/frontend_integration/ide/user_opens_ide_spec.js
@@ -1,6 +1,7 @@
import { screen } from '@testing-library/dom';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { useOverclockTimers } from 'test_helpers/utils/overclock_timers';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import * as ideHelper from './helpers/ide_helper';
import startWebIDE from './helpers/start';
@@ -11,6 +12,8 @@ describe('IDE: User opens IDE', () => {
let container;
beforeEach(() => {
+ stubPerformanceWebAPI();
+
setHTMLFixture('<div class="webide-container"></div>');
container = document.querySelector('.webide-container');
});
diff --git a/spec/frontend_integration/ide/user_opens_mr_spec.js b/spec/frontend_integration/ide/user_opens_mr_spec.js
index 084aae9f297..af0276a5055 100644
--- a/spec/frontend_integration/ide/user_opens_mr_spec.js
+++ b/spec/frontend_integration/ide/user_opens_mr_spec.js
@@ -2,6 +2,7 @@ import { basename } from 'path';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { getMergeRequests, getMergeRequestWithChanges } from 'test_helpers/fixtures';
import { useOverclockTimers } from 'test_helpers/utils/overclock_timers';
+import { stubPerformanceWebAPI } from 'helpers/performance';
import * as ideHelper from './helpers/ide_helper';
import startWebIDE from './helpers/start';
@@ -16,6 +17,8 @@ describe('IDE: User opens Merge Request', () => {
let changes;
beforeEach(async () => {
+ stubPerformanceWebAPI();
+
const [{ iid: mrId }] = getMergeRequests();
changes = getRelevantChanges();
diff --git a/spec/frontend_integration/snippets/snippets_notes_spec.js b/spec/frontend_integration/snippets/snippets_notes_spec.js
new file mode 100644
index 00000000000..fdd3289bf58
--- /dev/null
+++ b/spec/frontend_integration/snippets/snippets_notes_spec.js
@@ -0,0 +1,62 @@
+import $ from 'jquery';
+import axios from '~/lib/utils/axios_utils';
+import initGFMInput from '~/behaviors/markdown/gfm_auto_complete';
+import initDeprecatedNotes from '~/init_deprecated_notes';
+import { loadHTMLFixture } from 'helpers/fixtures';
+
+describe('Integration Snippets notes', () => {
+ beforeEach(async () => {
+ loadHTMLFixture('snippets/show.html');
+
+ // Initialize GFM autocomplete on inputs that have not been initialized yet
+ const $gfmInputs = $('.js-gfm-input:not(.js-gfm-input-initialized)');
+ initGFMInput($gfmInputs);
+
+ initDeprecatedNotes();
+ });
+
+ describe('emoji autocomplete', () => {
+ const findNoteTextarea = () => document.getElementById('note_note');
+ const findAtViewEmojiMenu = () => document.getElementById('at-view-58');
+ const findAtwhoResult = () => {
+ return Array.from(findAtViewEmojiMenu().querySelectorAll('li')).map((x) =>
+ x.innerText.trim(),
+ );
+ };
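+ // Simulate typing into the note textarea: focus it, set the value, then dispatch input and click so the emoji autocomplete menu opens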
+ const fillNoteTextarea = (val) => {
+ const textarea = findNoteTextarea();
+
+ textarea.dispatchEvent(new Event('focus'));
+ textarea.value = val;
+ textarea.dispatchEvent(new Event('input'));
+ textarea.dispatchEvent(new Event('click'));
+ };
+
+ it.each([
+ [
+ ':heart',
+ ['heart', 'heart decoration', 'heart with arrow', 'heart with ribbon', 'heart_exclamation'],
+ ],
+ [':red', ['red apple', 'red_car', 'red_circle', 'credit card', 'tired face']],
+ [
+ ':circle',
+ // TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/347549
+ // These autocompleted results aren't very good. The autocompletion should be improved.
+ [
+ 'circled ideograph accept',
+ 'circled ideograph advantage',
+ 'circled ideograph congratulation',
+ 'circled ideograph secret',
+ 'circled latin capital letter m',
+ ],
+ ],
+ ])('shows a correct list of matching emojis when user enters %s', async (input, expected) => {
+ fillNoteTextarea(input);
+
+ await axios.waitForAll();
+
+ const result = findAtwhoResult();
+ expect(result).toEqual(expected);
+ });
+ });
+});
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/emojis.js b/spec/frontend_integration/test_helpers/mock_server/routes/emojis.js
new file mode 100644
index 00000000000..64e9006a710
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/emojis.js
@@ -0,0 +1,9 @@
+import { Response } from 'miragejs';
+import emojis from 'public/-/emojis/2/emojis.json';
+import { EMOJI_VERSION } from '~/emoji';
+
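+// Mock server route for the emoji digest: requests for the versioned emojis.json resolve to the bundled fixture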
+export default (server) => {
+ server.get(`/-/emojis/${EMOJI_VERSION}/emojis.json`, () => {
+ return new Response(200, {}, emojis);
+ });
+};
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/index.js b/spec/frontend_integration/test_helpers/mock_server/routes/index.js
index 48eff2702dd..571a592456d 100644
--- a/spec/frontend_integration/test_helpers/mock_server/routes/index.js
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/index.js
@@ -6,6 +6,7 @@ export default (server) => {
require('./repository'),
require('./ci'),
require('./diffs'),
+ require('./emojis'),
require('./404'),
].forEach(({ default: setup }) => {
setup(server);
diff --git a/spec/graphql/mutations/design_management/delete_spec.rb b/spec/graphql/mutations/design_management/delete_spec.rb
index 93fff5e5103..79196d4965d 100644
--- a/spec/graphql/mutations/design_management/delete_spec.rb
+++ b/spec/graphql/mutations/design_management/delete_spec.rb
@@ -86,9 +86,11 @@ RSpec.describe Mutations::DesignManagement::Delete do
end
end
- it 'runs no more than 29 queries' do
+ it 'runs no more than 30 queries' do
+ allow(Gitlab::Tracking).to receive(:event) # rubocop:disable RSpec/ExpectGitlabTracking
+
filenames.each(&:present?) # ignore setup
- # Queries: as of 2021-07-22
+ # Queries: as of 2022-06-15
# -------------
# 01. routing query
# 02. find project by id
@@ -106,20 +108,21 @@ RSpec.describe Mutations::DesignManagement::Delete do
# 15. current designs by filename and issue
# 16, 17 project.authorizations for user (same query as 5)
# 18. find route by id and source_type
+ # 19. find plan for standard context
# ------------- our queries are below:
- # 19. start transaction 1
- # 20. start transaction 2
- # 21. find version by sha and issue
- # 22. exists version with sha and issue?
- # 23. leave transaction 2
- # 24. create version with sha and issue
- # 25. create design-version links
- # 26. validate version.actions.present?
- # 27. validate version.issue.present?
- # 28. validate version.sha is unique
- # 29. leave transaction 1
+ # 20. start transaction 1
+ # 21. start transaction 2
+ # 22. find version by sha and issue
+ # 23. exists version with sha and issue?
+ # 24. leave transaction 2
+ # 25. create version with sha and issue
+ # 26. create design-version links
+ # 27. validate version.actions.present?
+ # 28. validate version.issue.present?
+ # 29. validate version.sha is unique
+ # 30. leave transaction 1
#
- expect { run_mutation }.not_to exceed_query_limit(29)
+ expect { run_mutation }.not_to exceed_query_limit(30)
end
end
diff --git a/spec/graphql/mutations/pages/mark_onboarding_complete_spec.rb b/spec/graphql/mutations/pages/mark_onboarding_complete_spec.rb
new file mode 100644
index 00000000000..c4ceecb9d46
--- /dev/null
+++ b/spec/graphql/mutations/pages/mark_onboarding_complete_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Pages::MarkOnboardingComplete do
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:owner) { create(:user) }
+
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
+
+ let(:mutation_arguments) do
+ {
+ project_path: project.full_path
+ }
+ end
+
+ before_all do
+ project.add_owner(owner)
+ project.add_developer(developer)
+ end
+
+ describe '#resolve' do
+ subject(:resolve) do
+ mutation.resolve(**mutation_arguments)
+ end
+
+ context 'when the current user has access to update pages' do
+ let(:current_user) { owner }
+
+ it 'calls mark_pages_onboarding_complete on the project' do
+ allow_next_instance_of(::Project) do |project|
+ expect(project).to receive(:mark_pages_onboarding_complete)
+ end
+
+ resolve # run the mutation so the message expectation above is verified
+ end
+
+ it 'returns onboarding_complete state' do
+ expect(resolve).to include(onboarding_complete: true)
+ end
+
+ it 'returns no errors' do
+ expect(resolve).to include(errors: [])
+ end
+ end
+
+ context "when the current user doesn't have access to update pages" do
+ let(:current_user) { developer }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(
+ Gitlab::Graphql::Errors::ResourceNotAvailable,
+ Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR
+ )
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb b/spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb
index 59616815de0..ac7cef20df4 100644
--- a/spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Resolvers::Ci::JobTokenScopeResolver do
describe '#resolve' do
context 'with access to scope' do
before do
- project.add_user(current_user, :maintainer)
+ project.add_member(current_user, :maintainer)
end
it 'returns nil when scope is not enabled' do
@@ -51,7 +51,7 @@ RSpec.describe Resolvers::Ci::JobTokenScopeResolver do
context 'without access to scope' do
before do
- project.add_user(current_user, :developer)
+ project.add_member(current_user, :developer)
end
it 'generates an error' do
diff --git a/spec/graphql/resolvers/ci/runners_resolver_spec.rb b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
index b1f5f7b3e43..8586d359336 100644
--- a/spec/graphql/resolvers/ci/runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
@@ -52,6 +52,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver do
{
active: true,
status: 'active',
+ upgrade_status: 'recommended',
type: :instance_type,
tag_list: ['active_runner'],
search: 'abc',
@@ -63,6 +64,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver do
{
active: true,
status_status: 'active',
+ upgrade_status: 'recommended',
type_type: :instance_type,
tag_name: ['active_runner'],
preload: { tag_name: nil },
diff --git a/spec/graphql/resolvers/container_repositories_resolver_spec.rb b/spec/graphql/resolvers/container_repositories_resolver_spec.rb
index d7aa761320f..ed922259903 100644
--- a/spec/graphql/resolvers/container_repositories_resolver_spec.rb
+++ b/spec/graphql/resolvers/container_repositories_resolver_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe Resolvers::ContainerRepositoriesResolver do
context 'with authorized user' do
before do
- group.add_user(user, :maintainer)
+ group.add_member(user, :maintainer)
end
context 'when the object is a project' do
diff --git a/spec/graphql/resolvers/todo_resolver_spec.rb b/spec/graphql/resolvers/todos_resolver_spec.rb
index 0760935a2fe..40ca2de0385 100644
--- a/spec/graphql/resolvers/todo_resolver_spec.rb
+++ b/spec/graphql/resolvers/todos_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::TodoResolver do
+RSpec.describe Resolvers::TodosResolver do
include GraphqlHelpers
include DesignManagementTestHelpers
diff --git a/spec/graphql/resolvers/users/groups_resolver_spec.rb b/spec/graphql/resolvers/users/groups_resolver_spec.rb
index bbe9b6371cf..1e0e001fbf7 100644
--- a/spec/graphql/resolvers/users/groups_resolver_spec.rb
+++ b/spec/graphql/resolvers/users/groups_resolver_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Resolvers::Users::GroupsResolver do
let_it_be(:private_maintainer_group) { create(:group, :private, name: 'b private maintainer', path: 'b-private-maintainer') }
let_it_be(:public_developer_group) { create(:group, project_creation_level: nil, name: 'c public developer', path: 'c-public-developer') }
let_it_be(:public_maintainer_group) { create(:group, name: 'a public maintainer', path: 'a-public-maintainer') }
+ let_it_be(:public_owner_group) { create(:group, name: 'a public owner', path: 'a-public-owner') }
subject(:resolved_items) { resolve_groups(args: group_arguments, current_user: current_user, obj: resolver_object) }
@@ -24,6 +25,7 @@ RSpec.describe Resolvers::Users::GroupsResolver do
private_maintainer_group.add_maintainer(user)
public_developer_group.add_developer(user)
public_maintainer_group.add_maintainer(user)
+ public_owner_group.add_owner(user)
end
context 'when resolver object is current user' do
@@ -34,6 +36,7 @@ RSpec.describe Resolvers::Users::GroupsResolver do
is_expected.to match(
[
public_maintainer_group,
+ public_owner_group,
private_maintainer_group,
public_developer_group
]
@@ -41,10 +44,25 @@ RSpec.describe Resolvers::Users::GroupsResolver do
end
end
+ context 'when permission is :transfer_projects' do
+ let(:group_arguments) { { permission_scope: :transfer_projects } }
+
+ specify do
+ is_expected.to match(
+ [
+ public_maintainer_group,
+ public_owner_group,
+ private_maintainer_group
+ ]
+ )
+ end
+ end
+
specify do
is_expected.to match(
[
public_maintainer_group,
+ public_owner_group,
private_maintainer_group,
public_developer_group,
guest_group
@@ -82,6 +100,7 @@ RSpec.describe Resolvers::Users::GroupsResolver do
is_expected.to match(
[
public_maintainer_group,
+ public_owner_group,
private_maintainer_group,
public_developer_group,
guest_group
diff --git a/spec/graphql/types/ci/detailed_status_type_spec.rb b/spec/graphql/types/ci/detailed_status_type_spec.rb
index 0c05227aec2..686461cb9a5 100644
--- a/spec/graphql/types/ci/detailed_status_type_spec.rb
+++ b/spec/graphql/types/ci/detailed_status_type_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Types::Ci::DetailedStatusType do
:label, :text, :tooltip, :action)
end
- let_it_be(:stage) { create(:ci_stage_entity, status: :skipped) }
+ let_it_be(:stage) { create(:ci_stage, status: :skipped) }
describe 'id field' do
it 'correctly renders the field' do
diff --git a/spec/graphql/types/ci/job_token_scope_type_spec.rb b/spec/graphql/types/ci/job_token_scope_type_spec.rb
index 43225b2089b..c1a3c4dd54d 100644
--- a/spec/graphql/types/ci/job_token_scope_type_spec.rb
+++ b/spec/graphql/types/ci/job_token_scope_type_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe GitlabSchema.types['CiJobTokenScopeType'] do
context 'with access to scope' do
before do
- project.add_user(current_user, :maintainer)
+ project.add_member(current_user, :maintainer)
end
context 'when multiple projects in the allow list' do
@@ -46,7 +46,7 @@ RSpec.describe GitlabSchema.types['CiJobTokenScopeType'] do
context 'when linked projects are readable' do
before do
- link.target_project.add_user(current_user, :developer)
+ link.target_project.add_member(current_user, :developer)
end
it 'returns readable projects in scope' do
diff --git a/spec/graphql/types/ci/job_type_spec.rb b/spec/graphql/types/ci/job_type_spec.rb
index 655c3636883..bc9e64282bc 100644
--- a/spec/graphql/types/ci/job_type_spec.rb
+++ b/spec/graphql/types/ci/job_type_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe Types::Ci::JobType do
id
kind
manual_job
+ manual_variables
name
needs
pipeline
@@ -33,6 +34,7 @@ RSpec.describe Types::Ci::JobType do
refName
refPath
retryable
+ retried
scheduledAt
schedulingType
shortSha
diff --git a/spec/graphql/types/ci/runner_upgrade_status_type_enum_spec.rb b/spec/graphql/types/ci/runner_upgrade_status_type_enum_spec.rb
index 81a852471b9..03c784dcbe7 100644
--- a/spec/graphql/types/ci/runner_upgrade_status_type_enum_spec.rb
+++ b/spec/graphql/types/ci/runner_upgrade_status_type_enum_spec.rb
@@ -3,11 +3,22 @@
require 'spec_helper'
RSpec.describe Types::Ci::RunnerUpgradeStatusTypeEnum do
+ let(:model_only_enum_values) { %w[not_processed] }
+ let(:expected_graphql_source_values) do
+ Ci::RunnerVersion.statuses.keys - model_only_enum_values
+ end
+
specify { expect(described_class.graphql_name).to eq('CiRunnerUpgradeStatusType') }
- it 'exposes all upgrade status values' do
- expect(described_class.values.keys).to eq(
- ['UNKNOWN'] + ::Gitlab::Ci::RunnerUpgradeCheck::STATUSES.map { |sym, _| sym.to_s.upcase }
+ it 'exposes all upgrade status values except not_processed' do
+ expect(described_class.values.keys).to match_array(
+ expected_graphql_source_values
+ .map(&:upcase)
+ .map { |v| v == 'INVALID_VERSION' ? 'INVALID' : v }
)
end
+
+ it 'exposes all upgrade status values except model-only enum values' do
+ expect(described_class.values.values.map(&:value).map(&:to_s)).to match_array(expected_graphql_source_values)
+ end
end
diff --git a/spec/graphql/types/ci/status_action_type_spec.rb b/spec/graphql/types/ci/status_action_type_spec.rb
index 4c467bf240e..3f4b52610ae 100644
--- a/spec/graphql/types/ci/status_action_type_spec.rb
+++ b/spec/graphql/types/ci/status_action_type_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Types::Ci::StatusActionType do
describe 'id field' do
it 'correctly renders the field' do
- stage = build(:ci_stage_entity, status: :skipped)
+ stage = build(:ci_stage, status: :skipped)
status = stage.detailed_status(stage.pipeline.user)
expected_id = "#{stage.class.name}-#{status.id}"
diff --git a/spec/graphql/types/ci/variable_type_enum_spec.rb b/spec/graphql/types/ci/variable_type_enum_spec.rb
new file mode 100644
index 00000000000..5604caebfff
--- /dev/null
+++ b/spec/graphql/types/ci/variable_type_enum_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CiVariableType'] do
+ it 'matches the keys of Ci::Variable.variable_types' do
+ expect(described_class.values.keys).to contain_exactly('ENV_VAR', 'FILE')
+ end
+end
diff --git a/spec/graphql/types/ci/variable_type_spec.rb b/spec/graphql/types/ci/variable_type_spec.rb
new file mode 100644
index 00000000000..a81e6adbab6
--- /dev/null
+++ b/spec/graphql/types/ci/variable_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CiVariable'] do
+ it 'contains attributes related to CI variables' do
+ expect(described_class).to have_graphql_fields(
+ :id, :key, :value, :variable_type, :protected, :masked, :raw, :environment_scope
+ )
+ end
+end
diff --git a/spec/graphql/types/group_type_spec.rb b/spec/graphql/types/group_type_spec.rb
index 82703948cea..69c7eaf111f 100644
--- a/spec/graphql/types/group_type_spec.rb
+++ b/spec/graphql/types/group_type_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe GitlabSchema.types['Group'] do
dependency_proxy_blob_count dependency_proxy_total_size
dependency_proxy_image_prefix dependency_proxy_image_ttl_policy
shared_runners_setting timelogs organizations contacts work_item_types
- recent_issue_boards
+ recent_issue_boards ci_variables
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/issue_type_enum_spec.rb b/spec/graphql/types/issue_type_enum_spec.rb
index 131e92aa5ed..8f4b6f3bf74 100644
--- a/spec/graphql/types/issue_type_enum_spec.rb
+++ b/spec/graphql/types/issue_type_enum_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe Types::IssueTypeEnum do
specify { expect(described_class.graphql_name).to eq('IssueType') }
- it 'exposes all the existing issue type values except for task' do
+ it 'exposes all the existing issue type values' do
expect(described_class.values.keys).to match_array(
- %w[ISSUE INCIDENT TEST_CASE REQUIREMENT]
+ %w[ISSUE INCIDENT TEST_CASE REQUIREMENT TASK]
)
end
end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index 2e994bf7820..ed93d31da0f 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -36,7 +36,8 @@ RSpec.describe GitlabSchema.types['Project'] do
pipeline_analytics squash_read_only sast_ci_configuration
cluster_agent cluster_agents agent_configurations
ci_template timelogs merge_commit_template squash_commit_template work_item_types
- recent_issue_boards ci_config_path_or_default packages_cleanup_policy
+ recent_issue_boards ci_config_path_or_default packages_cleanup_policy ci_variables
+ recent_issue_boards ci_config_path_or_default ci_variables
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index 8b8c44c10f6..514d24a209e 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -30,6 +30,7 @@ RSpec.describe GitlabSchema.types['Query'] do
board_list
topics
gitpod_enabled
+ ci_variables
]
expect(described_class).to have_graphql_fields(*expected_fields).at_least
diff --git a/spec/graphql/types/release_type_spec.rb b/spec/graphql/types/release_type_spec.rb
index 0c05a68c5a6..a1dc8850f94 100644
--- a/spec/graphql/types/release_type_spec.rb
+++ b/spec/graphql/types/release_type_spec.rb
@@ -11,7 +11,8 @@ RSpec.describe GitlabSchema.types['Release'] do
description description_html
name milestones evidences author commit
assets links
- created_at released_at
+ created_at released_at upcoming_release
+ historical_release
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/work_items/widget_interface_spec.rb b/spec/graphql/types/work_items/widget_interface_spec.rb
index ee40bcc10ca..caf986c961f 100644
--- a/spec/graphql/types/work_items/widget_interface_spec.rb
+++ b/spec/graphql/types/work_items/widget_interface_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Types::WorkItems::WidgetInterface do
where(:widget_class, :widget_type_name) do
WorkItems::Widgets::Description | Types::WorkItems::Widgets::DescriptionType
WorkItems::Widgets::Hierarchy | Types::WorkItems::Widgets::HierarchyType
+ WorkItems::Widgets::Assignees | Types::WorkItems::Widgets::AssigneesType
end
with_them do
diff --git a/spec/graphql/types/work_items/widgets/assignees_type_spec.rb b/spec/graphql/types/work_items/widgets/assignees_type_spec.rb
new file mode 100644
index 00000000000..816e66f1db1
--- /dev/null
+++ b/spec/graphql/types/work_items/widgets/assignees_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::WorkItems::Widgets::AssigneesType do
+ it 'exposes the expected fields' do
+ expected_fields = %i[assignees allows_multiple_assignees can_invite_members type]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/work_items/widgets/description_input_type_spec.rb b/spec/graphql/types/work_items/widgets/description_input_type_spec.rb
new file mode 100644
index 00000000000..81c64bc38ab
--- /dev/null
+++ b/spec/graphql/types/work_items/widgets/description_input_type_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Types::WorkItems::Widgets::DescriptionInputType do
+ it { expect(described_class.graphql_name).to eq('WorkItemWidgetDescriptionInput') }
+
+ it { expect(described_class.arguments.keys).to match_array(%w[description]) }
+end
diff --git a/spec/graphql/types/work_items/widgets/hierarchy_update_input_type_spec.rb b/spec/graphql/types/work_items/widgets/hierarchy_update_input_type_spec.rb
new file mode 100644
index 00000000000..6221580605e
--- /dev/null
+++ b/spec/graphql/types/work_items/widgets/hierarchy_update_input_type_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Types::WorkItems::Widgets::HierarchyUpdateInputType do
+ it { expect(described_class.graphql_name).to eq('WorkItemWidgetHierarchyUpdateInput') }
+
+ it { expect(described_class.arguments.keys).to match_array(%w[parentId childrenIds]) }
+end
diff --git a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb
index 192e48f43e5..9c0f8b77d45 100644
--- a/spec/helpers/avatars_helper_spec.rb
+++ b/spec/helpers/avatars_helper_spec.rb
@@ -221,48 +221,56 @@ RSpec.describe AvatarsHelper do
stub_application_setting(gravatar_enabled?: true)
end
- it 'returns a generic avatar when email is blank' do
- expect(helper.gravatar_icon('')).to match_asset_path(described_class::DEFAULT_AVATAR_PATH)
- end
+ context 'with FIPS not enabled', fips_mode: false do
+ it 'returns a generic avatar when email is blank' do
+ expect(helper.gravatar_icon('')).to match_asset_path(described_class::DEFAULT_AVATAR_PATH)
+ end
- it 'returns a valid Gravatar URL' do
- stub_config_setting(https: false)
+ it 'returns a valid Gravatar URL' do
+ stub_config_setting(https: false)
- expect(helper.gravatar_icon(user_email))
- .to match('https://www.gravatar.com/avatar/b58c6f14d292556214bd64909bcdb118')
- end
+ expect(helper.gravatar_icon(user_email))
+ .to match('https://www.gravatar.com/avatar/b58c6f14d292556214bd64909bcdb118')
+ end
- it 'uses HTTPs when configured' do
- stub_config_setting(https: true)
+ it 'uses HTTPs when configured' do
+ stub_config_setting(https: true)
- expect(helper.gravatar_icon(user_email))
- .to match('https://secure.gravatar.com')
- end
+ expect(helper.gravatar_icon(user_email))
+ .to match('https://secure.gravatar.com')
+ end
- it 'returns custom gravatar path when gravatar_url is set' do
- stub_gravatar_setting(plain_url: 'http://example.local/?s=%{size}&hash=%{hash}')
+ it 'returns custom gravatar path when gravatar_url is set' do
+ stub_gravatar_setting(plain_url: 'http://example.local/?s=%{size}&hash=%{hash}')
- expect(gravatar_icon(user_email, 20))
- .to eq('http://example.local/?s=40&hash=b58c6f14d292556214bd64909bcdb118')
- end
+ expect(gravatar_icon(user_email, 20))
+ .to eq('http://example.local/?s=40&hash=b58c6f14d292556214bd64909bcdb118')
+ end
- it 'accepts a custom size argument' do
- expect(helper.gravatar_icon(user_email, 64)).to include '?s=128'
- end
+ it 'accepts a custom size argument' do
+ expect(helper.gravatar_icon(user_email, 64)).to include '?s=128'
+ end
- it 'defaults size to 40@2x when given an invalid size' do
- expect(helper.gravatar_icon(user_email, nil)).to include '?s=80'
- end
+ it 'defaults size to 40@2x when given an invalid size' do
+ expect(helper.gravatar_icon(user_email, nil)).to include '?s=80'
+ end
- it 'accepts a scaling factor' do
- expect(helper.gravatar_icon(user_email, 40, 3)).to include '?s=120'
- end
+ it 'accepts a scaling factor' do
+ expect(helper.gravatar_icon(user_email, 40, 3)).to include '?s=120'
+ end
- it 'ignores case and surrounding whitespace' do
- normal = helper.gravatar_icon('foo@example.com')
- upcase = helper.gravatar_icon(' FOO@EXAMPLE.COM ')
+ it 'ignores case and surrounding whitespace' do
+ normal = helper.gravatar_icon('foo@example.com')
+ upcase = helper.gravatar_icon(' FOO@EXAMPLE.COM ')
- expect(normal).to eq upcase
+ expect(normal).to eq upcase
+ end
+ end
+
+ context 'with FIPS enabled', :fips_mode do
+ it 'returns a generic avatar' do
+ expect(helper.gravatar_icon(user_email)).to match_asset_path(described_class::DEFAULT_AVATAR_PATH)
+ end
end
end
end
diff --git a/spec/helpers/ci/pipeline_editor_helper_spec.rb b/spec/helpers/ci/pipeline_editor_helper_spec.rb
index 8366506aa45..bc9e47a4ca1 100644
--- a/spec/helpers/ci/pipeline_editor_helper_spec.rb
+++ b/spec/helpers/ci/pipeline_editor_helper_spec.rb
@@ -48,6 +48,7 @@ RSpec.describe Ci::PipelineEditorHelper do
"ci-config-path": project.ci_config_path_or_default,
"ci-examples-help-page-path" => help_page_path('ci/examples/index'),
"ci-help-page-path" => help_page_path('ci/index'),
+ "ci-lint-path" => project_ci_lint_path(project),
"default-branch" => project.default_branch_or_main,
"empty-state-illustration-path" => 'illustrations/empty.svg',
"initial-branch-name" => nil,
@@ -62,6 +63,7 @@ RSpec.describe Ci::PipelineEditorHelper do
"project-full-path" => project.full_path,
"project-namespace" => project.namespace.full_path,
"runner-help-page-path" => help_page_path('ci/runners/index'),
+ "simulate-pipeline-help-page-path" => help_page_path('ci/lint', anchor: 'simulate-a-pipeline'),
"total-branches" => project.repository.branches.length,
"validate-tab-illustration-path" => 'illustrations/validate.svg',
"yml-help-page-path" => help_page_path('ci/yaml/index')
@@ -77,6 +79,7 @@ RSpec.describe Ci::PipelineEditorHelper do
"ci-config-path": project.ci_config_path_or_default,
"ci-examples-help-page-path" => help_page_path('ci/examples/index'),
"ci-help-page-path" => help_page_path('ci/index'),
+ "ci-lint-path" => project_ci_lint_path(project),
"default-branch" => project.default_branch_or_main,
"empty-state-illustration-path" => 'illustrations/empty.svg',
"initial-branch-name" => nil,
@@ -91,6 +94,7 @@ RSpec.describe Ci::PipelineEditorHelper do
"project-full-path" => project.full_path,
"project-namespace" => project.namespace.full_path,
"runner-help-page-path" => help_page_path('ci/runners/index'),
+ "simulate-pipeline-help-page-path" => help_page_path('ci/lint', anchor: 'simulate-a-pipeline'),
"total-branches" => 0,
"validate-tab-illustration-path" => 'illustrations/validate.svg',
"yml-help-page-path" => help_page_path('ci/yaml/index')
diff --git a/spec/helpers/commits_helper_spec.rb b/spec/helpers/commits_helper_spec.rb
index 961e7688202..b5b572e9719 100644
--- a/spec/helpers/commits_helper_spec.rb
+++ b/spec/helpers/commits_helper_spec.rb
@@ -94,7 +94,7 @@ RSpec.describe CommitsHelper do
it 'renders the correct select-rendered button' do
expect(node[:title]).to eq('Display rendered diff')
expect(node['data-file-hash']).to eq('abc')
- expect(node['data-diff-toggle-entity']).to eq('toShowBtn')
+ expect(node['data-diff-toggle-entity']).to eq('renderedButton')
expect(node.xpath("//a/svg")[0]["data-testid"]).to eq('doc-text-icon')
end
end
@@ -105,7 +105,7 @@ RSpec.describe CommitsHelper do
it 'renders the correct select-raw button' do
expect(node[:title]).to eq('Display raw diff')
expect(node['data-file-hash']).to eq('abc')
- expect(node['data-diff-toggle-entity']).to eq('toHideBtn')
+ expect(node['data-diff-toggle-entity']).to eq('rawButton')
expect(node.xpath("//a/svg")[0]["data-testid"]).to eq('doc-code-icon')
end
end
diff --git a/spec/helpers/diff_helper_spec.rb b/spec/helpers/diff_helper_spec.rb
index cf16807723b..93efce6b58b 100644
--- a/spec/helpers/diff_helper_spec.rb
+++ b/spec/helpers/diff_helper_spec.rb
@@ -470,22 +470,69 @@ RSpec.describe DiffHelper do
end
describe '#conflicts' do
- let(:merge_request) { instance_double(MergeRequest) }
+ let(:merge_request) { instance_double(MergeRequest, cannot_be_merged?: true) }
+ let(:merge_ref_head_diff) { true }
+ let(:can_be_resolved_in_ui?) { true }
+ let(:allow_tree_conflicts) { false }
+ let(:files) { [instance_double(Gitlab::Conflict::File, path: 'a')] }
+ let(:exception) { nil }
before do
allow(helper).to receive(:merge_request).and_return(merge_request)
- allow(helper).to receive(:options).and_return(merge_ref_head_diff: true)
+ allow(helper).to receive(:options).and_return(merge_ref_head_diff: merge_ref_head_diff)
+
+ allow_next_instance_of(MergeRequests::Conflicts::ListService, merge_request, allow_tree_conflicts: allow_tree_conflicts) do |svc|
+ allow(svc).to receive(:can_be_resolved_in_ui?).and_return(can_be_resolved_in_ui?)
+
+ if exception.present?
+ allow(svc).to receive_message_chain(:conflicts, :files).and_raise(exception)
+ else
+ allow(svc).to receive_message_chain(:conflicts, :files).and_return(files)
+ end
+ end
end
- context 'when Gitlab::Git::Conflict::Resolver::ConflictSideMissing exception is raised' do
- before do
- allow_next_instance_of(MergeRequests::Conflicts::ListService, merge_request, allow_tree_conflicts: true) do |svc|
- allow(svc).to receive_message_chain(:conflicts, :files).and_raise(Gitlab::Git::Conflict::Resolver::ConflictSideMissing)
+ it 'returns list of conflicts indexed by path' do
+ expect(helper.conflicts).to eq('a' => files.first)
+ end
+
+ context 'when merge_ref_head_diff option is false' do
+ let(:merge_ref_head_diff) { false }
+
+ it 'returns nil' do
+ expect(helper.conflicts).to be_nil
+ end
+ end
+
+ context 'when merge request can be merged' do
+ let(:merge_request) { instance_double(MergeRequest, cannot_be_merged?: false) }
+
+ it 'returns nil' do
+ expect(helper.conflicts).to be_nil
+ end
+ end
+
+ context 'when conflicts cannot be resolved in UI' do
+ let(:can_be_resolved_in_ui?) { false }
+
+ it 'returns nil' do
+ expect(helper.conflicts).to be_nil
+ end
+
+ context 'when allow_tree_conflicts is true' do
+ let(:allow_tree_conflicts) { true }
+
+ it 'returns list of conflicts' do
+ expect(helper.conflicts(allow_tree_conflicts: allow_tree_conflicts)).to eq('a' => files.first)
end
end
+ end
+
+ context 'when Gitlab::Git::Conflict::Resolver::ConflictSideMissing exception is raised' do
+ let(:exception) { Gitlab::Git::Conflict::Resolver::ConflictSideMissing }
it 'returns an empty hash' do
- expect(helper.conflicts(allow_tree_conflicts: true)).to eq({})
+ expect(helper.conflicts).to eq({})
end
end
end
diff --git a/spec/helpers/emails_helper_spec.rb b/spec/helpers/emails_helper_spec.rb
index 220e154aad8..04653d9ff03 100644
--- a/spec/helpers/emails_helper_spec.rb
+++ b/spec/helpers/emails_helper_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe EmailsHelper do
end
describe 'notification_reason_text' do
- subject { helper.notification_reason_text(reason_code) }
+ subject { helper.notification_reason_text(reason: reason_code) }
using RSpec::Parameterized::TableSyntax
diff --git a/spec/helpers/environments_helper_spec.rb b/spec/helpers/environments_helper_spec.rb
index e4d4f18ad68..c1eaf1b1bcd 100644
--- a/spec/helpers/environments_helper_spec.rb
+++ b/spec/helpers/environments_helper_spec.rb
@@ -129,7 +129,6 @@ RSpec.describe EnvironmentsHelper do
"environment_name": environment.name,
"environments_path": api_v4_projects_environments_path(id: project.id),
"environment_id": environment.id,
- "cluster_applications_documentation_path" => help_page_path('user/clusters/integrations.md', anchor: 'elastic-stack-cluster-integration'),
"clusters_path": project_clusters_path(project, format: :json)
}
diff --git a/spec/helpers/groups/group_members_helper_spec.rb b/spec/helpers/groups/group_members_helper_spec.rb
index d308df3a017..89c26c21338 100644
--- a/spec/helpers/groups/group_members_helper_spec.rb
+++ b/spec/helpers/groups/group_members_helper_spec.rb
@@ -44,6 +44,7 @@ RSpec.describe Groups::GroupMembersHelper do
members: present_members(members_collection),
invited: present_members(invited),
access_requests: present_members(access_requests),
+ banned: [],
include_relations: [:inherited, :direct],
search: nil
)
@@ -117,6 +118,7 @@ RSpec.describe Groups::GroupMembersHelper do
members: present_members(members_collection),
invited: present_members(invited),
access_requests: present_members(access_requests),
+ banned: [],
include_relations: include_relations,
search: nil
)
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index bcbe571db5e..d00cd8f1d6b 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -355,8 +355,8 @@ RSpec.describe GroupsHelper do
end
end
- describe '#show_thanks_for_purchase_banner?' do
- subject { helper.show_thanks_for_purchase_banner? }
+ describe '#show_thanks_for_purchase_alert?' do
+ subject { helper.show_thanks_for_purchase_alert? }
it 'returns true with purchased_quantity present in params' do
allow(controller).to receive(:params) { { purchased_quantity: '1' } }
diff --git a/spec/helpers/learn_gitlab_helper_spec.rb b/spec/helpers/learn_gitlab_helper_spec.rb
index 9fce7495b5a..7c9dfd6b5be 100644
--- a/spec/helpers/learn_gitlab_helper_spec.rb
+++ b/spec/helpers/learn_gitlab_helper_spec.rb
@@ -92,38 +92,6 @@ RSpec.describe LearnGitlabHelper do
it_behaves_like 'has all data'
- it 'sets correct paths' do
- expect(onboarding_actions_data).to match({
- trial_started: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/issues/2\z})
- ),
- pipeline_created: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/issues/7\z})
- ),
- code_owners_enabled: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/issues/10\z})
- ),
- required_mr_approvals_enabled: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/issues/11\z})
- ),
- issue_created: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/issues\z})
- ),
- git_write: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab\z})
- ),
- user_added: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/project_members\z})
- ),
- merge_request_created: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/merge_requests\z})
- ),
- security_scan_enabled: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/security/configuration\z})
- )
- })
- end
-
it 'sets correct completion statuses' do
expect(onboarding_actions_data).to match({
issue_created: a_hash_including(completed: false),
@@ -137,5 +105,58 @@ RSpec.describe LearnGitlabHelper do
security_scan_enabled: a_hash_including(completed: false)
})
end
+
+ describe 'security_actions_continuous_onboarding experiment' do
+ let(:base_paths) do
+ {
+ trial_started: a_hash_including(url: %r{/learn_gitlab/-/issues/2\z}),
+ pipeline_created: a_hash_including(url: %r{/learn_gitlab/-/issues/7\z}),
+ code_owners_enabled: a_hash_including(url: %r{/learn_gitlab/-/issues/10\z}),
+ required_mr_approvals_enabled: a_hash_including(url: %r{/learn_gitlab/-/issues/11\z}),
+ issue_created: a_hash_including(url: %r{/learn_gitlab/-/issues\z}),
+ git_write: a_hash_including(url: %r{/learn_gitlab\z}),
+ user_added: a_hash_including(url: %r{/learn_gitlab/-/project_members\z}),
+ merge_request_created: a_hash_including(url: %r{/learn_gitlab/-/merge_requests\z})
+ }
+ end
+
+ context 'when control' do
+ before do
+ stub_experiments(security_actions_continuous_onboarding: :control)
+ end
+
+ it 'sets correct paths' do
+ expect(onboarding_actions_data).to match(
+ base_paths.merge(
+ security_scan_enabled: a_hash_including(
+ url: %r{/learn_gitlab/-/security/configuration\z}
+ )
+ )
+ )
+ end
+ end
+
+ context 'when candidate' do
+ before do
+ stub_experiments(security_actions_continuous_onboarding: :candidate)
+ end
+
+ it 'sets correct paths' do
+ expect(onboarding_actions_data).to match(
+ base_paths.merge(
+ license_scanning_run: a_hash_including(
+ url: described_class::LICENSE_SCANNING_RUN_URL
+ ),
+ secure_dependency_scanning_run: a_hash_including(
+ url: project_security_configuration_path(project, anchor: 'dependency-scanning')
+ ),
+ secure_dast_run: a_hash_including(
+ url: project_security_configuration_path(project, anchor: 'dast')
+ )
+ )
+ )
+ end
+ end
+ end
end
end
diff --git a/spec/helpers/namespace_storage_limit_alert_helper_spec.rb b/spec/helpers/namespace_storage_limit_alert_helper_spec.rb
deleted file mode 100644
index ab3cf96edef..00000000000
--- a/spec/helpers/namespace_storage_limit_alert_helper_spec.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe NamespaceStorageLimitAlertHelper do
- describe '#display_namespace_storage_limit_alert!' do
- it 'is defined in CE' do
- expect { helper.display_namespace_storage_limit_alert! }.not_to raise_error
- end
- end
-end
diff --git a/spec/helpers/nav/new_dropdown_helper_spec.rb b/spec/helpers/nav/new_dropdown_helper_spec.rb
index 4f32ac5b5c6..2fe237fb996 100644
--- a/spec/helpers/nav/new_dropdown_helper_spec.rb
+++ b/spec/helpers/nav/new_dropdown_helper_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe Nav::NewDropdownHelper do
end
it 'has title' do
- expect(subject[:title]).to eq('Create new')
+ expect(subject[:title]).to eq('Create new...')
end
context 'when current_user is nil (anonymous)' do
diff --git a/spec/helpers/projects/pipeline_helper_spec.rb b/spec/helpers/projects/pipeline_helper_spec.rb
index d04aa9a9d04..2b2dad286c7 100644
--- a/spec/helpers/projects/pipeline_helper_spec.rb
+++ b/spec/helpers/projects/pipeline_helper_spec.rb
@@ -11,7 +11,11 @@ RSpec.describe Projects::PipelineHelper do
let_it_be(:pipeline) { Ci::PipelinePresenter.new(raw_pipeline, current_user: user)}
describe '#js_pipeline_tabs_data' do
- subject(:pipeline_tabs_data) { helper.js_pipeline_tabs_data(project, pipeline) }
+ before do
+ project.add_developer(user)
+ end
+
+ subject(:pipeline_tabs_data) { helper.js_pipeline_tabs_data(project, pipeline, user) }
it 'returns pipeline tabs data' do
expect(pipeline_tabs_data).to include({
diff --git a/spec/helpers/projects/project_members_helper_spec.rb b/spec/helpers/projects/project_members_helper_spec.rb
index 2414a1782c5..844c33de635 100644
--- a/spec/helpers/projects/project_members_helper_spec.rb
+++ b/spec/helpers/projects/project_members_helper_spec.rb
@@ -14,7 +14,6 @@ RSpec.describe Projects::ProjectMembersHelper do
describe 'project members' do
let_it_be(:members) { create_list(:project_member, 2, project: project) }
- let_it_be(:group_links) { create_list(:project_group_link, 1, project: project) }
let_it_be(:invited) { create_list(:project_member, 2, :invited, project: project) }
let_it_be(:access_requests) { create_list(:project_member, 2, :access_request, project: project) }
@@ -26,9 +25,10 @@ RSpec.describe Projects::ProjectMembersHelper do
helper.project_members_app_data_json(
project,
members: present_members(members_collection),
- group_links: group_links,
invited: present_members(invited),
- access_requests: present_members(access_requests)
+ access_requests: present_members(access_requests),
+ include_relations: [:inherited, :direct],
+ search: nil
)
)
end
@@ -84,6 +84,70 @@ RSpec.describe Projects::ProjectMembersHelper do
expect(subject['user']['pagination']).to match(expected)
end
end
+
+ context 'group links' do
+ let_it_be(:shared_with_group) { create(:group) }
+ let_it_be(:group_link) { create(:project_group_link, project: project, group: shared_with_group) }
+
+ before do
+ allow(helper).to receive(:project_group_link_path).with(project, ':id').and_return('/foo-group/foo-project/-/group_links/:id')
+ end
+
+ it 'sets `group.members` property that matches json schema' do
+ expect(subject['group']['members'].to_json).to match_schema('group_link/project_group_links')
+ end
+
+ it 'sets `member_path` property' do
+ expect(subject['group']['member_path']).to eq('/foo-group/foo-project/-/group_links/:id')
+ end
+
+ context 'inherited' do
+ let_it_be(:shared_with_group_1) { create(:group) }
+ let_it_be(:shared_with_group_2) { create(:group) }
+ let_it_be(:shared_with_group_3) { create(:group) }
+ let_it_be(:shared_with_group_4) { create(:group) }
+ let_it_be(:shared_with_group_5) { create(:group) }
+ let_it_be(:top_group) { create(:group) }
+ let_it_be(:sub_group) { create(:group, parent: top_group) }
+ let_it_be(:project) { create(:project, group: sub_group) }
+ let_it_be(:group_link_1) { create(:group_group_link, shared_group: top_group, shared_with_group: shared_with_group_1, group_access: Gitlab::Access::GUEST) }
+ let_it_be(:group_link_2) { create(:group_group_link, shared_group: top_group, shared_with_group: shared_with_group_4, group_access: Gitlab::Access::GUEST) }
+ let_it_be(:group_link_3) { create(:group_group_link, shared_group: top_group, shared_with_group: shared_with_group_5, group_access: Gitlab::Access::DEVELOPER) }
+ let_it_be(:group_link_4) { create(:group_group_link, shared_group: sub_group, shared_with_group: shared_with_group_2, group_access: Gitlab::Access::DEVELOPER) }
+ let_it_be(:group_link_5) { create(:group_group_link, shared_group: sub_group, shared_with_group: shared_with_group_4, group_access: Gitlab::Access::DEVELOPER) }
+ let_it_be(:group_link_6) { create(:group_group_link, shared_group: sub_group, shared_with_group: shared_with_group_5, group_access: Gitlab::Access::GUEST) }
+ let_it_be(:group_link_7) { create(:project_group_link, project: project, group: shared_with_group_1, group_access: Gitlab::Access::DEVELOPER) }
+ let_it_be(:group_link_8) { create(:project_group_link, project: project, group: shared_with_group_2, group_access: Gitlab::Access::GUEST) }
+ let_it_be(:group_link_9) { create(:project_group_link, project: project, group: shared_with_group_3, group_access: Gitlab::Access::REPORTER) }
+
+ subject do
+ Gitlab::Json.parse(
+ helper.project_members_app_data_json(
+ project,
+ members: present_members(members_collection),
+ invited: present_members(invited),
+ access_requests: present_members(access_requests),
+ include_relations: include_relations,
+ search: nil
+ )
+ )
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:include_relations, :result) do
+ [:inherited, :direct] | lazy { [group_link_7, group_link_4, group_link_9, group_link_5, group_link_3].map(&:id) }
+ [:inherited] | lazy { [group_link_1, group_link_4, group_link_5, group_link_3].map(&:id) }
+ [:direct] | lazy { [group_link_7, group_link_8, group_link_9].map(&:id) }
+ end
+
+ with_them do
+ it 'returns correct group links' do
+ expect(subject['group']['members'].map { |link| link['id'] }).to match_array(result)
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 4502729866c..b7cc8c217a4 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -698,7 +698,7 @@ RSpec.describe ProjectsHelper do
def grant_user_access(project, user, access)
case access
when :developer, :maintainer
- project.add_user(user, access)
+ project.add_member(user, access)
when :owner
project.namespace.update!(owner: user)
end
@@ -969,6 +969,10 @@ RSpec.describe ProjectsHelper do
containerRegistryAccessLevel: project.project_feature.container_registry_access_level
)
end
+
+ it 'includes membersPagePath' do
+ expect(subject).to include(membersPagePath: project_project_members_path(project))
+ end
end
describe '#project_classes' do
diff --git a/spec/helpers/releases_helper_spec.rb b/spec/helpers/releases_helper_spec.rb
index b7493e84c6a..59a92c067f4 100644
--- a/spec/helpers/releases_helper_spec.rb
+++ b/spec/helpers/releases_helper_spec.rb
@@ -64,7 +64,9 @@ RSpec.describe ReleasesHelper do
release_assets_docs_path
manage_milestones_path
new_milestone_path
- edit_release_docs_path)
+ upcoming_release_docs_path
+ edit_release_docs_path
+ delete_release_docs_path)
expect(helper.data_for_edit_release_page.keys).to match_array(keys)
end
@@ -76,6 +78,7 @@ RSpec.describe ReleasesHelper do
group_id
group_milestones_available
project_path
+ tag_name
releases_page_path
markdown_preview_path
markdown_docs_path
@@ -83,6 +86,7 @@ RSpec.describe ReleasesHelper do
manage_milestones_path
new_milestone_path
default_branch
+ upcoming_release_docs_path
edit_release_docs_path)
expect(helper.data_for_new_release_page.keys).to match_array(keys)
diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb
index 4117d577f20..1ead1fc9b8b 100644
--- a/spec/helpers/search_helper_spec.rb
+++ b/spec/helpers/search_helper_spec.rb
@@ -741,7 +741,7 @@ RSpec.describe SearchHelper do
let(:for_group) { true }
it 'adds the :group and :group_metadata correctly to hash' do
- expect(header_search_context[:group]).to eq({ id: group.id, name: group.name })
+ expect(header_search_context[:group]).to eq({ id: group.id, name: group.name, full_name: group.full_name })
expect(header_search_context[:group_metadata]).to eq(group_metadata)
end
diff --git a/spec/helpers/sessions_helper_spec.rb b/spec/helpers/sessions_helper_spec.rb
index fd3d7100ba1..15424425060 100644
--- a/spec/helpers/sessions_helper_spec.rb
+++ b/spec/helpers/sessions_helper_spec.rb
@@ -50,4 +50,51 @@ RSpec.describe SessionsHelper do
expect(helper.unconfirmed_email?).to be_falsey
end
end
+
+ describe '#send_rate_limited?' do
+ let_it_be(:user) { build(:user) }
+
+ subject { helper.send_rate_limited?(user) }
+
+ before do
+ allow(::Gitlab::ApplicationRateLimiter)
+ .to receive(:peek)
+ .with(:email_verification_code_send, scope: user)
+ .and_return(rate_limited)
+ end
+
+ context 'when rate limited' do
+ let(:rate_limited) { true }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when not rate limited' do
+ let(:rate_limited) { false }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#obfuscated_email' do
+ subject { helper.obfuscated_email(email) }
+
+ context 'when an email address is normal length' do
+ let(:email) { 'alex@gitlab.com' }
+
+ it { is_expected.to eq('al**@g*****.com') }
+ end
+
+ context 'when an email address contains multiple top level domains' do
+ let(:email) { 'alex@gl.co.uk' }
+
+ it { is_expected.to eq('al**@g****.uk') }
+ end
+
+ context 'when an email address is very short' do
+ let(:email) { 'a@b' }
+
+ it { is_expected.to eq('a@b') }
+ end
+ end
end
diff --git a/spec/helpers/storage_helper_spec.rb b/spec/helpers/storage_helper_spec.rb
index c2c508cf485..4b46bf169e0 100644
--- a/spec/helpers/storage_helper_spec.rb
+++ b/spec/helpers/storage_helper_spec.rb
@@ -51,14 +51,14 @@ RSpec.describe StorageHelper do
end
end
- describe "storage_enforcement_banner", :saas do
+ describe "storage_enforcement_banner" do
let_it_be_with_refind(:current_user) { create(:user) }
let_it_be(:free_group) { create(:group) }
let_it_be(:paid_group) { create(:group) }
before do
- allow(helper).to receive(:can?).with(current_user, :admin_namespace, free_group).and_return(true)
- allow(helper).to receive(:can?).with(current_user, :admin_namespace, paid_group).and_return(true)
+ allow(helper).to receive(:can?).with(current_user, :maintain_namespace, free_group).and_return(true)
+ allow(helper).to receive(:can?).with(current_user, :maintain_namespace, paid_group).and_return(true)
allow(helper).to receive(:current_user) { current_user }
allow(paid_group).to receive(:paid?).and_return(true)
@@ -84,7 +84,13 @@ RSpec.describe StorageHelper do
end
it 'returns nil when current_user does not have access to the usage quotas page' do
- allow(helper).to receive(:can?).with(current_user, :admin_namespace, free_group).and_return(false)
+ allow(helper).to receive(:can?).with(current_user, :maintain_namespace, free_group).and_return(false)
+
+ expect(helper.storage_enforcement_banner_info(free_group)).to be(nil)
+ end
+
+ it 'returns nil when namespace_storage_limit_show_preenforcement_banner FF is disabled' do
+ stub_feature_flags(namespace_storage_limit_show_preenforcement_banner: false)
expect(helper.storage_enforcement_banner_info(free_group)).to be(nil)
end
diff --git a/spec/helpers/todos_helper_spec.rb b/spec/helpers/todos_helper_spec.rb
index 922fb1d7c92..bbabfedc3ee 100644
--- a/spec/helpers/todos_helper_spec.rb
+++ b/spec/helpers/todos_helper_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe TodosHelper do
let_it_be(:user) { create(:user) }
let_it_be(:author) { create(:user) }
- let_it_be(:issue) { create(:issue, title: 'Issue 1') }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue) { create(:issue, title: 'Issue 1', project: project) }
let_it_be(:design) { create(:design, issue: issue) }
let_it_be(:note) do
create(:note,
@@ -16,7 +17,7 @@ RSpec.describe TodosHelper do
let_it_be(:design_todo) do
create(:todo, :mentioned,
user: user,
- project: issue.project,
+ project: project,
target: design,
author: author,
note: note)
@@ -27,6 +28,15 @@ RSpec.describe TodosHelper do
create(:todo, target: alert)
end
+ let_it_be(:task_todo) do
+ task = create(:work_item, :task, project: project)
+ create(:todo, target: task, target_type: task.class.name, project: project)
+ end
+
+ let_it_be(:issue_todo) do
+ create(:todo, target: issue)
+ end
+
describe '#todos_count_format' do
it 'shows fuzzy count for 100 or more items' do
expect(helper.todos_count_format(100)).to eq '99+'
@@ -113,27 +123,62 @@ RSpec.describe TodosHelper do
)
end
end
+
+ context 'when given a task' do
+ let(:todo) { task_todo }
+
+ it 'responds with an appropriate path' do
+ path = helper.todo_target_path(todo)
+
+ expect(path).to eq("/#{todo.project.full_path}/-/work_items/#{todo.target.id}")
+ end
+ end
+
+ context 'when given an issue with a note anchor' do
+ let(:todo) { create(:todo, project: issue.project, target: issue, note: note) }
+
+ it 'responds with an appropriate path' do
+ path = helper.todo_target_path(todo)
+
+ expect(path).to eq("/#{issue.project.full_path}/-/issues/#{issue.iid}##{dom_id(note)}")
+ end
+ end
end
describe '#todo_target_type_name' do
+ subject { helper.todo_target_type_name(todo) }
+
context 'when given a design todo' do
let(:todo) { design_todo }
- it 'responds with an appropriate target type name' do
- name = helper.todo_target_type_name(todo)
-
- expect(name).to eq('design')
- end
+ it { is_expected.to eq('design') }
end
context 'when given an alert todo' do
let(:todo) { alert_todo }
- it 'responds with an appropriate target type name' do
- name = helper.todo_target_type_name(todo)
+ it { is_expected.to eq('alert') }
+ end
+
+ context 'when given a task todo' do
+ let(:todo) { task_todo }
- expect(name).to eq('alert')
+ it { is_expected.to eq('task') }
+ end
+
+ context 'when given an issue todo' do
+ let(:todo) { issue_todo }
+
+ it { is_expected.to eq('issue') }
+ end
+
+ context 'when given a merge request todo' do
+ let(:todo) do
+ merge_request = create(:merge_request, source_project: project)
+ create(:todo, target: merge_request)
end
+
+ it { is_expected.to eq('merge request') }
end
end
diff --git a/spec/helpers/tree_helper_spec.rb b/spec/helpers/tree_helper_spec.rb
index 026432adf99..c40284ee933 100644
--- a/spec/helpers/tree_helper_spec.rb
+++ b/spec/helpers/tree_helper_spec.rb
@@ -3,63 +3,12 @@
require 'spec_helper'
RSpec.describe TreeHelper do
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:sha) { 'c1c67abbaf91f624347bb3ae96eabe3a1b742478' }
let_it_be(:user) { create(:user) }
- def create_file(filename)
- project.repository.create_file(
- project.creator,
- filename,
- 'test this',
- message: "Automatically created file #{filename}",
- branch_name: 'master'
- )
- end
-
- describe 'flatten_tree' do
- let(:tree) { repository.tree(sha, 'files') }
- let(:root_path) { 'files' }
- let(:tree_item) { tree.entries.find { |entry| entry.path == path } }
-
- subject { flatten_tree(root_path, tree_item) }
-
- context "on a directory containing more than one file/directory" do
- let(:path) { 'files/html' }
-
- it "returns the directory name" do
- expect(subject).to match('html')
- end
- end
-
- context "on a directory containing only one directory" do
- let(:path) { 'files/flat' }
-
- it "returns the flattened path" do
- expect(subject).to match('flat/path/correct')
- end
-
- context "with a nested root path" do
- let(:root_path) { 'files/flat' }
-
- it "returns the flattened path with the root path suffix removed" do
- expect(subject).to match('path/correct')
- end
- end
- end
-
- context 'when the root path contains a plus character' do
- let(:root_path) { 'gtk/C++' }
- let(:tree_item) { double(flat_path: 'gtk/C++/glade') }
-
- it 'returns the flattened path' do
- expect(subject).to eq('glade')
- end
- end
- end
-
describe '#commit_in_single_accessible_branch' do
it 'escapes HTML from the branch name' do
helper.instance_variable_set(:@branch_name, "<script>alert('escape me!');</script>")
@@ -163,6 +112,7 @@ RSpec.describe TreeHelper do
context 'user does not have write access but a personal fork exists' do
include ProjectForksHelper
+ let(:project) { create(:project, :repository) }
let(:forked_project) { create(:project, :repository, namespace: user.namespace) }
before do
diff --git a/spec/helpers/users/callouts_helper_spec.rb b/spec/helpers/users/callouts_helper_spec.rb
index 71a8d340b30..2c148aabead 100644
--- a/spec/helpers/users/callouts_helper_spec.rb
+++ b/spec/helpers/users/callouts_helper_spec.rb
@@ -222,4 +222,60 @@ RSpec.describe Users::CalloutsHelper do
it { is_expected.to be true }
end
end
+
+ describe '#web_hook_disabled_dismissed?' do
+ context 'without a project' do
+ it 'is false' do
+ expect(helper).not_to be_web_hook_disabled_dismissed(nil)
+ end
+ end
+
+ context 'with a project' do
+ let_it_be(:project) { create(:project) }
+
+ context 'the web-hook failure callout has never been dismissed' do
+ it 'is false' do
+ expect(helper).not_to be_web_hook_disabled_dismissed(project)
+ end
+ end
+
+ context 'the web-hook failure callout has been dismissed', :freeze_time do
+ before do
+ create(:namespace_callout,
+ feature_name: described_class::WEB_HOOK_DISABLED,
+ user: user,
+ namespace: project.namespace,
+ dismissed_at: 1.week.ago)
+ end
+
+ it 'is true' do
+ expect(helper).to be_web_hook_disabled_dismissed(project)
+ end
+
+ context 'when there was an older failure', :clean_gitlab_redis_shared_state do
+ let(:key) { "web_hooks:last_failure:project-#{project.id}" }
+
+ before do
+ Gitlab::Redis::SharedState.with { |r| r.set(key, 1.month.ago.iso8601) }
+ end
+
+ it 'is true' do
+ expect(helper).to be_web_hook_disabled_dismissed(project)
+ end
+ end
+
+ context 'when there has been a more recent failure', :clean_gitlab_redis_shared_state do
+ let(:key) { "web_hooks:last_failure:project-#{project.id}" }
+
+ before do
+ Gitlab::Redis::SharedState.with { |r| r.set(key, 1.day.ago.iso8601) }
+ end
+
+ it 'is false' do
+ expect(helper).not_to be_web_hook_disabled_dismissed(project)
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/helpers/web_hooks/web_hooks_helper_spec.rb b/spec/helpers/web_hooks/web_hooks_helper_spec.rb
new file mode 100644
index 00000000000..473f33a982f
--- /dev/null
+++ b/spec/helpers/web_hooks/web_hooks_helper_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WebHooks::WebHooksHelper do
+ let_it_be_with_reload(:project) { create(:project) }
+
+ let(:current_user) { nil }
+ let(:callout_dismissed) { false }
+ let(:web_hooks_disable_failed) { false }
+ let(:webhooks_failed_callout) { false }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(current_user)
+ allow(helper).to receive(:web_hook_disabled_dismissed?).with(project).and_return(callout_dismissed)
+
+ stub_feature_flags(
+ webhooks_failed_callout: webhooks_failed_callout,
+ web_hooks_disable_failed: web_hooks_disable_failed
+ )
+ end
+
+ shared_context 'user is logged in' do
+ let(:current_user) { create(:user) }
+ end
+
+ shared_context 'webhooks_failed_callout is enabled' do
+ let(:webhooks_failed_callout) { true }
+ end
+
+ shared_context 'webhooks_failed_callout is enabled for this project' do
+ let(:webhooks_failed_callout) { project }
+ end
+
+ shared_context 'web_hooks_disable_failed is enabled' do
+ let(:web_hooks_disable_failed) { true }
+ end
+
+ shared_context 'web_hooks_disable_failed is enabled for this project' do
+ let(:web_hooks_disable_failed) { project }
+ end
+
+ shared_context 'the user has permission' do
+ before do
+ project.add_maintainer(current_user)
+ end
+ end
+
+ shared_context 'the user dismissed the callout' do
+ let(:callout_dismissed) { true }
+ end
+
+ shared_context 'a hook has failed' do
+ before do
+ create(:project_hook, :permanently_disabled, project: project)
+ end
+ end
+
+ describe '#show_project_hook_failed_callout?' do
+ context 'all conditions are met' do
+ include_context 'user is logged in'
+ include_context 'webhooks_failed_callout is enabled'
+ include_context 'web_hooks_disable_failed is enabled'
+ include_context 'the user has permission'
+ include_context 'a hook has failed'
+
+ it 'is true' do
+ expect(helper).to be_show_project_hook_failed_callout(project: project)
+ end
+
+ it 'caches the DB calls until the TTL', :use_clean_rails_memory_store_caching, :request_store do
+ helper.show_project_hook_failed_callout?(project: project)
+
+ travel_to((described_class::EXPIRY_TTL - 1.second).from_now) do
+ expect do
+ helper.show_project_hook_failed_callout?(project: project)
+ end.not_to exceed_query_limit(0)
+ end
+
+ travel_to((described_class::EXPIRY_TTL + 1.second).from_now) do
+ expect do
+ helper.show_project_hook_failed_callout?(project: project)
+ end.to exceed_query_limit(0)
+ end
+ end
+ end
+
+ context 'all conditions are met, project scoped flags' do
+ include_context 'user is logged in'
+ include_context 'webhooks_failed_callout is enabled for this project'
+ include_context 'web_hooks_disable_failed is enabled for this project'
+ include_context 'the user has permission'
+ include_context 'a hook has failed'
+
+ it 'is true' do
+ expect(helper).to be_show_project_hook_failed_callout(project: project)
+ end
+ end
+
+ context 'one condition is not met' do
+ contexts = [
+ 'user is logged in',
+ 'webhooks_failed_callout is enabled',
+ 'web_hooks_disable_failed is enabled',
+ 'the user has permission',
+ 'a hook has failed'
+ ]
+
+ contexts.each do |name|
+ context "namely #{name}" do
+ contexts.each { |ctx| include_context(ctx) unless ctx == name }
+
+ it 'is false' do
+ expect(helper).not_to be_show_project_hook_failed_callout(project: project)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/initializers/00_connection_logger_spec.rb b/spec/initializers/00_connection_logger_spec.rb
deleted file mode 100644
index 8b288b463c4..00000000000
--- a/spec/initializers/00_connection_logger_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ActiveRecord::ConnectionAdapters::PostgreSQLAdapter do # rubocop:disable RSpec/FilePath
- before do
- allow(PG).to receive(:connect)
- end
-
- let(:conn_params) { PG::Connection.conndefaults_hash }
-
- context 'when warn_on_new_connection is enabled' do
- before do
- described_class.warn_on_new_connection = true
- end
-
- it 'warns on new connection' do
- expect(ActiveSupport::Deprecation)
- .to receive(:warn).with(/Database connection should not be called during initializers/, anything)
-
- expect(PG).to receive(:connect).with(conn_params)
-
- described_class.new_client(conn_params)
- end
- end
-
- context 'when warn_on_new_connection is disabled' do
- before do
- described_class.warn_on_new_connection = false
- end
-
- it 'does not warn on new connection' do
- expect(ActiveSupport::Deprecation).not_to receive(:warn)
- expect(PG).to receive(:connect).with(conn_params)
-
- described_class.new_client(conn_params)
- end
- end
-end
diff --git a/spec/initializers/0_log_deprecations_spec.rb b/spec/initializers/0_log_deprecations_spec.rb
index 35bceb2f132..f5065126eaf 100644
--- a/spec/initializers/0_log_deprecations_spec.rb
+++ b/spec/initializers/0_log_deprecations_spec.rb
@@ -3,6 +3,10 @@
require 'spec_helper'
RSpec.describe '0_log_deprecations' do
+ def setup_other_deprecations
+ Warning.process(__FILE__) { :default }
+ end
+
def load_initializer
load Rails.root.join('config/initializers/0_log_deprecations.rb')
end
@@ -11,16 +15,20 @@ RSpec.describe '0_log_deprecations' do
before do
stub_env('GITLAB_LOG_DEPRECATIONS', env_var)
+ setup_other_deprecations
load_initializer
end
after do
- # reset state changed by initializer
- Warning.clear
ActiveSupport::Notifications.unsubscribe('deprecation.rails')
end
- context 'for Ruby deprecations' do
+ around do |example|
+ # reset state changed by initializer
+ Warning.clear(&example)
+ end
+
+ describe 'Ruby deprecations' do
context 'when catching deprecations through Kernel#warn' do
it 'also logs them to deprecation logger' do
expect(Gitlab::DeprecationJsonLogger).to receive(:info).with(
@@ -32,7 +40,7 @@ RSpec.describe '0_log_deprecations' do
end
end
- context 'for other messages from Kernel#warn' do
+ describe 'other messages from Kernel#warn' do
it 'does not log them to deprecation logger' do
expect(Gitlab::DeprecationJsonLogger).not_to receive(:info)
@@ -51,7 +59,7 @@ RSpec.describe '0_log_deprecations' do
end
end
- context 'for Rails deprecations' do
+ describe 'Rails deprecations' do
it 'logs them to deprecation logger' do
expect(Gitlab::DeprecationJsonLogger).to receive(:info).with(
message: match(/^DEPRECATION WARNING: ABC will be removed/),
diff --git a/spec/initializers/100_patch_omniauth_oauth2_spec.rb b/spec/initializers/100_patch_omniauth_oauth2_spec.rb
index c30a1cdeafa..36a14816b7e 100644
--- a/spec/initializers/100_patch_omniauth_oauth2_spec.rb
+++ b/spec/initializers/100_patch_omniauth_oauth2_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'OmniAuth::Strategies::OAuth2' do
it 'verifies the gem version' do
current_version = OmniAuth::OAuth2::VERSION
- expected_version = '1.7.2'
+ expected_version = '1.7.3'
expect(current_version).to eq(expected_version), <<~EOF
New version #{current_version} of the `omniauth-oauth2` gem detected!
diff --git a/spec/initializers/1_acts_as_taggable_spec.rb b/spec/initializers/1_acts_as_taggable_spec.rb
new file mode 100644
index 00000000000..f9ccc9718d5
--- /dev/null
+++ b/spec/initializers/1_acts_as_taggable_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'ActsAsTaggableOn::Tag' do
+ describe '.find_or_create_all_with_like_by_name' do
+ let(:tags) { %w[tag] }
+
+ subject(:find_or_create) { ActsAsTaggableOn::Tag.find_or_create_all_with_like_by_name(tags) }
+
+ it 'creates a tag' do
+ expect { find_or_create }.to change(ActsAsTaggableOn::Tag, :count).by(1)
+ end
+
+ it 'returns the Tag record' do
+ results = find_or_create
+
+ expect(results.size).to eq(1)
+ expect(results.first).to be_an_instance_of(ActsAsTaggableOn::Tag)
+ expect(results.first.name).to eq('tag')
+ end
+
+ context 'some tags already existing' do
+ let(:tags) { %w[tag preexisting_tag tag2] }
+
+ before_all do
+ ActsAsTaggableOn::Tag.create!(name: 'preexisting_tag')
+ end
+
+ it 'creates only the missing tag' do
+ expect(ActsAsTaggableOn::Tag).to receive(:insert_all)
+ .with([{ name: 'tag' }, { name: 'tag2' }], unique_by: :name)
+ .and_call_original
+
+ expect { find_or_create }.to change(ActsAsTaggableOn::Tag, :count).by(2)
+ end
+
+ it 'returns the Tag records' do
+ results = find_or_create
+
+ expect(results.map(&:name)).to match_array(tags)
+ end
+ end
+
+ context 'all tags already existing' do
+ let(:tags) { %w[preexisting_tag preexisting_tag2] }
+
+ before_all do
+ ActsAsTaggableOn::Tag.create!(name: 'preexisting_tag')
+ ActsAsTaggableOn::Tag.create!(name: 'preexisting_tag2')
+ end
+
+ it 'does not create new tags' do
+ expect { find_or_create }.not_to change(ActsAsTaggableOn::Tag, :count)
+ end
+
+ it 'returns the Tag records' do
+ results = find_or_create
+
+ expect(results.map(&:name)).to match_array(tags)
+ end
+ end
+ end
+end
diff --git a/spec/initializers/enumerator_next_patch_spec.rb b/spec/initializers/enumerator_next_patch_spec.rb
new file mode 100644
index 00000000000..99e73af5e86
--- /dev/null
+++ b/spec/initializers/enumerator_next_patch_spec.rb
@@ -0,0 +1,167 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Enumerator#next patch fix' do
+ describe 'Enumerator' do
+ RSpec::Matchers.define :contain_unique_method_calls_in_order do |expected|
+ attr_reader :actual
+
+ match do |actual|
+ @actual_err = actual
+ regexps = expected.map { |method_name| { name: method_name, regexp: make_regexp(method_name) } }
+ @actual = actual.backtrace.filter_map do |line|
+ regexp = regexps.find { |r| r[:regexp].match? line }
+
+ regexp[:name] if regexp
+ end
+
+ expected == @actual
+ end
+
+ diffable
+
+ failure_message do
+ "#{super()}\n\nFull error backtrace:\n #{@actual_err.backtrace.join("\n ")}"
+ end
+
+ private
+
+ def make_regexp(method_name)
+ Regexp.new("/spec/initializers/enumerator_next_patch_spec\\.rb:[0-9]+:in `#{method_name}'$")
+ end
+ end
+
+ def have_been_raised_by_next_and_not_fixed_up
+ contain_unique_method_calls_in_order %w(call_enum_method)
+ end
+
+ def have_been_raised_by_enum_object_and_fixed_up
+ contain_unique_method_calls_in_order %w(make_error call_enum_method)
+ end
+
+ def have_been_raised_by_nested_next_and_fixed_up
+ contain_unique_method_calls_in_order %w(call_nested_next call_enum_method)
+ end
+
+ methods = [
+ {
+ name: 'next',
+ expected_value: 'Test value'
+ },
+ {
+ name: 'next_values',
+ expected_value: ['Test value']
+ },
+ {
+ name: 'peek',
+ expected_value: 'Test value'
+ },
+ {
+ name: 'peek_values',
+ expected_value: ['Test value']
+ }
+ ]
+
+ methods.each do |method|
+ describe "##{method[:name]}" do
+ def call_enum_method
+ enumerator.send(method_name)
+ end
+
+ let(:method_name) { method[:name] }
+
+ subject { call_enum_method }
+
+ describe 'normal yield' do
+ let(:enumerator) { Enumerator.new { |yielder| yielder << 'Test value' } }
+
+ it 'returns yielded value' do
+ is_expected.to eq(method[:expected_value])
+ end
+ end
+
+ describe 'end of iteration' do
+ let(:enumerator) { Enumerator.new { |_| } }
+
+ it 'does not fix up StopIteration' do
+ expect { subject }.to raise_error do |err|
+ expect(err).to be_a(StopIteration)
+ expect(err).to have_been_raised_by_next_and_not_fixed_up
+ end
+ end
+
+ context 'nested enum object' do
+ def call_nested_next
+ nested_enumerator.next
+ end
+
+ let(:nested_enumerator) { Enumerator.new { |_| } }
+ let(:enumerator) { Enumerator.new { |yielder| yielder << call_nested_next } }
+
+ it 'fixes up StopIteration thrown by another instance of #next' do
+ expect { subject }.to raise_error do |err|
+ expect(err).to be_a(StopIteration)
+ expect(err).to have_been_raised_by_nested_next_and_fixed_up
+ end
+ end
+ end
+ end
+
+ describe 'arguments error' do
+ def call_enum_method
+ enumerator.send(method_name, 'extra argument')
+ end
+
+ let(:enumerator) { Enumerator.new { |_| } }
+
+ it 'does not fix up ArgumentError' do
+ expect { subject }.to raise_error do |err|
+ expect(err).to be_a(ArgumentError)
+ expect(err).to have_been_raised_by_next_and_not_fixed_up
+ end
+ end
+ end
+
+ describe 'error' do
+ let(:enumerator) { Enumerator.new { |_| raise error } }
+ let(:error) { make_error }
+
+ it 'fixes up StopIteration' do
+ def make_error
+ StopIteration.new.tap { |err| err.set_backtrace(caller) }
+ end
+
+ expect { subject }.to raise_error do |err|
+ expect(err).to be(error)
+ expect(err).to have_been_raised_by_enum_object_and_fixed_up
+ end
+ end
+
+ it 'fixes up ArgumentError' do
+ def make_error
+ ArgumentError.new.tap { |err| err.set_backtrace(caller) }
+ end
+
+ expect { subject }.to raise_error do |err|
+ expect(err).to be(error)
+ expect(err).to have_been_raised_by_enum_object_and_fixed_up
+ end
+ end
+
+ it 'adds backtrace from other errors' do
+ def make_error
+ StandardError.new('This is a test').tap { |err| err.set_backtrace(caller) }
+ end
+
+ expect { subject }.to raise_error do |err|
+ expect(err).to be(error)
+ expect(err).to have_been_raised_by_enum_object_and_fixed_up
+ expect(err.message).to eq('This is a test')
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/initializers/set_active_support_hash_digest_class_spec.rb b/spec/initializers/set_active_support_hash_digest_class_spec.rb
deleted file mode 100644
index 256e8a1f218..00000000000
--- a/spec/initializers/set_active_support_hash_digest_class_spec.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'setting ActiveSupport::Digest.hash_digest_class' do
- it 'sets overrides config.active_support.hash_digest_class' do
- expect(ActiveSupport::Digest.hash_digest_class).to eq(Gitlab::HashDigest::Facade)
- end
-end
diff --git a/spec/lib/api/ci/helpers/runner_spec.rb b/spec/lib/api/ci/helpers/runner_spec.rb
index 37277e7dcbd..6801d16d13e 100644
--- a/spec/lib/api/ci/helpers/runner_spec.rb
+++ b/spec/lib/api/ci/helpers/runner_spec.rb
@@ -66,4 +66,30 @@ RSpec.describe API::Ci::Helpers::Runner do
expect(helper.current_runner).to eq(runner)
end
end
+
+ describe '#track_runner_authentication', :prometheus do
+ subject { helper.track_runner_authentication }
+
+ let(:runner) { create(:ci_runner, token: 'foo') }
+
+ it 'increments gitlab_ci_runner_authentication_success_total' do
+ allow(helper).to receive(:params).and_return(token: runner.token)
+
+ success_counter = ::Gitlab::Ci::Runner::Metrics.runner_authentication_success_counter
+ failure_counter = ::Gitlab::Ci::Runner::Metrics.runner_authentication_failure_counter
+ expect { subject }.to change { success_counter.get(runner_type: 'instance_type') }.by(1)
+ .and not_change { success_counter.get(runner_type: 'project_type') }
+ .and not_change { failure_counter.get }
+ end
+
+ it 'increments gitlab_ci_runner_authentication_failure_total' do
+ allow(helper).to receive(:params).and_return(token: 'invalid')
+
+ success_counter = ::Gitlab::Ci::Runner::Metrics.runner_authentication_success_counter
+ failure_counter = ::Gitlab::Ci::Runner::Metrics.runner_authentication_failure_counter
+ expect { subject }.to change { failure_counter.get }.by(1)
+ .and not_change { success_counter.get(runner_type: 'instance_type') }
+ .and not_change { success_counter.get(runner_type: 'project_type') }
+ end
+ end
end
diff --git a/spec/lib/api/entities/ci/job_request/service_spec.rb b/spec/lib/api/entities/ci/job_request/service_spec.rb
new file mode 100644
index 00000000000..47c2c4e04c9
--- /dev/null
+++ b/spec/lib/api/entities/ci/job_request/service_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::Ci::JobRequest::Service do
+ let(:ports) { [{ number: 80, protocol: 'http', name: 'name' }] }
+ let(:service) do
+ instance_double(
+ ::Gitlab::Ci::Build::Image,
+ name: 'image_name',
+ entrypoint: ['foo'],
+ ports: ports,
+ pull_policy: ['if-not-present'],
+ alias: 'alias',
+ command: 'command',
+ variables: [{ key: 'key', value: 'value' }]
+ )
+ end
+
+ let(:entity) { described_class.new(service) }
+
+ subject(:result) { entity.as_json }
+
+ it 'exposes attributes' do
+ expect(result).to eq(
+ name: 'image_name',
+ entrypoint: ['foo'],
+ ports: ports,
+ pull_policy: ['if-not-present'],
+ alias: 'alias',
+ command: 'command',
+ variables: [{ key: 'key', value: 'value' }]
+ )
+ end
+
+ context 'when the ports param is nil' do
+ let(:ports) { nil }
+
+ it 'does not return the ports' do
+ expect(subject[:ports]).to be_nil
+ end
+ end
+
+ context 'when the FF ci_docker_image_pull_policy is disabled' do
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: false)
+ end
+
+ it { is_expected.not_to have_key(:pull_policy) }
+ end
+end
diff --git a/spec/lib/api/entities/deploy_key_spec.rb b/spec/lib/api/entities/deploy_key_spec.rb
index 6427d6eac8f..50a27418488 100644
--- a/spec/lib/api/entities/deploy_key_spec.rb
+++ b/spec/lib/api/entities/deploy_key_spec.rb
@@ -15,8 +15,15 @@ RSpec.describe API::Entities::DeployKey do
title: deploy_key.title,
created_at: deploy_key.created_at,
expires_at: deploy_key.expires_at,
- key: deploy_key.key
+ key: deploy_key.key,
+ fingerprint_sha256: deploy_key.fingerprint_sha256
)
+
+ is_expected.to include(fingerprint: deploy_key.fingerprint) unless Gitlab::FIPS.enabled?
+ end
+
+ context 'when in FIPS mode', :fips_mode do
+ it { is_expected.not_to have_key(:fingerprint) }
end
end
end
diff --git a/spec/lib/api/helpers/pagination_strategies_spec.rb b/spec/lib/api/helpers/pagination_strategies_spec.rb
index e8a4243b407..16cc10182b0 100644
--- a/spec/lib/api/helpers/pagination_strategies_spec.rb
+++ b/spec/lib/api/helpers/pagination_strategies_spec.rb
@@ -55,9 +55,10 @@ RSpec.describe API::Helpers::PaginationStrategies do
allow(subject).to receive(:keyset_pagination_enabled?).and_return(false)
end
- context 'when keyset pagination is available for the relation' do
+ context 'when keyset pagination is available and enforced for the relation' do
before do
allow(Gitlab::Pagination::Keyset).to receive(:available_for_type?).and_return(true)
+ allow(Gitlab::Pagination::CursorBasedKeyset).to receive(:enforced_for_type?).and_return(true)
end
context 'when a request scope is given' do
@@ -70,6 +71,18 @@ RSpec.describe API::Helpers::PaginationStrategies do
subject.paginator(relation, request_scope)
end
+
+ context 'when keyset pagination is not enforced' do
+ before do
+ allow(Gitlab::Pagination::CursorBasedKeyset).to receive(:enforced_for_type?).and_return(false)
+ end
+
+ it 'returns no errors' do
+ expect(subject).not_to receive(:error!)
+
+ subject.paginator(relation, request_scope)
+ end
+ end
end
context 'when the scope limit is not exceeded' do
diff --git a/spec/lib/atlassian/jira_connect/jwt/asymmetric_spec.rb b/spec/lib/atlassian/jira_connect/jwt/asymmetric_spec.rb
index 12ed47a1025..b3157dd15fb 100644
--- a/spec/lib/atlassian/jira_connect/jwt/asymmetric_spec.rb
+++ b/spec/lib/atlassian/jira_connect/jwt/asymmetric_spec.rb
@@ -4,6 +4,8 @@ require 'spec_helper'
RSpec.describe Atlassian::JiraConnect::Jwt::Asymmetric do
describe '#valid?' do
+ let_it_be(:private_key) { OpenSSL::PKey::RSA.generate 3072 }
+
subject(:asymmetric_jwt) { described_class.new(jwt, verification_claims) }
let(:verification_claims) { jwt_claims }
@@ -12,7 +14,6 @@ RSpec.describe Atlassian::JiraConnect::Jwt::Asymmetric do
let(:client_key) { '1234' }
let(:public_key_id) { '123e4567-e89b-12d3-a456-426614174000' }
let(:jwt_headers) { { kid: public_key_id } }
- let(:private_key) { OpenSSL::PKey::RSA.generate 2048 }
let(:jwt) { JWT.encode(jwt_claims, private_key, 'RS256', jwt_headers) }
let(:public_key) { private_key.public_key }
let(:install_keys_url) { "https://connect-install-keys.atlassian.com/#{public_key_id}" }
diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb
index ab198fcbe1f..3a9c4dfe3fb 100644
--- a/spec/lib/backup/gitaly_backup_spec.rb
+++ b/spec/lib/backup/gitaly_backup_spec.rb
@@ -16,8 +16,8 @@ RSpec.describe Backup::GitalyBackup do
let(:expected_env) do
{
- 'SSL_CERT_FILE' => OpenSSL::X509::DEFAULT_CERT_FILE,
- 'SSL_CERT_DIR' => OpenSSL::X509::DEFAULT_CERT_DIR
+ 'SSL_CERT_FILE' => Gitlab::X509::Certificate.default_cert_file,
+ 'SSL_CERT_DIR' => Gitlab::X509::Certificate.default_cert_dir
}.merge(ENV)
end
diff --git a/spec/lib/banzai/filter/footnote_filter_spec.rb b/spec/lib/banzai/filter/footnote_filter_spec.rb
index 5ac7d3af733..26bca571fdc 100644
--- a/spec/lib/banzai/filter/footnote_filter_spec.rb
+++ b/spec/lib/banzai/filter/footnote_filter_spec.rb
@@ -56,6 +56,19 @@ RSpec.describe Banzai::Filter::FootnoteFilter do
it 'properly adds the necessary ids and classes' do
expect(doc.to_html).to eq filtered_footnote.strip
end
+
+ context 'when GITLAB_TEST_FOOTNOTE_ID is set' do
+ let(:test_footnote_id) { '42' }
+ let(:identifier) { test_footnote_id }
+
+ before do
+ stub_env('GITLAB_TEST_FOOTNOTE_ID', test_footnote_id)
+ end
+
+ it 'uses the test footnote ID instead of a random number' do
+ expect(doc.to_html).to eq filtered_footnote.strip
+ end
+ end
end
context 'when detecting footnotes' do
diff --git a/spec/lib/banzai/reference_parser/snippet_parser_spec.rb b/spec/lib/banzai/reference_parser/snippet_parser_spec.rb
index 3459784708f..e8ef4e7f6e3 100644
--- a/spec/lib/banzai/reference_parser/snippet_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/snippet_parser_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Banzai::ReferenceParser::SnippetParser do
end
before do
- project.add_user(project_member, :developer)
+ project.add_member(project_member, :developer)
end
describe '#nodes_visible_to_user' do
diff --git a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
index c42ca9bef3b..d775cf6b026 100644
--- a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
@@ -4,12 +4,12 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do
describe '#transform' do
- let_it_be(:user) { create(:user) }
let_it_be(:parent) { create(:group) }
- let_it_be(:bulk_import) { create(:bulk_import, user: user) }
- let_it_be(:entity) do
- create(
+ let(:bulk_import) { build_stubbed(:bulk_import) }
+
+ let(:entity) do
+ build_stubbed(
:bulk_import_entity,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
@@ -18,8 +18,8 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do
)
end
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let(:tracker) { build_stubbed(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:data) do
{
@@ -87,14 +87,63 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do
end
context 'when destination namespace is empty' do
- it 'does not set parent id' do
- entity.update!(destination_namespace: '')
+ before do
+ entity.destination_namespace = ''
+ end
+ it 'does not set parent id' do
transformed_data = subject.transform(context, data)
expect(transformed_data).not_to have_key('parent_id')
end
end
end
+
+ describe 'group name transformation' do
+ context 'when destination namespace is empty' do
+ before do
+ entity.destination_namespace = ''
+ end
+
+ it 'does not transform name' do
+ transformed_data = subject.transform(context, data)
+
+ expect(transformed_data['name']).to eq('Source Group Name')
+ end
+ end
+
+ context 'when destination namespace is present' do
+ context 'when destination namespace does not have a group with the same name' do
+ it 'does not transform name' do
+ transformed_data = subject.transform(context, data)
+
+ expect(transformed_data['name']).to eq('Source Group Name')
+ end
+ end
+
+ context 'when destination namespace already has a group with the same name' do
+ before do
+ create(:group, parent: parent, name: 'Source Group Name', path: 'group_1')
+ create(:group, parent: parent, name: 'Source Group Name(1)', path: 'group_2')
+ create(:group, parent: parent, name: 'Source Group Name(2)', path: 'group_3')
+ create(:group, parent: parent, name: 'Source Group Name(1)(1)', path: 'group_4')
+ end
+
+ it 'makes the name unique by appending a counter', :aggregate_failures do
+ transformed_data = subject.transform(context, data.merge('name' => 'Source Group Name'))
+ expect(transformed_data['name']).to eq('Source Group Name(3)')
+
+ transformed_data = subject.transform(context, data.merge('name' => 'Source Group Name(2)'))
+ expect(transformed_data['name']).to eq('Source Group Name(2)(1)')
+
+ transformed_data = subject.transform(context, data.merge('name' => 'Source Group Name(1)'))
+ expect(transformed_data['name']).to eq('Source Group Name(1)(2)')
+
+ transformed_data = subject.transform(context, data.merge('name' => 'Source Group Name(1)(1)'))
+ expect(transformed_data['name']).to eq('Source Group Name(1)(1)(1)')
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/lib/bulk_imports/pipeline/runner_spec.rb b/spec/lib/bulk_imports/pipeline/runner_spec.rb
index 7235b7c95cd..810271818ae 100644
--- a/spec/lib/bulk_imports/pipeline/runner_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/runner_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe BulkImports::Pipeline::Runner do
Class.new do
def initialize(options = {}); end
- def transform(context); end
+ def transform(context, data); end
end
end
@@ -23,7 +23,7 @@ RSpec.describe BulkImports::Pipeline::Runner do
Class.new do
def initialize(options = {}); end
- def load(context); end
+ def load(context, data); end
end
end
@@ -44,11 +44,73 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
let_it_be_with_reload(:entity) { create(:bulk_import_entity) }
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker, extra: :data) }
+
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker, extra: :data) }
subject { BulkImports::MyPipeline.new(context) }
+ shared_examples 'failed pipeline' do |exception_class, exception_message|
+ it 'logs import failure' do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger).to receive(:error)
+ .with(
+ log_params(
+ context,
+ pipeline_step: :extractor,
+ pipeline_class: 'BulkImports::MyPipeline',
+ exception_class: exception_class,
+ exception_message: exception_message
+ )
+ )
+ end
+
+ expect { subject.run }
+ .to change(entity.failures, :count).by(1)
+
+ failure = entity.failures.first
+
+ expect(failure).to be_present
+ expect(failure.pipeline_class).to eq('BulkImports::MyPipeline')
+ expect(failure.pipeline_step).to eq('extractor')
+ expect(failure.exception_class).to eq(exception_class)
+ expect(failure.exception_message).to eq(exception_message)
+ end
+
+ context 'when pipeline is marked to abort on failure' do
+ before do
+ BulkImports::MyPipeline.abort_on_failure!
+ end
+
+ it 'logs a warn message and marks entity and tracker as failed' do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger).to receive(:warn)
+ .with(
+ log_params(
+ context,
+ message: 'Aborting entity migration due to pipeline failure',
+ pipeline_class: 'BulkImports::MyPipeline'
+ )
+ )
+ end
+
+ subject.run
+
+ expect(entity.failed?).to eq(true)
+ expect(tracker.failed?).to eq(true)
+ end
+ end
+
+ context 'when pipeline is not marked to abort on failure' do
+ it 'does not mark entity as failed' do
+ subject.run
+
+ expect(tracker.failed?).to eq(true)
+ expect(entity.failed?).to eq(false)
+ end
+ end
+ end
+
describe 'pipeline runner' do
context 'when entity is not marked as failed' do
it 'runs pipeline extractor, transformer, loader' do
@@ -145,70 +207,65 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
end
- context 'when exception is raised' do
+ context 'when the exception BulkImports::NetworkError is raised' do
before do
allow_next_instance_of(BulkImports::Extractor) do |extractor|
- allow(extractor).to receive(:extract).with(context).and_raise(StandardError, 'Error!')
+ allow(extractor).to receive(:extract).with(context).and_raise(
+ BulkImports::NetworkError.new(
+ 'Net::ReadTimeout',
+ response: instance_double(HTTParty::Response, code: response_status_code, headers: {})
+ )
+ )
end
end
- it 'logs import failure' do
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger).to receive(:error)
- .with(
- log_params(
- context,
- pipeline_step: :extractor,
- pipeline_class: 'BulkImports::MyPipeline',
- exception_class: 'StandardError',
- exception_message: 'Error!'
- )
- )
- end
+ context 'when exception is retriable' do
+ let(:response_status_code) { 429 }
- expect { subject.run }
- .to change(entity.failures, :count).by(1)
+ it 'raises the exception BulkImports::RetryPipelineError' do
+ expect { subject.run }.to raise_error(BulkImports::RetryPipelineError)
+ end
+ end
- failure = entity.failures.first
+ context 'when exception is not retriable' do
+ let(:response_status_code) { 503 }
- expect(failure).to be_present
- expect(failure.pipeline_class).to eq('BulkImports::MyPipeline')
- expect(failure.pipeline_step).to eq('extractor')
- expect(failure.exception_class).to eq('StandardError')
- expect(failure.exception_message).to eq('Error!')
+ it_behaves_like 'failed pipeline', 'BulkImports::NetworkError', 'Net::ReadTimeout'
end
+ end
- context 'when pipeline is marked to abort on failure' do
- before do
- BulkImports::MyPipeline.abort_on_failure!
- end
-
- it 'logs a warn message and marks entity as failed' do
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger).to receive(:warn)
- .with(
- log_params(
- context,
- message: 'Pipeline failed',
- pipeline_class: 'BulkImports::MyPipeline'
+ context 'when a retriable BulkImports::NetworkError exception is raised while extracting the next page' do
+ before do
+ call_count = 0
+ allow_next_instance_of(BulkImports::Extractor) do |extractor|
+ allow(extractor).to receive(:extract).with(context).twice do
+ if call_count.zero?
+ call_count += 1
+ extracted_data(has_next_page: true)
+ else
+ raise(
+ BulkImports::NetworkError.new(
+ response: instance_double(HTTParty::Response, code: 429, headers: {})
)
)
+ end
end
-
- subject.run
-
- expect(entity.status_name).to eq(:failed)
- expect(tracker.status_name).to eq(:failed)
end
end
- context 'when pipeline is not marked to abort on failure' do
- it 'does not mark entity as failed' do
- subject.run
+ it 'raises the exception BulkImports::RetryPipelineError' do
+ expect { subject.run }.to raise_error(BulkImports::RetryPipelineError)
+ end
+ end
- expect(entity.failed?).to eq(false)
+ context 'when the exception StandardError is raised' do
+ before do
+ allow_next_instance_of(BulkImports::Extractor) do |extractor|
+ allow(extractor).to receive(:extract).with(context).and_raise(StandardError, 'Error!')
end
end
+
+ it_behaves_like 'failed pipeline', 'StandardError', 'Error!'
end
end
diff --git a/spec/lib/bulk_imports/retry_pipeline_error_spec.rb b/spec/lib/bulk_imports/retry_pipeline_error_spec.rb
new file mode 100644
index 00000000000..9d96407b03a
--- /dev/null
+++ b/spec/lib/bulk_imports/retry_pipeline_error_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::RetryPipelineError do
+ describe '#retry_delay' do
+ it 'returns retry_delay' do
+ exception = described_class.new('Error!', 60)
+
+ expect(exception.retry_delay).to eq(60)
+ end
+ end
+end
diff --git a/spec/lib/container_registry/gitlab_api_client_spec.rb b/spec/lib/container_registry/gitlab_api_client_spec.rb
index 16d2c42f332..f2c627734a3 100644
--- a/spec/lib/container_registry/gitlab_api_client_spec.rb
+++ b/spec/lib/container_registry/gitlab_api_client_spec.rb
@@ -316,6 +316,17 @@ RSpec.describe ContainerRegistry::GitlabApiClient do
it { is_expected.to eq(nil) }
end
+
+ context 'with uppercase path' do
+ let(:path) { 'foo/Bar' }
+
+ before do
+ expect(Auth::ContainerRegistryAuthenticationService).to receive(:pull_nested_repositories_access_token).with(path.downcase).and_return(token)
+ stub_repository_details(path, sizing: :self_with_descendants, status_code: 200, respond_with: response)
+ end
+
+ it { is_expected.to eq(555) }
+ end
end
def stub_pre_import(path, status_code, pre:)
diff --git a/spec/lib/error_tracking/collector/dsn_spec.rb b/spec/lib/error_tracking/collector/dsn_spec.rb
deleted file mode 100644
index 3aa8719fe38..00000000000
--- a/spec/lib/error_tracking/collector/dsn_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ErrorTracking::Collector::Dsn do
- describe '.build_url' do
- let(:setting) do
- {
- protocol: 'https',
- https: true,
- port: 443,
- host: 'gitlab.example.com',
- relative_url_root: nil
- }
- end
-
- subject { described_class.build_url('abcdef1234567890', 778) }
-
- it 'returns a valid URL without explicit port' do
- stub_config_setting(setting)
-
- is_expected.to eq('https://abcdef1234567890@gitlab.example.com/api/v4/error_tracking/collector/778')
- end
-
- context 'with non-standard port' do
- it 'returns a valid URL with custom port' do
- setting[:port] = 4567
- stub_config_setting(setting)
-
- is_expected.to eq('https://abcdef1234567890@gitlab.example.com:4567/api/v4/error_tracking/collector/778')
- end
- end
- end
-end
diff --git a/spec/lib/error_tracking/stacktrace_builder_spec.rb b/spec/lib/error_tracking/stacktrace_builder_spec.rb
index 46d0bde8122..57eead13fc0 100644
--- a/spec/lib/error_tracking/stacktrace_builder_spec.rb
+++ b/spec/lib/error_tracking/stacktrace_builder_spec.rb
@@ -56,6 +56,35 @@ RSpec.describe ErrorTracking::StacktraceBuilder do
end
end
+ context 'when exception payload is a list' do
+ let(:payload_file) { 'error_tracking/go_two_exception_event.json' }
+
+ it 'extracts a stacktrace' do
+ expected_entry = {
+ 'lineNo' => 54,
+ 'context' => [
+ [49, "\t// Set the timeout to the maximum duration the program can afford to wait."],
+ [50, "\tdefer sentry.Flush(2 * time.Second)"],
+ [51, ""],
+ [52, "\tresp, err := http.Get(os.Args[1])"],
+ [53, "\tif err != nil {"],
+ [54, "\t\tsentry.CaptureException(err)"],
+ [55, "\t\tlog.Printf(\"reported to Sentry: %s\", err)"],
+ [56, "\t\treturn"],
+ [57, "\t}"],
+ [58, "\tdefer resp.Body.Close()"],
+ [59, ""]
+ ],
+ 'filename' => nil,
+ 'function' => 'main',
+ 'colNo' => 0
+ }
+
+ expect(stacktrace).to be_kind_of(Array)
+ expect(stacktrace.first).to eq(expected_entry)
+ end
+ end
+
context 'with empty payload' do
let(:payload) { {} }
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 6e32db09426..4db3f04717b 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -512,6 +512,11 @@ RSpec.describe Feature, stub_feature_flags: false do
let(:expected_extra) { { "extra.thing" => "true" } }
end
+ # This is documented to return true; modify doc/administration/feature_flags.md if it changes
+ it 'returns true' do
+ expect(subject).to be true
+ end
+
context 'when thing is an actor' do
let(:thing) { create(:project) }
@@ -533,6 +538,11 @@ RSpec.describe Feature, stub_feature_flags: false do
let(:expected_extra) { { "extra.thing" => "false" } }
end
+ # This is documented to return true; modify doc/administration/feature_flags.md if it changes
+ it 'returns true' do
+ expect(subject).to be true
+ end
+
context 'when thing is an actor' do
let(:thing) { create(:project) }
diff --git a/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb
index 4cba9732c22..b6e1d59f6c0 100644
--- a/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb
+++ b/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb
@@ -38,6 +38,29 @@ RSpec.describe Gitlab::UsageMetricDefinition::RedisHllGenerator, :silence_stdout
expect(monthly_metric_definition["instrumentation_class"]).to eq('RedisHLLMetric')
end
+ context 'with multiple events', :aggregate_failures do
+ let(:event_2) { 'i_test_event_2' }
+ let(:args) { [category, event, event_2] }
+
+ it 'creates metric definition files' do
+ described_class.new(args).invoke_all
+
+ [event, event_2].each do |event|
+ weekly_metric_definition_path = Dir.glob(File.join(temp_dir, "metrics/counts_7d/*#{event}_weekly.yml")).first
+ monthly_metric_definition_path = Dir.glob(File.join(temp_dir, "metrics/counts_28d/*#{event}_monthly.yml")).first
+
+ weekly_metric_definition = YAML.safe_load(File.read(weekly_metric_definition_path))
+ monthly_metric_definition = YAML.safe_load(File.read(monthly_metric_definition_path))
+
+ expect(weekly_metric_definition).to include("key_path" => "redis_hll_counters.test_category.#{event}_weekly")
+ expect(monthly_metric_definition).to include("key_path" => "redis_hll_counters.test_category.#{event}_monthly")
+
+ expect(weekly_metric_definition["instrumentation_class"]).to eq('RedisHLLMetric')
+ expect(monthly_metric_definition["instrumentation_class"]).to eq('RedisHLLMetric')
+ end
+ end
+ end
+
context 'with ee option' do
let(:weekly_metric_definition_path) { Dir.glob(File.join(temp_dir, 'ee/config/metrics/counts_7d/*i_test_event_weekly.yml')).first }
let(:monthly_metric_definition_path) { Dir.glob(File.join(temp_dir, 'ee/config/metrics/counts_28d/*i_test_event_monthly.yml')).first }
diff --git a/spec/lib/generators/model/mocks/migration_file.txt b/spec/lib/generators/model/mocks/migration_file.txt
new file mode 100644
index 00000000000..e92c2d2b534
--- /dev/null
+++ b/spec/lib/generators/model/mocks/migration_file.txt
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+# See https://docs.gitlab.com/ee/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class CreateModelGeneratorTestFoos < Gitlab::Database::Migration[2.0]
+ # When using the methods "add_concurrent_index" or "remove_concurrent_index"
+ # you must disable the use of transactions
+ # as these methods can not run in an existing transaction.
+ # When using "add_concurrent_index" or "remove_concurrent_index" methods make sure
+ # that either of them is the _only_ method called in the migration,
+ # any other changes should go in a separate migration.
+ # This ensures that upon failure _only_ the index creation or removing fails
+ # and can be retried or reverted easily.
+ #
+ # To disable transactions uncomment the following line and remove these
+ # comments:
+ # disable_ddl_transaction!
+
+ def change
+ create_table :model_generator_test_foos do |t|
+
+ t.timestamps null: false
+ end
+ end
+end
diff --git a/spec/lib/generators/model/mocks/model_file.txt b/spec/lib/generators/model/mocks/model_file.txt
new file mode 100644
index 00000000000..066db4bfd76
--- /dev/null
+++ b/spec/lib/generators/model/mocks/model_file.txt
@@ -0,0 +1,2 @@
+class ModelGeneratorTestFoo < ApplicationRecord
+end
diff --git a/spec/lib/generators/model/mocks/spec_file.txt b/spec/lib/generators/model/mocks/spec_file.txt
new file mode 100644
index 00000000000..efd700df0a1
--- /dev/null
+++ b/spec/lib/generators/model/mocks/spec_file.txt
@@ -0,0 +1,5 @@
+require 'rails_helper'
+
+RSpec.describe ModelGeneratorTestFoo, type: :model do
+ pending "add some examples to (or delete) #{__FILE__}"
+end
diff --git a/spec/lib/generators/model/model_generator_spec.rb b/spec/lib/generators/model/model_generator_spec.rb
new file mode 100644
index 00000000000..0e770190d25
--- /dev/null
+++ b/spec/lib/generators/model/model_generator_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Model::ModelGenerator do
+ let(:args) { ['ModelGeneratorTestFoo'] }
+ let(:options) { { 'migration' => true, 'timestamps' => true, 'indexes' => true, 'test_framework' => :rspec } }
+ let(:temp_dir) { Dir.mktmpdir }
+ let(:migration_file_path) { Dir.glob(File.join(temp_dir, 'db/migrate/*create_model_generator_test_foos.rb')).first }
+ let(:model_file_path) { File.join(temp_dir, 'app/models/model_generator_test_foo.rb') }
+ let(:spec_file_path) { File.join(temp_dir, 'spec/models/model_generator_test_foo_spec.rb') }
+
+ subject { described_class.new(args, options, { destination_root: temp_dir }) }
+
+ context 'when generating a model' do
+ after do
+ FileUtils.rm_rf(temp_dir)
+ end
+
+ it 'creates the model file with the right content' do
+ subject.invoke_all
+
+ expect(File).to exist(model_file_path)
+ mock_model_file_content = File.read(File.expand_path('./mocks/model_file.txt', __dir__))
+ model_file_content = File.read(model_file_path)
+ expect(model_file_content).to eq(mock_model_file_content)
+ end
+
+ it 'creates the migration file with the right content' do
+ subject.invoke_all
+
+ expect(File).to exist(migration_file_path)
+ mock_migration_file_content = File.read(File.expand_path('./mocks/migration_file.txt', __dir__))
+ migration_file_content = File.read(migration_file_path)
+ expect(migration_file_content).to eq(mock_migration_file_content)
+ end
+
+ it 'creates the spec file with the right content' do
+ subject.invoke_all
+
+ expect(File).to exist(spec_file_path)
+ mock_spec_file_content = File.read(File.expand_path('./mocks/spec_file.txt', __dir__))
+ spec_file_content = File.read(spec_file_path)
+ expect(spec_file_content).to eq(mock_spec_file_content)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
index ec394bb9f05..34d5158a5ab 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
describe '#serialized_records' do
shared_context 'when records are loaded by maintainer' do
before do
- project.add_user(user, Gitlab::Access::DEVELOPER)
+ project.add_member(user, Gitlab::Access::DEVELOPER)
end
it 'returns all records' do
@@ -72,7 +72,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
context 'when records are loaded by guest' do
before do
- project.add_user(user, Gitlab::Access::GUEST)
+ project.add_member(user, Gitlab::Access::GUEST)
end
it 'filters out confidential issues' do
@@ -124,7 +124,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
end
before do
- project.add_user(user, Gitlab::Access::DEVELOPER)
+ project.add_member(user, Gitlab::Access::DEVELOPER)
stub_const('Gitlab::Analytics::CycleAnalytics::RecordsFetcher::MAX_RECORDS', 2)
end
diff --git a/spec/lib/gitlab/application_rate_limiter/base_strategy_spec.rb b/spec/lib/gitlab/application_rate_limiter/base_strategy_spec.rb
new file mode 100644
index 00000000000..b34ac538b24
--- /dev/null
+++ b/spec/lib/gitlab/application_rate_limiter/base_strategy_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ApplicationRateLimiter::BaseStrategy do
+ describe '#increment' do
+ it 'raises NotImplementedError' do
+ expect { subject.increment('cache_key', 0) }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#read' do
+ it 'raises NotImplementedError' do
+ expect { subject.read('cache_key') }.to raise_error(NotImplementedError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/application_rate_limiter/increment_per_action_spec.rb b/spec/lib/gitlab/application_rate_limiter/increment_per_action_spec.rb
new file mode 100644
index 00000000000..b74d2360711
--- /dev/null
+++ b/spec/lib/gitlab/application_rate_limiter/increment_per_action_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ApplicationRateLimiter::IncrementPerAction, :freeze_time, :clean_gitlab_redis_rate_limiting do
+ let(:cache_key) { 'test' }
+ let(:expiry) { 60 }
+
+ subject(:counter) { described_class.new }
+
+ def increment
+ counter.increment(cache_key, expiry)
+ end
+
+ describe '#increment' do
+ it 'increments per call' do
+ expect(increment).to eq 1
+ expect(increment).to eq 2
+ expect(increment).to eq 3
+ end
+
+ it 'sets time to live (TTL) for the key' do
+ def ttl
+ Gitlab::Redis::RateLimiting.with { |r| r.ttl(cache_key) }
+ end
+
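+ # Redis reports a TTL of -2 for keys that do not exist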
+ key_does_not_exist = -2
+
+ expect(ttl).to eq key_does_not_exist
+ expect { increment }.to change { ttl }.by(a_value > 0)
+ end
+ end
+
+ describe '#read' do
+ def read
+ counter.read(cache_key)
+ end
+
+ it 'returns 0 when there is no data' do
+ expect(read).to eq 0
+ end
+
+ it 'returns the correct value', :aggregate_failures do
+ increment
+ expect(read).to eq 1
+
+ increment
+ expect(read).to eq 2
+ end
+ end
+end
diff --git a/spec/lib/gitlab/application_rate_limiter/increment_per_actioned_resource_spec.rb b/spec/lib/gitlab/application_rate_limiter/increment_per_actioned_resource_spec.rb
new file mode 100644
index 00000000000..1f3ae2d749a
--- /dev/null
+++ b/spec/lib/gitlab/application_rate_limiter/increment_per_actioned_resource_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ApplicationRateLimiter::IncrementPerActionedResource,
+ :freeze_time, :clean_gitlab_redis_rate_limiting do
+ let(:cache_key) { 'test' }
+ let(:expiry) { 60 }
+
+ def increment(resource_key)
+ described_class.new(resource_key).increment(cache_key, expiry)
+ end
+
+ describe '#increment' do
+ it 'increments per resource', :aggregate_failures do
+ expect(increment('resource_1')).to eq(1)
+ expect(increment('resource_1')).to eq(1)
+ expect(increment('resource_2')).to eq(2)
+ expect(increment('resource_2')).to eq(2)
+ expect(increment('resource_3')).to eq(3)
+ end
+
+ it 'sets time to live (TTL) for the key' do
+ def ttl
+ Gitlab::Redis::RateLimiting.with { |r| r.ttl(cache_key) }
+ end
+
+ key_does_not_exist = -2
+
+ expect(ttl).to eq key_does_not_exist
+ expect { increment('resource_1') }.to change { ttl }.by(a_value > 0)
+ end
+ end
+
+ describe '#read' do
+ def read
+ described_class.new(nil).read(cache_key)
+ end
+
+ it 'returns 0 when there is no data' do
+ expect(read).to eq 0
+ end
+
+ it 'returns the correct value', :aggregate_failures do
+ increment 'r1'
+ expect(read).to eq 1
+
+ increment 'r2'
+ expect(read).to eq 2
+ end
+ end
+end
diff --git a/spec/lib/gitlab/application_rate_limiter_spec.rb b/spec/lib/gitlab/application_rate_limiter_spec.rb
index efe78cd3a35..177ce1134d8 100644
--- a/spec/lib/gitlab/application_rate_limiter_spec.rb
+++ b/spec/lib/gitlab/application_rate_limiter_spec.rb
@@ -13,8 +13,8 @@ RSpec.describe Gitlab::ApplicationRateLimiter, :clean_gitlab_redis_rate_limiting
interval: 2.minutes
},
another_action: {
- threshold: 2,
- interval: 3.minutes
+ threshold: -> { 2 },
+ interval: -> { 3.minutes }
}
}
end
@@ -70,6 +70,44 @@ RSpec.describe Gitlab::ApplicationRateLimiter, :clean_gitlab_redis_rate_limiting
end
end
+ describe 'counting actions once per unique resource' do
+ let(:scope) { [user, project] }
+
+ let(:start_time) { Time.current.beginning_of_hour }
+ let(:project1) { instance_double(Project, id: '1') }
+ let(:project2) { instance_double(Project, id: '2') }
+
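+ # With the :test_action limits defined above, a second unique resource within the interval exceeds the threshold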
+ it 'returns true when unique actioned resources count exceeds threshold' do
+ travel_to(start_time) do
+ expect(subject.throttled?(:test_action, scope: scope, resource: project1)).to eq(false)
+ end
+
+ travel_to(start_time + 1.minute) do
+ expect(subject.throttled?(:test_action, scope: scope, resource: project2)).to eq(true)
+ end
+ end
+
+ it 'returns false when unique actioned resource count does not exceed threshold' do
+ travel_to(start_time) do
+ expect(subject.throttled?(:test_action, scope: scope, resource: project1)).to eq(false)
+ end
+
+ travel_to(start_time + 1.minute) do
+ expect(subject.throttled?(:test_action, scope: scope, resource: project1)).to eq(false)
+ end
+ end
+
+ it 'returns false when interval has elapsed' do
+ travel_to(start_time) do
+ expect(subject.throttled?(:test_action, scope: scope, resource: project1)).to eq(false)
+ end
+
+ travel_to(start_time + 2.minutes) do
+ expect(subject.throttled?(:test_action, scope: scope, resource: project2)).to eq(false)
+ end
+ end
+ end
+
shared_examples 'throttles based on key and scope' do
let(:start_time) { Time.current.beginning_of_hour }
@@ -91,7 +129,7 @@ RSpec.describe Gitlab::ApplicationRateLimiter, :clean_gitlab_redis_rate_limiting
travel_to(start_time) do
expect(subject.throttled?(:test_action, scope: scope)).to eq(false)
- # another_action has a threshold of 3 so we simulate 2 requests
+ # another_action has a threshold of 2 so we simulate 2 requests
expect(subject.throttled?(:another_action, scope: scope)).to eq(false)
expect(subject.throttled?(:another_action, scope: scope)).to eq(false)
end
@@ -189,4 +227,20 @@ RSpec.describe Gitlab::ApplicationRateLimiter, :clean_gitlab_redis_rate_limiting
end
end
end
+
+ context 'when interval is 0' do
+ let(:rate_limits) { { test_action: { threshold: 1, interval: 0 } } }
+ let(:scope) { user }
+ let(:start_time) { Time.current.beginning_of_hour }
+
+ it 'returns false' do
+ travel_to(start_time) do
+ expect(subject.throttled?(:test_action, scope: scope)).to eq(false)
+ end
+
+ travel_to(start_time + 1.minute) do
+ expect(subject.throttled?(:test_action, scope: scope)).to eq(false)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/auth/ldap/user_spec.rb b/spec/lib/gitlab/auth/ldap/user_spec.rb
index da0bb5fe675..b471a89b491 100644
--- a/spec/lib/gitlab/auth/ldap/user_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/user_spec.rb
@@ -49,6 +49,24 @@ RSpec.describe Gitlab::Auth::Ldap::User do
end
end
+ describe '#valid_sign_in?' do
+ before do
+ gl_user.save!
+ end
+
+ it 'returns true' do
+ expect(Gitlab::Auth::Ldap::Access).to receive(:allowed?).and_return(true)
+ expect(ldap_user.valid_sign_in?).to be true
+ end
+
+ it 'returns false if the GitLab user is not valid' do
+ gl_user.update_column(:username, nil)
+
+ expect(Gitlab::Auth::Ldap::Access).not_to receive(:allowed?)
+ expect(ldap_user.valid_sign_in?).to be false
+ end
+ end
+
describe 'find or create' do
it "finds the user if already existing" do
create(:omniauth_user, extern_uid: 'uid=john smith,ou=people,dc=example,dc=com', provider: 'ldapmain')
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index f5a74956174..1e869df0988 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -481,6 +481,17 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
it_behaves_like 'with an invalid access token'
+
+ context 'when the token belongs to a group via project share' do
+ let_it_be(:invited_group) { create(:group) }
+
+ before do
+ invited_group.add_maintainer(project_bot_user)
+ create(:project_group_link, group: invited_group, project: project)
+ end
+
+ it_behaves_like 'with a valid access token'
+ end
end
end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_ci_runner_semver_spec.rb b/spec/lib/gitlab/background_migration/backfill_ci_runner_semver_spec.rb
new file mode 100644
index 00000000000..7c78d8b0305
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_ci_runner_semver_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillCiRunnerSemver, :migration, schema: 20220601151900 do
+ let(:ci_runners) { table(:ci_runners, database: :ci) }
+
+ subject do
+ described_class.new(
+ start_id: 10,
+ end_id: 15,
+ batch_table: :ci_runners,
+ batch_column: :id,
+ sub_batch_size: 10,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection)
+ end
+
+ describe '#perform' do
+ it 'populates semver column on all runners in range' do
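+ # Versions without a leading semver (e.g. HEAD-fd84d97) are expected to backfill a nil semver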
+ ci_runners.create!(id: 10, runner_type: 1, version: %q(HEAD-fd84d97))
+ ci_runners.create!(id: 11, runner_type: 1, version: %q(v1.2.3))
+ ci_runners.create!(id: 12, runner_type: 1, version: %q(2.1.0))
+ ci_runners.create!(id: 13, runner_type: 1, version: %q(11.8.0~beta.935.g7f6d2abc))
+ ci_runners.create!(id: 14, runner_type: 1, version: %q(13.2.2/1.1.0))
+ ci_runners.create!(id: 15, runner_type: 1, version: %q('14.3.4'))
+
+ subject.perform
+
+ expect(ci_runners.all).to contain_exactly(
+ an_object_having_attributes(id: 10, semver: nil),
+ an_object_having_attributes(id: 11, semver: '1.2.3'),
+ an_object_having_attributes(id: 12, semver: '2.1.0'),
+ an_object_having_attributes(id: 13, semver: '11.8.0'),
+ an_object_having_attributes(id: 14, semver: '13.2.2'),
+ an_object_having_attributes(id: 15, semver: '14.3.4')
+ )
+ end
+
+ it 'skips runners that already have semver value' do
+ ci_runners.create!(id: 10, runner_type: 1, version: %q(1.2.4), semver: '1.2.3')
+ ci_runners.create!(id: 11, runner_type: 1, version: %q(1.2.5))
+ ci_runners.create!(id: 12, runner_type: 1, version: %q(HEAD), semver: '1.2.4')
+
+ subject.perform
+
+ expect(ci_runners.all).to contain_exactly(
+ an_object_having_attributes(id: 10, semver: '1.2.3'),
+ an_object_having_attributes(id: 11, semver: '1.2.5'),
+ an_object_having_attributes(id: 12, semver: '1.2.4')
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb b/spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb
new file mode 100644
index 00000000000..e363a5a6b20
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillImportedIssueSearchData,
+ :migration,
+ schema: 20220707075300 do
+ let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let!(:issue_search_data_table) { table(:issue_search_data) }
+
+ let!(:user) { table(:users).create!(email: 'author@example.com', username: 'author', projects_limit: 10) }
+ let!(:project) do
+ table(:projects)
+ .create!(
+ namespace_id: namespace.id,
+ creator_id: user.id,
+ name: 'projecty',
+ path: 'path',
+ project_namespace_id: namespace.id)
+ end
+
+ let!(:issue) do
+ table(:issues).create!(
+ project_id: project.id,
+ title: 'Patterson',
+ description: FFaker::HipsterIpsum.paragraph
+ )
+ end
+
+ let(:migration) do
+ described_class.new(start_id: issue.id,
+ end_id: issue.id + 30,
+ batch_table: :issues,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection)
+ end
+
+ let(:perform_migration) { migration.perform }
+
+ context 'when issue has search data record' do
+ let!(:issue_search_data) { issue_search_data_table.create!(project_id: project.id, issue_id: issue.id) }
+
+ it 'does not create or update any search data records' do
+ expect { perform_migration }
+ .to not_change { issue_search_data_table.count }
+ .and not_change { issue_search_data }
+
+ expect(issue_search_data_table.count).to eq(1)
+ end
+ end
+
+ context 'when issue has no search data record' do
+ let(:title_node) { "'#{issue.title.downcase}':1A" }
+
+ it 'creates search data records' do
+ expect { perform_migration }
+ .to change { issue_search_data_table.count }.from(0).to(1)
+
+ expect(issue_search_data_table.find_by(project_id: project.id).issue_id)
+ .to eq(issue.id)
+
+ expect(issue_search_data_table.find_by(project_id: project.id).search_vector)
+ .to include(title_node)
+ end
+ end
+
+ context 'error handling' do
+ let!(:issue2) do
+ table(:issues).create!(
+ project_id: project.id,
+ title: 'Chatterton',
+ description: FFaker::HipsterIpsum.paragraph
+ )
+ end
+
+ before do
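+ # The oversized description pushes the search vector past PostgreSQL's tsvector size limit, producing the error handled below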
+ issue.update!(description: Array.new(30_000) { SecureRandom.hex }.join(' '))
+ end
+
+ let(:title_node2) { "'#{issue2.title.downcase}':1A" }
+
+ it 'skips insertion for that issue but continues with the migration' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger|
+ expect(logger)
+ .to receive(:error)
+ .with(a_hash_including(message: /string is too long for tsvector/, model_id: issue.id))
+ end
+
+ expect { perform_migration }.to change { issue_search_data_table.count }.from(0).to(1)
+ expect(issue_search_data_table.find_by(issue_id: issue.id)).to eq(nil)
+ expect(issue_search_data_table.find_by(issue_id: issue2.id).search_vector)
+ .to include(title_node2)
+ end
+
+ it 're-raises exceptions' do
+ allow(migration)
+ .to receive(:update_search_data_individually)
+ .and_raise(ActiveRecord::StatementTimeout)
+
+ expect { perform_migration }.to raise_error(ActiveRecord::StatementTimeout)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
index cfa03db52fe..b5122af5cd4 100644
--- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -47,10 +47,7 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
before do
allow(snippet_with_repo).to receive(:disk_path).and_return(disk_path(snippet_with_repo))
- TestEnv.copy_repo(snippet_with_repo,
- bare_repo: TestEnv.factory_repo_path_bare,
- refs: TestEnv::BRANCH_SHA)
-
+ raw_repository(snippet_with_repo).create_from_bundle(TestEnv.factory_repo_bundle_path)
raw_repository(snippet_with_empty_repo).create_repository
end
diff --git a/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb b/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb
index f8b3a8681f0..98866bb765f 100644
--- a/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb
+++ b/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb
@@ -92,5 +92,69 @@ RSpec.describe Gitlab::BackgroundMigration::BatchedMigrationJob do
end
end
end
+
+ context 'when the subclass uses distinct each batch' do
+ let(:job_instance) do
+ job_class.new(start_id: 1,
+ end_id: 100,
+ batch_table: '_test_table',
+ batch_column: 'from_column',
+ sub_batch_size: 2,
+ pause_ms: 10,
+ connection: connection)
+ end
+
+ let(:job_class) do
+ Class.new(described_class) do
+ def perform(*job_arguments)
+ distinct_each_batch(operation_name: :insert) do |sub_batch|
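+ # sub_batch is a relation over the distinct from_column values in the range;
+ # returning sub_batch.size records it as the affected rows for :insert (asserted below)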
+ sub_batch.pluck(:from_column).each do |value|
+ connection.execute("INSERT INTO _test_insert_table VALUES (#{value})")
+ end
+
+ sub_batch.size
+ end
+ end
+ end
+ end
+
+ let(:test_table) { table(:_test_table) }
+ let(:test_insert_table) { table(:_test_insert_table) }
+
+ before do
+ allow(job_instance).to receive(:sleep)
+
+ connection.create_table :_test_table do |t|
+ t.timestamps_with_timezone null: false
+ t.integer :from_column, null: false
+ end
+
+ connection.create_table :_test_insert_table, id: false do |t|
+ t.integer :to_column
+ t.index :to_column, unique: true
+ end
+
+ test_table.create!(id: 1, from_column: 5)
+ test_table.create!(id: 2, from_column: 10)
+ test_table.create!(id: 3, from_column: 10)
+ test_table.create!(id: 4, from_column: 5)
+ test_table.create!(id: 5, from_column: 15)
+ end
+
+ after do
+ connection.drop_table(:_test_table)
+ connection.drop_table(:_test_insert_table)
+ end
+
+ it 'calls the operation for each distinct batch' do
+ expect { perform_job }.to change { test_insert_table.pluck(:to_column) }.from([]).to([5, 10, 15])
+ end
+
+ it 'stores the affected rows' do
+ perform_job
+
+ expect(job_instance.batch_metrics.affected_rows[:insert]).to contain_exactly(2, 1)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_statistics_with_container_registry_size_batching_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_statistics_with_container_registry_size_batching_strategy_spec.rb
new file mode 100644
index 00000000000..94e9bcf9207
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_statistics_with_container_registry_size_batching_strategy_spec.rb
@@ -0,0 +1,138 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::BackfillProjectStatisticsWithContainerRegistrySizeBatchingStrategy, '#next_batch' do # rubocop:disable Layout/LineLength
+ let(:batching_strategy) { described_class.new(connection: ActiveRecord::Base.connection) }
+ let(:namespace) { table(:namespaces) }
+ let(:project) { table(:projects) }
+ let(:container_repositories) { table(:container_repositories) }
+
+ let!(:group) do
+ namespace.create!(
+ name: 'namespace1', type: 'Group', path: 'space1'
+ )
+ end
+
+ let!(:proj_namespace1) do
+ namespace.create!(
+ name: 'proj1', path: 'proj1', type: 'Project', parent_id: group.id
+ )
+ end
+
+ let!(:proj_namespace2) do
+ namespace.create!(
+ name: 'proj2', path: 'proj2', type: 'Project', parent_id: group.id
+ )
+ end
+
+ let!(:proj_namespace3) do
+ namespace.create!(
+ name: 'proj3', path: 'proj3', type: 'Project', parent_id: group.id
+ )
+ end
+
+ let!(:proj1) do
+ project.create!(
+ name: 'proj1', path: 'proj1', namespace_id: group.id, project_namespace_id: proj_namespace1.id
+ )
+ end
+
+ let!(:proj2) do
+ project.create!(
+ name: 'proj2', path: 'proj2', namespace_id: group.id, project_namespace_id: proj_namespace2.id
+ )
+ end
+
+ let!(:proj3) do
+ project.create!(
+ name: 'proj3', path: 'proj3', namespace_id: group.id, project_namespace_id: proj_namespace3.id
+ )
+ end
+
+ let!(:con1) do
+ container_repositories.create!(
+ project_id: proj1.id,
+ name: "ContReg_#{proj1.id}:1",
+ migration_state: 'import_done',
+ created_at: Date.new(2022, 01, 20)
+ )
+ end
+
+ let!(:con2) do
+ container_repositories.create!(
+ project_id: proj1.id,
+ name: "ContReg_#{proj1.id}:2",
+ migration_state: 'import_done',
+ created_at: Date.new(2022, 01, 20)
+ )
+ end
+
+ let!(:con3) do
+ container_repositories.create!(
+ project_id: proj2.id,
+ name: "ContReg_#{proj2.id}:1",
+ migration_state: 'import_done',
+ created_at: Date.new(2022, 01, 20)
+ )
+ end
+
+ let!(:con4) do
+ container_repositories.create!(
+ project_id: proj3.id,
+ name: "ContReg_#{proj3.id}:1",
+ migration_state: 'default',
+ created_at: Date.new(2022, 02, 20)
+ )
+ end
+
+ let!(:con5) do
+ container_repositories.create!(
+ project_id: proj3.id,
+ name: "ContReg_#{proj3.id}:2",
+ migration_state: 'default',
+ created_at: Date.new(2022, 02, 20)
+ )
+ end
+
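+ # Batches advance over distinct project_id values (proj1..proj3), so batch bounds are computed per project rather than per repository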
+ it { expect(described_class).to be < Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy }
+
+ context 'when starting on the first batch' do
+ it 'returns the bounds of the next batch' do
+ batch_bounds = batching_strategy.next_batch(
+ :container_repositories,
+ :project_id,
+ batch_min_value: con1.project_id,
+ batch_size: 3,
+ job_arguments: []
+ )
+ expect(batch_bounds).to eq([con1.project_id, con4.project_id])
+ end
+ end
+
+ context 'when additional batches remain' do
+ it 'returns the bounds of the next batch' do
+ batch_bounds = batching_strategy.next_batch(
+ :container_repositories,
+ :project_id,
+ batch_min_value: con3.project_id,
+ batch_size: 3,
+ job_arguments: []
+ )
+
+ expect(batch_bounds).to eq([con3.project_id, con5.project_id])
+ end
+ end
+
+ context 'when no additional batches remain' do
+ it 'returns nil' do
+ batch_bounds = batching_strategy.next_batch(:container_repositories,
+ :project_id,
+ batch_min_value: con5.project_id + 1,
+ batch_size: 1, job_arguments: []
+ )
+
+ expect(batch_bounds).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/batching_strategies/dismissed_vulnerabilities_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/dismissed_vulnerabilities_strategy_spec.rb
new file mode 100644
index 00000000000..f96c7de50f2
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/batching_strategies/dismissed_vulnerabilities_strategy_spec.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::DismissedVulnerabilitiesStrategy, '#next_batch' do
+ let(:batching_strategy) { described_class.new(connection: ActiveRecord::Base.connection) }
+ let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let(:users) { table(:users) }
+ let(:user) { create_user! }
+ let(:project) do
+ table(:projects).create!(
+ namespace_id: namespace.id,
+ project_namespace_id: namespace.id,
+ packages_enabled: false)
+ end
+
+ let(:vulnerabilities) { table(:vulnerabilities) }
+
+ let!(:vulnerability1) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id,
+ dismissed_at: Time.current
+ )
+ end
+
+ let!(:vulnerability2) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id,
+ dismissed_at: Time.current
+ )
+ end
+
+ let!(:vulnerability3) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id,
+ dismissed_at: Time.current
+ )
+ end
+
+ let!(:vulnerability4) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id,
+ dismissed_at: nil
+ )
+ end
+
+ it { expect(described_class).to be < Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy }
+
+ context 'when starting on the first batch' do
+ it 'returns the bounds of the next batch' do
+ batch_bounds = batching_strategy.next_batch(
+ :vulnerabilities,
+ :id,
+ batch_min_value: vulnerability1.id,
+ batch_size: 2,
+ job_arguments: []
+ )
+ expect(batch_bounds).to eq([vulnerability1.id, vulnerability2.id])
+ end
+ end
+
+ context 'when additional batches remain' do
+ it 'returns the bounds of the next batch and skips the records that do not have `dismissed_at` set' do
+ batch_bounds = batching_strategy.next_batch(
+ :vulnerabilities,
+ :id,
+ batch_min_value: vulnerability3.id,
+ batch_size: 2,
+ job_arguments: []
+ )
+
+ expect(batch_bounds).to eq([vulnerability3.id, vulnerability3.id])
+ end
+ end
+
+ context 'when no additional batches remain' do
+ it 'returns nil' do
+ batch_bounds = batching_strategy.next_batch(
+ :vulnerabilities,
+ :id,
+ batch_min_value: vulnerability4.id + 1,
+ batch_size: 1,
+ job_arguments: []
+ )
+
+ expect(batch_bounds).to be_nil
+ end
+ end
+
+ private
+
+ def create_vulnerability!(
+ project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0, state: 1, dismissed_at: nil
+ )
+ vulnerabilities.create!(
+ project_id: project_id,
+ author_id: author_id,
+ title: title,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type,
+ state: state,
+ dismissed_at: dismissed_at
+ )
+ end
+
+ def create_user!(name: "Example User", email: "user@example.com", user_type: nil)
+ users.create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 10
+ )
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/batching_strategies/loose_index_scan_batching_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/loose_index_scan_batching_strategy_spec.rb
new file mode 100644
index 00000000000..1a00fd7c8b3
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/batching_strategies/loose_index_scan_batching_strategy_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::LooseIndexScanBatchingStrategy, '#next_batch' do
+ let(:batching_strategy) { described_class.new(connection: ActiveRecord::Base.connection) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:issues) { table(:issues) }
+
+ let!(:namespace1) { namespaces.create!(name: 'ns1', path: 'ns1') }
+ let!(:namespace2) { namespaces.create!(name: 'ns2', path: 'ns2') }
+ let!(:namespace3) { namespaces.create!(name: 'ns3', path: 'ns3') }
+ let!(:namespace4) { namespaces.create!(name: 'ns4', path: 'ns4') }
+ let!(:namespace5) { namespaces.create!(name: 'ns5', path: 'ns5') }
+ let!(:project1) { projects.create!(name: 'p1', namespace_id: namespace1.id, project_namespace_id: namespace1.id) }
+ let!(:project2) { projects.create!(name: 'p2', namespace_id: namespace2.id, project_namespace_id: namespace2.id) }
+ let!(:project3) { projects.create!(name: 'p3', namespace_id: namespace3.id, project_namespace_id: namespace3.id) }
+ let!(:project4) { projects.create!(name: 'p4', namespace_id: namespace4.id, project_namespace_id: namespace4.id) }
+ let!(:project5) { projects.create!(name: 'p5', namespace_id: namespace5.id, project_namespace_id: namespace5.id) }
+
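+ # The issues below span five distinct project_ids (project1..project5); the loose index scan batches over these distinct values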
+ let!(:issue1) { issues.create!(title: 'title', description: 'description', project_id: project2.id) }
+ let!(:issue2) { issues.create!(title: 'title', description: 'description', project_id: project1.id) }
+ let!(:issue3) { issues.create!(title: 'title', description: 'description', project_id: project2.id) }
+ let!(:issue4) { issues.create!(title: 'title', description: 'description', project_id: project3.id) }
+ let!(:issue5) { issues.create!(title: 'title', description: 'description', project_id: project2.id) }
+ let!(:issue6) { issues.create!(title: 'title', description: 'description', project_id: project4.id) }
+ let!(:issue7) { issues.create!(title: 'title', description: 'description', project_id: project5.id) }
+
+ it { expect(described_class).to be < Gitlab::BackgroundMigration::BatchingStrategies::BaseStrategy }
+
+ context 'when starting on the first batch' do
+ it 'returns the bounds of the next batch' do
+ batch_bounds = batching_strategy
+ .next_batch(:issues, :project_id, batch_min_value: project1.id, batch_size: 2, job_arguments: [])
+
+ expect(batch_bounds).to eq([project1.id, project2.id])
+ end
+ end
+
+ context 'when additional batches remain' do
+ it 'returns the bounds of the next batch' do
+ batch_bounds = batching_strategy
+ .next_batch(:issues, :project_id, batch_min_value: project2.id, batch_size: 3, job_arguments: [])
+
+ expect(batch_bounds).to eq([project2.id, project4.id])
+ end
+ end
+
+ context 'when on the final batch' do
+ it 'returns the bounds of the next batch' do
+ batch_bounds = batching_strategy
+ .next_batch(:issues, :project_id, batch_min_value: project4.id, batch_size: 3, job_arguments: [])
+
+ expect(batch_bounds).to eq([project4.id, project5.id])
+ end
+ end
+
+ context 'when no additional batches remain' do
+ it 'returns nil' do
+ batch_bounds = batching_strategy
+ .next_batch(:issues, :project_id, batch_min_value: project5.id + 1, batch_size: 1, job_arguments: [])
+
+ expect(batch_bounds).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb
index 521e2067744..943b5744b64 100644
--- a/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb
+++ b/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb
@@ -45,10 +45,30 @@ RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchi
end
end
+ context 'when job_class is provided with a batching_scope' do
+ let(:job_class) do
+ Class.new(described_class) do
+ def self.batching_scope(relation, job_arguments:)
+ min_id = job_arguments.first
+
+ relation.where.not(type: 'Project').where('id >= ?', min_id)
+ end
+ end
+ end
+
+ it 'applies the batching scope' do
+ expect(job_class).to receive(:batching_scope).and_call_original
+
+ batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace4.id, batch_size: 3, job_arguments: [1], job_class: job_class)
+
+ expect(batch_bounds).to eq([namespace4.id, namespace4.id])
+ end
+ end
+
context 'additional filters' do
let(:strategy_with_filters) do
Class.new(described_class) do
- def apply_additional_filters(relation, job_arguments:)
+ def apply_additional_filters(relation, job_arguments:, job_class: nil)
min_id = job_arguments.first
relation.where.not(type: 'Project').where('id >= ?', min_id)
diff --git a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_inactive_public_projects_spec.rb b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_inactive_public_projects_spec.rb
new file mode 100644
index 00000000000..f5a2dc91185
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_inactive_public_projects_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForInactivePublicProjects, :migration do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:projects_table) { table(:projects) }
+ let(:project_settings_table) { table(:project_settings) }
+
+ subject(:perform_migration) do
+ described_class.new(start_id: projects_table.minimum(:id),
+ end_id: projects_table.maximum(:id),
+ batch_table: :projects,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection)
+ .perform
+ end
+
+ let(:queries) { ActiveRecord::QueryRecorder.new { perform_migration } }
+
+ let(:namespace_1) { namespaces_table.create!(name: 'namespace', path: 'namespace-path-1') }
+ let(:project_namespace_2) { namespaces_table.create!(name: 'namespace', path: 'namespace-path-2', type: 'Project') }
+ let(:project_namespace_3) { namespaces_table.create!(name: 'namespace', path: 'namespace-path-3', type: 'Project') }
+ let(:project_namespace_4) { namespaces_table.create!(name: 'namespace', path: 'namespace-path-4', type: 'Project') }
+ let(:project_namespace_5) { namespaces_table.create!(name: 'namespace', path: 'namespace-path-5', type: 'Project') }
+
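+ # visibility_level values: 0 = private, 10 = internal, 20 = public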
+ let(:project_1) do
+ projects_table
+ .create!(
+ name: 'proj-1', path: 'path-1', namespace_id: namespace_1.id,
+ project_namespace_id: project_namespace_2.id, visibility_level: 0
+ )
+ end
+
+ let(:project_2) do
+ projects_table
+ .create!(
+ name: 'proj-2', path: 'path-2', namespace_id: namespace_1.id,
+ project_namespace_id: project_namespace_3.id, visibility_level: 10
+ )
+ end
+
+ let(:project_3) do
+ projects_table
+ .create!(
+ name: 'proj-3', path: 'path-3', namespace_id: namespace_1.id,
+ project_namespace_id: project_namespace_4.id, visibility_level: 20, last_activity_at: '2021-01-01'
+ )
+ end
+
+ let(:project_4) do
+ projects_table
+ .create!(
+ name: 'proj-4', path: 'path-4', namespace_id: namespace_1.id,
+ project_namespace_id: project_namespace_5.id, visibility_level: 20, last_activity_at: '2022-01-01'
+ )
+ end
+
+ before do
+ project_settings_table.create!(project_id: project_1.id, legacy_open_source_license_available: true)
+ project_settings_table.create!(project_id: project_2.id, legacy_open_source_license_available: true)
+ project_settings_table.create!(project_id: project_3.id, legacy_open_source_license_available: true)
+ project_settings_table.create!(project_id: project_4.id, legacy_open_source_license_available: true)
+ end
+
+ it 'sets `legacy_open_source_license_available` attribute to false for inactive, public projects',
+ :aggregate_failures do
+ expect(queries.count).to eq(5)
+
+ expect(migrated_attribute(project_1.id)).to be_truthy
+ expect(migrated_attribute(project_2.id)).to be_truthy
+ expect(migrated_attribute(project_3.id)).to be_falsey
+ expect(migrated_attribute(project_4.id)).to be_truthy
+ end
+
+ def migrated_attribute(project_id)
+ project_settings_table.find(project_id).legacy_open_source_license_available
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/populate_operation_visibility_permissions_from_operations_spec.rb b/spec/lib/gitlab/background_migration/populate_operation_visibility_permissions_from_operations_spec.rb
new file mode 100644
index 00000000000..1ebdca136a3
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/populate_operation_visibility_permissions_from_operations_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::PopulateOperationVisibilityPermissionsFromOperations do
+ let(:namespaces) { table(:namespaces) }
+ let(:project_features) { table(:project_features) }
+ let(:projects) { table(:projects) }
+
+ let(:namespace) { namespaces.create!(name: 'user', path: 'user') }
+
+ let(:proj_namespace1) { namespaces.create!(name: 'proj1', path: 'proj1', type: 'Project', parent_id: namespace.id) }
+ let(:proj_namespace2) { namespaces.create!(name: 'proj2', path: 'proj2', type: 'Project', parent_id: namespace.id) }
+ let(:proj_namespace3) { namespaces.create!(name: 'proj3', path: 'proj3', type: 'Project', parent_id: namespace.id) }
+
+ let(:project1) { create_project('test1', proj_namespace1) }
+ let(:project2) { create_project('test2', proj_namespace2) }
+ let(:project3) { create_project('test3', proj_namespace3) }
+
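+ # Access level values follow ProjectFeature conventions (10 = private, 20 = enabled)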
+ let!(:record1) { create_project_feature(project1) }
+ let!(:record2) { create_project_feature(project2, 20) }
+ let!(:record3) { create_project_feature(project3) }
+
+ let(:sub_batch_size) { 2 }
+ let(:start_id) { record1.id }
+ let(:end_id) { record3.id }
+ let(:batch_table) { :project_features }
+ let(:batch_column) { :id }
+ let(:pause_ms) { 1 }
+ let(:connection) { ApplicationRecord.connection }
+
+ let(:job) do
+ described_class.new(
+ start_id: start_id,
+ end_id: end_id,
+ batch_table: batch_table,
+ batch_column: batch_column,
+ sub_batch_size: sub_batch_size,
+ pause_ms: pause_ms,
+ connection: connection
+ )
+ end
+
+ subject(:perform) { job.perform }
+
+ it 'updates all project settings records from their operations_access_level', :aggregate_failures do
+ perform
+
+ expect_project_features_match_operations_access_level(record1)
+ expect_project_features_match_operations_access_level(record2)
+ expect_project_features_match_operations_access_level(record3)
+ end
+
+ private
+
+ def expect_project_features_match_operations_access_level(record)
+ record.reload
+ expect(record.monitor_access_level).to eq(record.operations_access_level)
+ expect(record.infrastructure_access_level).to eq(record.operations_access_level)
+ expect(record.feature_flags_access_level).to eq(record.operations_access_level)
+ expect(record.environments_access_level).to eq(record.operations_access_level)
+ end
+
+ def create_project(proj_name, proj_namespace)
+ projects.create!(
+ namespace_id: namespace.id,
+ project_namespace_id: proj_namespace.id,
+ name: proj_name,
+ path: proj_name
+ )
+ end
+
+ def create_project_feature(project, operations_access_level = 10)
+ project_features.create!(
+ project_id: project.id,
+ pages_access_level: 10,
+ operations_access_level: operations_access_level
+ )
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
index a54c840dd8e..8d71b117107 100644
--- a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
+++ b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
@@ -73,26 +73,6 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence
subject { described_class.new.perform(start_id, end_id) }
- context 'when the migration is disabled by the feature flag' do
- let(:start_id) { 1 }
- let(:end_id) { 1001 }
-
- before do
- stub_feature_flags(migrate_vulnerability_finding_uuids: false)
- end
-
- it 'logs the info message and does not run the migration' do
- expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
- expect(instance).to receive(:info).once.with(message: 'Migration is disabled by the feature flag',
- migrator: 'RecalculateVulnerabilitiesOccurrencesUuid',
- start_id: start_id,
- end_id: end_id)
- end
-
- subject
- end
- end
-
context "when finding has a UUIDv4" do
before do
@uuid_v4 = create_finding!(
@@ -474,6 +454,16 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence
allow(Gitlab::ErrorTracking).to receive(:track_and_raise_exception)
expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_exception).with(expected_error).once
end
+
+ it_behaves_like 'marks background migration job records' do
+ let(:arguments) { [1, 4] }
+ subject { described_class.new }
+ end
+ end
+
+ it_behaves_like 'marks background migration job records' do
+ let(:arguments) { [1, 4] }
+ subject { described_class.new }
end
private
diff --git a/spec/lib/gitlab/background_migration/set_correct_vulnerability_state_spec.rb b/spec/lib/gitlab/background_migration/set_correct_vulnerability_state_spec.rb
new file mode 100644
index 00000000000..d5b98e49a31
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/set_correct_vulnerability_state_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::SetCorrectVulnerabilityState do
+ let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let(:users) { table(:users) }
+ let(:user) { create_user! }
+ let(:project) do
+ table(:projects).create!(
+ namespace_id: namespace.id,
+ project_namespace_id: namespace.id,
+ packages_enabled: false)
+ end
+
+ let(:vulnerabilities) { table(:vulnerabilities) }
+
+ let!(:vulnerability_with_dismissed_at) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id,
+ dismissed_at: Time.current
+ )
+ end
+
+ let!(:vulnerability_without_dismissed_at) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id,
+ dismissed_at: nil
+ )
+ end
+
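+ # These values mirror the vulnerabilities.state enum (1 = detected, 2 = dismissed)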
+ let(:detected_state) { 1 }
+ let(:dismissed_state) { 2 }
+
+ subject(:perform_migration) do
+ described_class.new(start_id: vulnerability_with_dismissed_at.id,
+ end_id: vulnerability_without_dismissed_at.id,
+ batch_table: :vulnerabilities,
+ batch_column: :id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection)
+ .perform
+ end
+
+ it 'changes vulnerability state to `dismissed` when dismissed_at is not nil' do
+ expect { perform_migration }.to change { vulnerability_with_dismissed_at.reload.state }.to(dismissed_state)
+ end
+
+ it 'does not change the state when dismissed_at is nil' do
+ expect { perform_migration }.not_to change { vulnerability_without_dismissed_at.reload.state }
+ end
+
+ private
+
+ def create_vulnerability!(
+ project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0, state: 1, dismissed_at: nil
+ )
+ vulnerabilities.create!(
+ project_id: project_id,
+ author_id: author_id,
+ title: title,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type,
+ state: state,
+ dismissed_at: dismissed_at
+ )
+ end
+
+ def create_user!(name: "Example User", email: "user@example.com", user_type: nil)
+ users.create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 10
+ )
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/update_delayed_project_removal_to_null_for_user_namespaces_spec.rb b/spec/lib/gitlab/background_migration/update_delayed_project_removal_to_null_for_user_namespaces_spec.rb
new file mode 100644
index 00000000000..980a7771f4c
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/update_delayed_project_removal_to_null_for_user_namespaces_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::UpdateDelayedProjectRemovalToNullForUserNamespaces,
+ :migration do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:namespace_settings_table) { table(:namespace_settings) }
+
+ subject(:perform_migration) do
+ described_class.new(
+ start_id: 1,
+ end_id: 30,
+ batch_table: :namespace_settings,
+ batch_column: :namespace_id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ ).perform
+ end
+
+ before do
+ namespaces_table.create!(id: 1, name: 'group_namespace', path: 'path-1', type: 'Group')
+ namespaces_table.create!(id: 2, name: 'user_namespace', path: 'path-2', type: 'User')
+ namespaces_table.create!(id: 3, name: 'user_three_namespace', path: 'path-3', type: 'User')
+ namespaces_table.create!(id: 4, name: 'group_four_namespace', path: 'path-4', type: 'Group')
+ namespaces_table.create!(id: 5, name: 'group_five_namespace', path: 'path-5', type: 'Group')
+
+ namespace_settings_table.create!(namespace_id: 1, delayed_project_removal: false)
+ namespace_settings_table.create!(namespace_id: 2, delayed_project_removal: false)
+ namespace_settings_table.create!(namespace_id: 3, delayed_project_removal: nil)
+ namespace_settings_table.create!(namespace_id: 4, delayed_project_removal: true)
+ namespace_settings_table.create!(namespace_id: 5, delayed_project_removal: nil)
+ end
+
+ it 'updates `delayed_project_removal` column to null for user namespaces', :aggregate_failures do
+ expect(ActiveRecord::QueryRecorder.new { perform_migration }.count).to eq(7)
+
+ expect(migrated_attribute(1)).to be_falsey
+ expect(migrated_attribute(2)).to be_nil
+ expect(migrated_attribute(3)).to be_nil
+ expect(migrated_attribute(4)).to be_truthy
+ expect(migrated_attribute(5)).to be_nil
+ end
+
+ def migrated_attribute(namespace_id)
+ namespace_settings_table.find(namespace_id).delayed_project_removal
+ end
+end
diff --git a/spec/lib/gitlab/bare_repository_import/importer_spec.rb b/spec/lib/gitlab/bare_repository_import/importer_spec.rb
index b0d721a74ce..8fb903154f3 100644
--- a/spec/lib/gitlab/bare_repository_import/importer_spec.rb
+++ b/spec/lib/gitlab/bare_repository_import/importer_spec.rb
@@ -2,12 +2,12 @@
require 'spec_helper'
-RSpec.describe Gitlab::BareRepositoryImport::Importer, :seed_helper do
+RSpec.describe Gitlab::BareRepositoryImport::Importer do
let!(:admin) { create(:admin) }
let!(:base_dir) { Dir.mktmpdir + '/' }
let(:bare_repository) { Gitlab::BareRepositoryImport::Repository.new(base_dir, File.join(base_dir, "#{project_path}.git")) }
let(:gitlab_shell) { Gitlab::Shell.new }
- let(:source_project) { TEST_REPO_PATH }
+ let(:source_project) { TestEnv.factory_repo_bundle_path }
subject(:importer) { described_class.new(admin, bare_repository) }
@@ -17,8 +17,6 @@ RSpec.describe Gitlab::BareRepositoryImport::Importer, :seed_helper do
after do
FileUtils.rm_rf(base_dir)
- TestEnv.clean_test_path
- ensure_seeds
end
shared_examples 'importing a repository' do
@@ -150,7 +148,6 @@ RSpec.describe Gitlab::BareRepositoryImport::Importer, :seed_helper do
end
context 'with a repository already on disk' do
- let!(:base_dir) { TestEnv.repos_path }
# This is a quick way to get a valid repository instead of copying an
# existing one. Since it's not persisted, the importer will try to
# create the project.
@@ -193,8 +190,6 @@ RSpec.describe Gitlab::BareRepositoryImport::Importer, :seed_helper do
def prepare_repository(project_path, source_project)
repo_path = File.join(base_dir, project_path)
- return create_bare_repository(repo_path) unless source_project
-
cmd = %W(#{Gitlab.config.git.bin_path} clone --bare #{source_project} #{repo_path})
system(git_env, *cmd, chdir: SEED_STORAGE_PATH, out: '/dev/null', err: '/dev/null')
diff --git a/spec/lib/gitlab/bare_repository_import/repository_spec.rb b/spec/lib/gitlab/bare_repository_import/repository_spec.rb
index bf115046744..d29447ee376 100644
--- a/spec/lib/gitlab/bare_repository_import/repository_spec.rb
+++ b/spec/lib/gitlab/bare_repository_import/repository_spec.rb
@@ -59,18 +59,15 @@ RSpec.describe ::Gitlab::BareRepositoryImport::Repository do
let(:root_path) { TestEnv.repos_path }
let(:repo_path) { File.join(root_path, "#{hashed_path}.git") }
let(:wiki_path) { File.join(root_path, "#{hashed_path}.wiki.git") }
+ let(:raw_repository) { Gitlab::Git::Repository.new('default', "#{hashed_path}.git", nil, nil) }
before do
- TestEnv.create_bare_repository(repo_path)
-
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- repository = Rugged::Repository.new(repo_path)
- repository.config['gitlab.fullpath'] = 'to/repo'
- end
+ raw_repository.create_repository
+ raw_repository.set_full_path(full_path: 'to/repo')
end
after do
- FileUtils.rm_rf(repo_path)
+ raw_repository.remove
end
subject { described_class.new(root_path, repo_path) }
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index b723c31c4aa..e0a7044e5f9 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -328,6 +328,7 @@ RSpec.describe Gitlab::BitbucketImport::Importer do
expect(project.issues.where(state_id: Issue.available_states[:closed]).size).to eq(5)
expect(project.issues.where(state_id: Issue.available_states[:opened]).size).to eq(2)
+ expect(project.issues.map(&:namespace_id).uniq).to match_array([project.project_namespace_id])
end
describe 'wiki import' do
@@ -362,6 +363,14 @@ RSpec.describe Gitlab::BitbucketImport::Importer do
expect(project.issues.where("description LIKE ?", '%reporter3%').size).to eq(1)
expect(importer.errors).to be_empty
end
+
+ it 'sets work item type on new issues' do
+ allow(importer).to receive(:import_wiki)
+
+ importer.execute
+
+ expect(project.issues.map(&:work_item_type_id).uniq).to contain_exactly(WorkItems::Type.default_issue_type.id)
+ end
end
context 'metrics' do
diff --git a/spec/lib/gitlab/changelog/config_spec.rb b/spec/lib/gitlab/changelog/config_spec.rb
index 600682d30ad..92cad366cfd 100644
--- a/spec/lib/gitlab/changelog/config_spec.rb
+++ b/spec/lib/gitlab/changelog/config_spec.rb
@@ -20,6 +20,18 @@ RSpec.describe Gitlab::Changelog::Config do
described_class.from_git(project)
end
+ it "retrieves the specified configuration from git" do
+ allow(project.repository)
+ .to receive(:changelog_config).with('HEAD', 'specified_changelog_config.yml')
+ .and_return("---\ndate_format: '%Y'")
+
+ expect(described_class)
+ .to receive(:from_hash)
+ .with(project, { 'date_format' => '%Y' }, nil)
+
+ described_class.from_git(project, nil, 'specified_changelog_config.yml')
+ end
+
it 'returns the default configuration when no YAML file exists in Git' do
allow(project.repository)
.to receive(:changelog_config)
diff --git a/spec/lib/gitlab/ci/build/artifacts/expire_in_parser_spec.rb b/spec/lib/gitlab/ci/build/duration_parser_spec.rb
index 889878cf3ef..7f5ff1eb0ee 100644
--- a/spec/lib/gitlab/ci/build/artifacts/expire_in_parser_spec.rb
+++ b/spec/lib/gitlab/ci/build/duration_parser_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Build::Artifacts::ExpireInParser do
+RSpec.describe Gitlab::Ci::Build::DurationParser do
describe '.validate_duration', :request_store do
subject { described_class.validate_duration(value) }
diff --git a/spec/lib/gitlab/ci/build/image_spec.rb b/spec/lib/gitlab/ci/build/image_spec.rb
index 8f77a1f60ad..4895077a731 100644
--- a/spec/lib/gitlab/ci/build/image_spec.rb
+++ b/spec/lib/gitlab/ci/build/image_spec.rb
@@ -98,9 +98,11 @@ RSpec.describe Gitlab::Ci::Build::Image do
let(:service_entrypoint) { '/bin/sh' }
let(:service_alias) { 'db' }
let(:service_command) { 'sleep 30' }
+ let(:pull_policy) { %w[always if-not-present] }
let(:job) do
create(:ci_build, options: { services: [{ name: service_image_name, entrypoint: service_entrypoint,
- alias: service_alias, command: service_command, ports: [80] }] })
+ alias: service_alias, command: service_command, ports: [80],
+ pull_policy: pull_policy }] })
end
it 'fabricates a non-empty array of objects' do
@@ -114,6 +116,7 @@ RSpec.describe Gitlab::Ci::Build::Image do
expect(subject.first.entrypoint).to eq(service_entrypoint)
expect(subject.first.alias).to eq(service_alias)
expect(subject.first.command).to eq(service_command)
+ expect(subject.first.pull_policy).to eq(pull_policy)
port = subject.first.ports.first
expect(port.number).to eq 80
diff --git a/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb b/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb
index 4ac8bf61738..3892b88598a 100644
--- a/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb
@@ -6,19 +6,43 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
describe '#satisfied_by?' do
subject { described_class.new(globs).satisfied_by?(pipeline, context) }
- it_behaves_like 'a glob matching rule' do
+ context 'a glob matching rule' do
+ using RSpec::Parameterized::TableSyntax
+
let(:pipeline) { build(:ci_pipeline) }
let(:context) {}
before do
allow(pipeline).to receive(:modified_paths).and_return(files.keys)
end
+
+ # rubocop:disable Layout/LineLength
+ where(:case_name, :globs, :files, :satisfied) do
+ 'exact top-level match' | ['Dockerfile'] | { 'Dockerfile' => '', 'Gemfile' => '' } | true
+ 'exact top-level match' | { paths: ['Dockerfile'] } | { 'Dockerfile' => '', 'Gemfile' => '' } | true
+ 'exact top-level no match' | { paths: ['Dockerfile'] } | { 'Gemfile' => '' } | false
+ 'pattern top-level match' | { paths: ['Docker*'] } | { 'Dockerfile' => '', 'Gemfile' => '' } | true
+ 'pattern top-level no match' | ['Docker*'] | { 'Gemfile' => '' } | false
+ 'pattern top-level no match' | { paths: ['Docker*'] } | { 'Gemfile' => '' } | false
+ 'exact nested match' | { paths: ['project/build.properties'] } | { 'project/build.properties' => '' } | true
+ 'exact nested no match' | { paths: ['project/build.properties'] } | { 'project/README.md' => '' } | false
+ 'pattern nested match' | { paths: ['src/**/*.go'] } | { 'src/gitlab.com/goproject/goproject.go' => '' } | true
+ 'pattern nested no match' | { paths: ['src/**/*.go'] } | { 'src/gitlab.com/goproject/README.md' => '' } | false
+ 'ext top-level match' | { paths: ['*.go'] } | { 'main.go' => '', 'cmd/goproject/main.go' => '' } | true
+ 'ext nested no match' | { paths: ['*.go'] } | { 'cmd/goproject/main.go' => '' } | false
+ 'ext slash no match' | { paths: ['/*.go'] } | { 'main.go' => '', 'cmd/goproject/main.go' => '' } | false
+ end
+ # rubocop:enable Layout/LineLength
+
+ with_them do
+ it { is_expected.to eq(satisfied) }
+ end
end
context 'when pipeline is nil' do
let(:pipeline) {}
let(:context) {}
- let(:globs) { [] }
+ let(:globs) { { paths: [] } }
it { is_expected.to be_truthy }
end
@@ -26,8 +50,8 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
context 'when using variable expansion' do
let(:pipeline) { build(:ci_pipeline) }
let(:modified_paths) { ['helm/test.txt'] }
- let(:globs) { ['$HELM_DIR/**/*'] }
- let(:context) { double('context') }
+ let(:globs) { { paths: ['$HELM_DIR/**/*'] } }
+ let(:context) { instance_double(Gitlab::Ci::Build::Context::Base) }
before do
allow(pipeline).to receive(:modified_paths).and_return(modified_paths)
@@ -58,7 +82,7 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
end
context 'when variable expansion does not match' do
- let(:globs) { ['path/with/$in/it/*'] }
+ let(:globs) { { paths: ['path/with/$in/it/*'] } }
let(:modified_paths) { ['path/with/$in/it/file.txt'] }
before do
diff --git a/spec/lib/gitlab/ci/build/rules/rule_spec.rb b/spec/lib/gitlab/ci/build/rules/rule_spec.rb
index f905e229415..ac73b665f3a 100644
--- a/spec/lib/gitlab/ci/build/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules/rule_spec.rb
@@ -14,10 +14,14 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule do
let(:ci_build) { build(:ci_build, pipeline: pipeline) }
let(:rule) { described_class.new(rule_hash) }
+ before do
+ allow(pipeline).to receive(:modified_paths).and_return(['file.rb'])
+ end
+
describe '#matches?' do
subject { rule.matches?(pipeline, seed) }
- context 'with one matching clause' do
+ context 'with one matching clause if' do
let(:rule_hash) do
{ if: '$VAR == null', when: 'always' }
end
@@ -25,9 +29,17 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule do
it { is_expected.to eq(true) }
end
+ context 'with one matching clause changes' do
+ let(:rule_hash) do
+ { changes: { paths: ['**/*'] }, when: 'always' }
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
context 'with two matching clauses' do
let(:rule_hash) do
- { if: '$VAR == null', changes: '**/*', when: 'always' }
+ { if: '$VAR == null', changes: { paths: ['**/*'] }, when: 'always' }
end
it { is_expected.to eq(true) }
@@ -35,7 +47,7 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule do
context 'with a matching and non-matching clause' do
let(:rule_hash) do
- { if: '$VAR != null', changes: '$VAR == null', when: 'always' }
+ { if: '$VAR != null', changes: { paths: ['invalid.xyz'] }, when: 'always' }
end
it { is_expected.to eq(false) }
@@ -43,7 +55,7 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule do
context 'with two non-matching clauses' do
let(:rule_hash) do
- { if: '$VAR != null', changes: 'README', when: 'always' }
+ { if: '$VAR != null', changes: { paths: ['README'] }, when: 'always' }
end
it { is_expected.to eq(false) }
diff --git a/spec/lib/gitlab/ci/config/entry/image_spec.rb b/spec/lib/gitlab/ci/config/entry/image_spec.rb
index bd1ab5d8c41..0fa6d4f8804 100644
--- a/spec/lib/gitlab/ci/config/entry/image_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/image_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
before do
stub_feature_flags(ci_docker_image_pull_policy: true)
+
+ entry.compose!
end
let(:entry) { described_class.new(config) }
@@ -129,19 +131,16 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
describe '#valid?' do
it 'is valid' do
- entry.compose!
-
expect(entry).to be_valid
end
context 'when the feature flag ci_docker_image_pull_policy is disabled' do
before do
stub_feature_flags(ci_docker_image_pull_policy: false)
+ entry.compose!
end
it 'is not valid' do
- entry.compose!
-
expect(entry).not_to be_valid
expect(entry.errors).to include('image config contains unknown keys: pull_policy')
end
@@ -150,8 +149,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
describe '#value' do
it "returns value" do
- entry.compose!
-
expect(entry.value).to eq(
name: 'image:1.0',
pull_policy: ['if-not-present']
@@ -161,11 +158,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
context 'when the feature flag ci_docker_image_pull_policy is disabled' do
before do
stub_feature_flags(ci_docker_image_pull_policy: false)
+ entry.compose!
end
it 'is not valid' do
- entry.compose!
-
expect(entry.value).to eq(
name: 'image:1.0'
)
diff --git a/spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb b/spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb
index 3ed4a9f263f..295561b3c4d 100644
--- a/spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule::Changes do
it { is_expected.not_to be_valid }
it 'reports an error about invalid policy' do
- expect(entry.errors).to include(/should be an array of strings/)
+ expect(entry.errors).to include(/should be an array or a hash/)
end
end
@@ -64,7 +64,59 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule::Changes do
it 'returns information about errors' do
expect(entry.errors)
- .to include(/should be an array of strings/)
+ .to include(/should be an array or a hash/)
+ end
+ end
+
+ context 'with paths' do
+ context 'when paths is an array of strings' do
+ let(:config) { { paths: %w[app/ lib/] } }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when paths is not an array' do
+ let(:config) { { paths: 'string' } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns information about errors' do
+ expect(entry.errors)
+ .to include(/should be an array of strings/)
+ end
+ end
+
+ context 'when paths is an array of integers' do
+ let(:config) { { paths: [1, 2] } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns information about errors' do
+ expect(entry.errors)
+ .to include(/should be an array of strings/)
+ end
+ end
+
+ context 'when paths is an array with more than 50 entries' do
+ let(:config) { { paths: ['a'] * 51 } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns information about errors' do
+ expect(entry.errors)
+ .to include(/has too many entries \(maximum 50\)/)
+ end
+ end
+
+ context 'when paths is nil' do
+ let(:config) { { paths: nil } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns information about errors' do
+ expect(entry.errors)
+ .to include(/should be an array of strings/)
+ end
end
end
end
@@ -75,6 +127,14 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule::Changes do
context 'when using a string array' do
let(:config) { %w[app/ lib/ spec/ other/* paths/**/*.rb] }
+ it { is_expected.to eq(paths: config) }
+ end
+
+ context 'with paths' do
+ let(:config) do
+ { paths: ['app/', 'lib/'] }
+ end
+
it { is_expected.to eq(config) }
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
index 89d349efe8f..93f4a66bfb6 100644
--- a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
@@ -115,7 +115,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.not_to be_valid }
it 'reports an error about invalid policy' do
- expect(subject.errors).to include(/should be an array of strings/)
+ expect(subject.errors).to include(/should be an array or a hash/)
end
end
@@ -411,7 +411,13 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
context 'when using a changes: clause' do
let(:config) { { changes: %w[app/ lib/ spec/ other/* paths/**/*.rb] } }
- it { is_expected.to eq(config) }
+ it { is_expected.to eq(changes: { paths: %w[app/ lib/ spec/ other/* paths/**/*.rb] }) }
+
+ context 'when using changes with paths' do
+ let(:config) { { changes: { paths: %w[app/ lib/ spec/ other/* paths/**/*.rb] } } }
+
+ it { is_expected.to eq(config) }
+ end
end
context 'when default value has been provided' do
@@ -426,7 +432,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
end
it 'does not add to provided configuration' do
- expect(entry.value).to eq(config)
+ expect(entry.value).to eq(changes: { paths: %w[app/**/*.rb] })
end
end
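
The expectations above hinge on `changes:` being normalized into a hash; a tiny sketch of that coercion as assumed by these examples (shape only, not the entry class itself):

# Sketch of the assumed normalization: a bare string array becomes { paths: [...] }.
def normalize_changes(changes)
  changes.is_a?(Hash) ? changes : { paths: Array(changes) }
end

normalize_changes(%w[app/ lib/])        # => { paths: ["app/", "lib/"] }
normalize_changes(paths: %w[app/ lib/]) # => { paths: ["app/", "lib/"] }
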
diff --git a/spec/lib/gitlab/ci/config/entry/rules_spec.rb b/spec/lib/gitlab/ci/config/entry/rules_spec.rb
index cfec33003e4..b0871f2345e 100644
--- a/spec/lib/gitlab/ci/config/entry/rules_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules_spec.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
require 'fast_spec_helper'
-require 'support/helpers/stub_feature_flags'
require_dependency 'active_model'
RSpec.describe Gitlab::Ci::Config::Entry::Rules do
@@ -12,13 +11,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules do
end
let(:metadata) { { allowed_when: %w[always never] } }
- let(:entry) { factory.create! }
- describe '.new' do
- subject { entry }
+ subject(:entry) { factory.create! }
+ describe '.new' do
before do
- subject.compose!
+ entry.compose!
end
context 'with a list of rule rule' do
@@ -73,7 +71,11 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules do
end
describe '#value' do
- subject { entry.value }
+ subject(:value) { entry.value }
+
+ before do
+ entry.compose!
+ end
context 'with a list of rule rule' do
let(:config) do
@@ -99,7 +101,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules do
{ if: '$SKIP', when: 'never' }
end
- it { is_expected.to eq([config]) }
+ it { is_expected.to eq([]) }
end
context 'with nested rules' do
diff --git a/spec/lib/gitlab/ci/config/entry/service_spec.rb b/spec/lib/gitlab/ci/config/entry/service_spec.rb
index 2795cc9dddf..3c000fd09ed 100644
--- a/spec/lib/gitlab/ci/config/entry/service_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/service_spec.rb
@@ -1,14 +1,19 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
+require 'support/helpers/stubbed_feature'
+require 'support/helpers/stub_feature_flags'
RSpec.describe Gitlab::Ci::Config::Entry::Service do
- let(:entry) { described_class.new(config) }
+ include StubFeatureFlags
before do
+ stub_feature_flags(ci_docker_image_pull_policy: true)
entry.compose!
end
+ subject(:entry) { described_class.new(config) }
+
context 'when configuration is a string' do
let(:config) { 'postgresql:9.5' }
@@ -90,6 +95,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Service do
end
end
+ describe '#pull_policy' do
+ it "returns nil" do
+ expect(entry.pull_policy).to be_nil
+ end
+ end
+
context 'when configuration has ports' do
let(:ports) { [{ number: 80, protocol: 'http', name: 'foobar' }] }
let(:config) do
@@ -134,6 +145,49 @@ RSpec.describe Gitlab::Ci::Config::Entry::Service do
end
end
end
+
+ context 'when configuration has pull_policy' do
+ let(:config) { { name: 'postgresql:9.5', pull_policy: 'if-not-present' } }
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ context 'when the feature flag ci_docker_image_pull_policy is disabled' do
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: false)
+ entry.compose!
+ end
+
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include('service config contains unknown keys: pull_policy')
+ end
+ end
+ end
+
+ describe '#value' do
+ it "returns value" do
+ expect(entry.value).to eq(
+ name: 'postgresql:9.5',
+ pull_policy: ['if-not-present']
+ )
+ end
+
+ context 'when the feature flag ci_docker_image_pull_policy is disabled' do
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: false)
+ end
+
+ it 'returns value without pull_policy' do
+ expect(entry.value).to eq(
+ name: 'postgresql:9.5'
+ )
+ end
+ end
+ end
+ end
end
context 'when entry value is not correct' do
diff --git a/spec/lib/gitlab/ci/config/external/context_spec.rb b/spec/lib/gitlab/ci/config/external/context_spec.rb
index 800c563cd0b..40702e75404 100644
--- a/spec/lib/gitlab/ci/config/external/context_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/context_spec.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::External::Context do
- let(:project) { double('Project') }
+ let(:project) { build(:project) }
let(:user) { double('User') }
let(:sha) { '12345' }
let(:variables) { Gitlab::Ci::Variables::Collection.new([{ 'key' => 'a', 'value' => 'b' }]) }
@@ -126,7 +126,7 @@ RSpec.describe Gitlab::Ci::Config::External::Context do
end
context 'with attributes' do
- let(:new_attributes) { { project: double, user: double, sha: '56789' } }
+ let(:new_attributes) { { project: build(:project), user: double, sha: '56789' } }
it_behaves_like 'a mutated context'
end
diff --git a/spec/lib/gitlab/ci/config/external/file/project_spec.rb b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
index 77e542cf933..72a85c9b03d 100644
--- a/spec/lib/gitlab/ci/config/external/file/project_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
@@ -177,6 +177,22 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
expect(project_file.error_message).to include("Project `xxxxxxxxxxxxxxxxxxxxxxx` not found or access denied!")
end
end
+
+ context 'when a project contained in an array is used with a masked variable' do
+ let(:variables) do
+ Gitlab::Ci::Variables::Collection.new([
+ { key: 'VAR1', value: 'a_secret_variable_value', masked: true }
+ ])
+ end
+
+ let(:params) do
+ { project: ['a_secret_variable_value'], file: '/file.yml' }
+ end
+
+ it 'does not raise an error' do
+ expect { valid? }.not_to raise_error
+ end
+ end
end
describe '#expand_context' do
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index 7e1b31fea6a..e74fdc2071b 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -232,11 +232,9 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
image: 'image:1.0' }
end
- before do
- stub_const("#{described_class}::MAX_INCLUDES", 2)
- end
-
it 'does not raise an exception' do
+ allow(context).to receive(:max_includes).and_return(2)
+
expect { subject }.not_to raise_error
end
end
@@ -250,11 +248,9 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
image: 'image:1.0' }
end
- before do
- stub_const("#{described_class}::MAX_INCLUDES", 1)
- end
-
it 'raises an exception' do
+ allow(context).to receive(:max_includes).and_return(1)
+
expect { subject }.to raise_error(described_class::TooManyIncludesError)
end
@@ -264,6 +260,8 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
end
it 'raises an exception' do
+ allow(context).to receive(:max_includes).and_return(1)
+
expect { subject }.to raise_error(described_class::TooManyIncludesError)
end
end
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index 15a0ff40aa4..841a46e197d 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -323,11 +323,9 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
end
context 'when too many includes is included' do
- before do
- stub_const('Gitlab::Ci::Config::External::Mapper::MAX_INCLUDES', 1)
- end
-
it 'raises an error' do
+ allow(context).to receive(:max_includes).and_return(1)
+
expect { subject }.to raise_error(Gitlab::Ci::Config::External::Processor::IncludeError, /Maximum of 1 nested/)
end
end
diff --git a/spec/lib/gitlab/ci/jwt_spec.rb b/spec/lib/gitlab/ci/jwt_spec.rb
index 179e2efc0c7..147801b6217 100644
--- a/spec/lib/gitlab/ci/jwt_spec.rb
+++ b/spec/lib/gitlab/ci/jwt_spec.rb
@@ -48,6 +48,7 @@ RSpec.describe Gitlab::Ci::Jwt do
expect(payload[:ref_protected]).to eq(build.protected.to_s)
expect(payload[:environment]).to be_nil
expect(payload[:environment_protected]).to be_nil
+ expect(payload[:deployment_tier]).to be_nil
end
end
@@ -96,7 +97,7 @@ RSpec.describe Gitlab::Ci::Jwt do
end
describe 'environment' do
- let(:environment) { build_stubbed(:environment, project: project, name: 'production') }
+ let(:environment) { build_stubbed(:environment, project: project, name: 'production', tier: 'production') }
let(:build) do
build_stubbed(
:ci_build,
@@ -114,6 +115,19 @@ RSpec.describe Gitlab::Ci::Jwt do
it 'has correct values for environment attributes' do
expect(payload[:environment]).to eq('production')
expect(payload[:environment_protected]).to eq('false')
+ expect(payload[:deployment_tier]).to eq('production')
+ end
+
+ describe 'deployment_tier' do
+ context 'when build options specify a different deployment_tier' do
+ before do
+ build.options[:environment] = { name: environment.name, deployment_tier: 'development' }
+ end
+
+ it 'uses deployment_tier from build options' do
+ expect(payload[:deployment_tier]).to eq('development')
+ end
+ end
end
end
end
@@ -121,8 +135,8 @@ RSpec.describe Gitlab::Ci::Jwt do
describe '.for_build' do
shared_examples 'generating JWT for build' do
context 'when signing key is present' do
- let(:rsa_key) { OpenSSL::PKey::RSA.generate(1024) }
- let(:rsa_key_data) { rsa_key.to_s }
+ let_it_be(:rsa_key) { OpenSSL::PKey::RSA.generate(3072) }
+ let_it_be(:rsa_key_data) { rsa_key.to_s }
it 'generates JWT with key id' do
_payload, headers = JWT.decode(jwt, rsa_key.public_key, true, { algorithm: 'RS256' })
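
The deployment_tier examples above assert a precedence rule; a hedged sketch of that rule follows (names are illustrative stand-ins, not the Gitlab::Ci::Jwt code):

# Illustrative precedence: a tier given in the job's environment options wins,
# otherwise the tier configured on the environment record is used.
FakeEnvironment = Struct.new(:tier)

def deployment_tier_for(build_options, environment)
  build_options.dig(:environment, :deployment_tier) || environment&.tier
end

deployment_tier_for({ environment: { deployment_tier: 'development' } },
                    FakeEnvironment.new('production'))  # => "development"
deployment_tier_for({}, FakeEnvironment.new('production')) # => "production"
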
diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
index 375841ce236..cbf92f8fa83 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::CreateDeployments do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
- let(:stage) { build(:ci_stage_entity, project: project, statuses: [job]) }
+ let(:stage) { build(:ci_stage, project: project, statuses: [job]) }
let(:pipeline) { create(:ci_pipeline, project: project, stages: [stage]) }
let(:command) do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
index 9057c4e99df..eba0db0adfb 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
context 'tags persistence' do
let(:stage) do
- build(:ci_stage_entity, pipeline: pipeline, project: project)
+ build(:ci_stage, pipeline: pipeline, project: project)
end
let(:job) do
@@ -77,7 +77,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
context 'without tags' do
it 'extracts an empty tag list' do
- expect(CommitStatus)
+ expect(Gitlab::Ci::Tags::BulkInsert)
.to receive(:bulk_insert_tags!)
.with([job])
.and_call_original
@@ -95,7 +95,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
end
it 'bulk inserts tags' do
- expect(CommitStatus)
+ expect(Gitlab::Ci::Tags::BulkInsert)
.to receive(:bulk_insert_tags!)
.with([job])
.and_call_original
diff --git a/spec/lib/gitlab/ci/pipeline/chain/ensure_environments_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/ensure_environments_spec.rb
index 6a7d9b58a05..e07a3ca9033 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/ensure_environments_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/ensure_environments_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::EnsureEnvironments do
let(:project) { create(:project) }
let(:user) { create(:user) }
- let(:stage) { build(:ci_stage_entity, project: project, statuses: [job]) }
+ let(:stage) { build(:ci_stage, project: project, statuses: [job]) }
let(:pipeline) { build(:ci_pipeline, project: project, stages: [stage]) }
let(:command) do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/ensure_resource_groups_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/ensure_resource_groups_spec.rb
index 571455d6279..f14dd70a753 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/ensure_resource_groups_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/ensure_resource_groups_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::EnsureResourceGroups do
let(:project) { create(:project) }
let(:user) { create(:user) }
- let(:stage) { build(:ci_stage_entity, project: project, statuses: [job]) }
+ let(:stage) { build(:ci_stage, project: project, statuses: [job]) }
let(:pipeline) { build(:ci_pipeline, project: project, stages: [stage]) }
let!(:environment) { create(:environment, name: 'production', project: project) }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
index cebc4c02d11..eeac0c85a77 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
end
end
- it 'respects the defined payload schema', :saas do
+ it 'respects the defined payload schema' do
expect(::Gitlab::HTTP).to receive(:post) do |_url, params|
expect(params[:body]).to match_schema('/external_validation')
expect(params[:timeout]).to eq(described_class::DEFAULT_VALIDATION_REQUEST_TIMEOUT)
@@ -235,6 +235,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
end
it 'logs the authorization' do
+ allow(Gitlab::AppLogger).to receive(:info)
+
expect(Gitlab::AppLogger).to receive(:info).with(message: 'Pipeline not authorized', project_id: project.id, user_id: user.id)
perform!
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 49505d397c2..040f3ab5830 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -858,14 +858,14 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
context 'with an explicit `when: never`' do
where(:rule_set) do
[
- [[{ changes: %w[*/**/*.rb], when: 'never' }, { changes: %w[*/**/*.rb], when: 'always' }]],
- [[{ changes: %w[app/models/ci/pipeline.rb], when: 'never' }, { changes: %w[app/models/ci/pipeline.rb], when: 'always' }]],
- [[{ changes: %w[spec/**/*.rb], when: 'never' }, { changes: %w[spec/**/*.rb], when: 'always' }]],
- [[{ changes: %w[*.yml], when: 'never' }, { changes: %w[*.yml], when: 'always' }]],
- [[{ changes: %w[.*.yml], when: 'never' }, { changes: %w[.*.yml], when: 'always' }]],
- [[{ changes: %w[**/*], when: 'never' }, { changes: %w[**/*], when: 'always' }]],
- [[{ changes: %w[*/**/*.rb *.yml], when: 'never' }, { changes: %w[*/**/*.rb *.yml], when: 'always' }]],
- [[{ changes: %w[.*.yml **/*], when: 'never' }, { changes: %w[.*.yml **/*], when: 'always' }]]
+ [[{ changes: { paths: %w[*/**/*.rb] }, when: 'never' }, { changes: { paths: %w[*/**/*.rb] }, when: 'always' }]],
+ [[{ changes: { paths: %w[app/models/ci/pipeline.rb] }, when: 'never' }, { changes: { paths: %w[app/models/ci/pipeline.rb] }, when: 'always' }]],
+ [[{ changes: { paths: %w[spec/**/*.rb] }, when: 'never' }, { changes: { paths: %w[spec/**/*.rb] }, when: 'always' }]],
+ [[{ changes: { paths: %w[*.yml] }, when: 'never' }, { changes: { paths: %w[*.yml] }, when: 'always' }]],
+ [[{ changes: { paths: %w[.*.yml] }, when: 'never' }, { changes: { paths: %w[.*.yml] }, when: 'always' }]],
+ [[{ changes: { paths: %w[**/*] }, when: 'never' }, { changes: { paths: %w[**/*] }, when: 'always' }]],
+ [[{ changes: { paths: %w[*/**/*.rb *.yml] }, when: 'never' }, { changes: { paths: %w[*/**/*.rb *.yml] }, when: 'always' }]],
+ [[{ changes: { paths: %w[.*.yml **/*] }, when: 'never' }, { changes: { paths: %w[.*.yml **/*] }, when: 'always' }]]
]
end
@@ -881,14 +881,14 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
context 'with an explicit `when: always`' do
where(:rule_set) do
[
- [[{ changes: %w[*/**/*.rb], when: 'always' }, { changes: %w[*/**/*.rb], when: 'never' }]],
- [[{ changes: %w[app/models/ci/pipeline.rb], when: 'always' }, { changes: %w[app/models/ci/pipeline.rb], when: 'never' }]],
- [[{ changes: %w[spec/**/*.rb], when: 'always' }, { changes: %w[spec/**/*.rb], when: 'never' }]],
- [[{ changes: %w[*.yml], when: 'always' }, { changes: %w[*.yml], when: 'never' }]],
- [[{ changes: %w[.*.yml], when: 'always' }, { changes: %w[.*.yml], when: 'never' }]],
- [[{ changes: %w[**/*], when: 'always' }, { changes: %w[**/*], when: 'never' }]],
- [[{ changes: %w[*/**/*.rb *.yml], when: 'always' }, { changes: %w[*/**/*.rb *.yml], when: 'never' }]],
- [[{ changes: %w[.*.yml **/*], when: 'always' }, { changes: %w[.*.yml **/*], when: 'never' }]]
+ [[{ changes: { paths: %w[*/**/*.rb] }, when: 'always' }, { changes: { paths: %w[*/**/*.rb] }, when: 'never' }]],
+ [[{ changes: { paths: %w[app/models/ci/pipeline.rb] }, when: 'always' }, { changes: { paths: %w[app/models/ci/pipeline.rb] }, when: 'never' }]],
+ [[{ changes: { paths: %w[spec/**/*.rb] }, when: 'always' }, { changes: { paths: %w[spec/**/*.rb] }, when: 'never' }]],
+ [[{ changes: { paths: %w[*.yml] }, when: 'always' }, { changes: { paths: %w[*.yml] }, when: 'never' }]],
+ [[{ changes: { paths: %w[.*.yml] }, when: 'always' }, { changes: { paths: %w[.*.yml] }, when: 'never' }]],
+ [[{ changes: { paths: %w[**/*] }, when: 'always' }, { changes: { paths: %w[**/*] }, when: 'never' }]],
+ [[{ changes: { paths: %w[*/**/*.rb *.yml] }, when: 'always' }, { changes: { paths: %w[*/**/*.rb *.yml] }, when: 'never' }]],
+ [[{ changes: { paths: %w[.*.yml **/*] }, when: 'always' }, { changes: { paths: %w[.*.yml **/*] }, when: 'never' }]]
]
end
@@ -904,14 +904,14 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
context 'without an explicit when: value' do
where(:rule_set) do
[
- [[{ changes: %w[*/**/*.rb] }]],
- [[{ changes: %w[app/models/ci/pipeline.rb] }]],
- [[{ changes: %w[spec/**/*.rb] }]],
- [[{ changes: %w[*.yml] }]],
- [[{ changes: %w[.*.yml] }]],
- [[{ changes: %w[**/*] }]],
- [[{ changes: %w[*/**/*.rb *.yml] }]],
- [[{ changes: %w[.*.yml **/*] }]]
+ [[{ changes: { paths: %w[*/**/*.rb] } }]],
+ [[{ changes: { paths: %w[app/models/ci/pipeline.rb] } }]],
+ [[{ changes: { paths: %w[spec/**/*.rb] } }]],
+ [[{ changes: { paths: %w[*.yml] } }]],
+ [[{ changes: { paths: %w[.*.yml] } }]],
+ [[{ changes: { paths: %w[**/*] } }]],
+ [[{ changes: { paths: %w[*/**/*.rb *.yml] } }]],
+ [[{ changes: { paths: %w[.*.yml **/*] } }]]
]
end
diff --git a/spec/lib/gitlab/ci/reports/coverage_report_generator_spec.rb b/spec/lib/gitlab/ci/reports/coverage_report_generator_spec.rb
index eec218346c2..f116b175fc7 100644
--- a/spec/lib/gitlab/ci/reports/coverage_report_generator_spec.rb
+++ b/spec/lib/gitlab/ci/reports/coverage_report_generator_spec.rb
@@ -75,16 +75,6 @@ RSpec.describe Gitlab::Ci::Reports::CoverageReportGenerator, factory_default: :k
end
it_behaves_like 'having a coverage report'
-
- context 'when feature flag ci_child_pipeline_coverage_reports is disabled' do
- before do
- stub_feature_flags(ci_child_pipeline_coverage_reports: false)
- end
-
- it 'returns empty coverage reports' do
- expect(subject).to be_empty
- end
- end
end
context 'when both parent and child pipeline have builds with coverage reports' do
diff --git a/spec/lib/gitlab/ci/reports/test_reports_spec.rb b/spec/lib/gitlab/ci/reports/test_report_spec.rb
index 24c00de3731..539510bca9e 100644
--- a/spec/lib/gitlab/ci/reports/test_reports_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_report_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Reports::TestReports do
+RSpec.describe Gitlab::Ci::Reports::TestReport do
include TestReportsHelper
let(:test_reports) { described_class.new }
diff --git a/spec/lib/gitlab/ci/reports/test_reports_comparer_spec.rb b/spec/lib/gitlab/ci/reports/test_reports_comparer_spec.rb
index 3483dddca3a..ac64e4699fe 100644
--- a/spec/lib/gitlab/ci/reports/test_reports_comparer_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_reports_comparer_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe Gitlab::Ci::Reports::TestReportsComparer do
include TestReportsHelper
let(:comparer) { described_class.new(base_reports, head_reports) }
- let(:base_reports) { Gitlab::Ci::Reports::TestReports.new }
- let(:head_reports) { Gitlab::Ci::Reports::TestReports.new }
+ let(:base_reports) { Gitlab::Ci::Reports::TestReport.new }
+ let(:head_reports) { Gitlab::Ci::Reports::TestReport.new }
describe '#suite_comparers' do
subject { comparer.suite_comparers }
diff --git a/spec/lib/gitlab/ci/runner/metrics_spec.rb b/spec/lib/gitlab/ci/runner/metrics_spec.rb
new file mode 100644
index 00000000000..3c459271092
--- /dev/null
+++ b/spec/lib/gitlab/ci/runner/metrics_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Runner::Metrics, :prometheus do
+ subject { described_class.new }
+
+ describe '#increment_runner_authentication_success_counter' do
+ it 'increments count for same type' do
+ expect { subject.increment_runner_authentication_success_counter(runner_type: 'instance_type') }
+ .to change { described_class.runner_authentication_success_counter.get(runner_type: 'instance_type') }.by(1)
+ end
+
+ it 'does not increment count for different type' do
+ expect { subject.increment_runner_authentication_success_counter(runner_type: 'group_type') }
+ .to not_change { described_class.runner_authentication_success_counter.get(runner_type: 'project_type') }
+ end
+
+ it 'does not increment failure count' do
+ expect { subject.increment_runner_authentication_success_counter(runner_type: 'project_type') }
+ .to not_change { described_class.runner_authentication_failure_counter.get }
+ end
+
+ it 'raises ArgumentError for an invalid runner type' do
+ expect { subject.increment_runner_authentication_success_counter(runner_type: 'unknown_type') }
+ .to raise_error(ArgumentError, 'unknown runner type: unknown_type')
+ end
+ end
+
+ describe '#increment_runner_authentication_failure_counter' do
+ it 'increments count' do
+ expect { subject.increment_runner_authentication_failure_counter }
+ .to change { described_class.runner_authentication_failure_counter.get }.by(1)
+ end
+
+ it 'does not increment success count' do
+ expect { subject.increment_runner_authentication_failure_counter }
+ .to not_change { described_class.runner_authentication_success_counter.get(runner_type: 'instance_type') }
+ end
+ end
+end
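
A toy, self-contained take on the per-type counting this new spec exercises (not the real Prometheus-backed Gitlab::Ci::Runner::Metrics; class and constant names here are made up):

class ToyRunnerMetrics
  RUNNER_TYPES = %w[instance_type group_type project_type].freeze

  def initialize
    @success = Hash.new(0) # counted per runner_type, like a labelled counter
    @failure = 0
  end

  def increment_runner_authentication_success_counter(runner_type:)
    raise ArgumentError, "unknown runner type: #{runner_type}" unless RUNNER_TYPES.include?(runner_type)

    @success[runner_type] += 1
  end

  def increment_runner_authentication_failure_counter
    @failure += 1
  end
end

metrics = ToyRunnerMetrics.new
metrics.increment_runner_authentication_success_counter(runner_type: 'instance_type')
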
diff --git a/spec/lib/gitlab/ci/runner_releases_spec.rb b/spec/lib/gitlab/ci/runner_releases_spec.rb
index 9e4a8739c0f..576eb02ad83 100644
--- a/spec/lib/gitlab/ci/runner_releases_spec.rb
+++ b/spec/lib/gitlab/ci/runner_releases_spec.rb
@@ -5,16 +5,25 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::RunnerReleases do
subject { described_class.instance }
- describe '#releases' do
- before do
- subject.reset!
+ let(:runner_releases_url) { 'the release API URL' }
- stub_application_setting(public_runner_releases_url: 'the release API URL')
- allow(Gitlab::HTTP).to receive(:try_get).with('the release API URL').once { mock_http_response(response) }
- end
+ def releases
+ subject.releases
+ end
+
+ def releases_by_minor
+ subject.releases_by_minor
+ end
+
+ before do
+ subject.reset_backoff!
- def releases
- subject.releases
+ stub_application_setting(public_runner_releases_url: runner_releases_url)
+ end
+
+ describe 'caching behavior', :use_clean_rails_memory_store_caching do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(runner_releases_url, anything).once { mock_http_response(response) }
end
shared_examples 'requests that follow cache status' do |validity_period|
@@ -25,9 +34,14 @@ RSpec.describe Gitlab::Ci::RunnerReleases do
releases
travel followup_request_interval do
- expect(Gitlab::HTTP).not_to receive(:try_get)
+ expect(Gitlab::HTTP).not_to receive(:get)
- expect(releases).to eq(expected_result)
+ if expected_releases
+ expected_result_by_minor = expected_releases.group_by(&:without_patch).transform_values(&:max)
+ end
+
+ expect(releases).to eq(expected_releases)
+ expect(releases_by_minor).to eq(expected_result_by_minor)
end
end
end
@@ -40,75 +54,189 @@ RSpec.describe Gitlab::Ci::RunnerReleases do
releases
travel followup_request_interval do
- expect(Gitlab::HTTP).to receive(:try_get).with('the release API URL').once { mock_http_response(followup_response) }
-
- expect(releases).to eq((expected_result || []) + [Gitlab::VersionInfo.new(14, 9, 2)])
+ expect(Gitlab::HTTP).to receive(:get)
+ .with(runner_releases_url, anything)
+ .once { mock_http_response(followup_response) }
+
+ new_releases = (expected_releases || []) + [Gitlab::VersionInfo.new(14, 9, 2)]
+ new_releases_by_minor_version = (expected_releases_by_minor || {}).merge(
+ Gitlab::VersionInfo.new(14, 9, 0) => Gitlab::VersionInfo.new(14, 9, 2)
+ )
+ expect(releases).to eq(new_releases)
+ expect(releases_by_minor).to eq(new_releases_by_minor_version)
end
end
end
end
- context 'when response is nil' do
- let(:response) { nil }
- let(:expected_result) { nil }
-
- it 'returns nil' do
- expect(releases).to be_nil
- end
-
- it_behaves_like 'requests that follow cache status', 5.seconds
-
+ shared_examples 'a service implementing exponential backoff' do |opts|
it 'performs exponential backoff on requests', :aggregate_failures do
start_time = Time.now.utc.change(usec: 0)
http_call_timestamp_offsets = []
- allow(Gitlab::HTTP).to receive(:try_get).with('the release API URL') do
+ allow(Gitlab::HTTP).to receive(:get).with(runner_releases_url, anything) do
http_call_timestamp_offsets << Time.now.utc - start_time
+
+ raise Net::OpenTimeout if opts&.dig(:raise_timeout)
+
mock_http_response(response)
end
# An initial HTTP request fails
travel_to(start_time)
- subject.reset!
+ subject.reset_backoff!
expect(releases).to be_nil
+ expect(releases_by_minor).to be_nil
# Successive failed requests result in HTTP requests only after specific backoff periods
backoff_periods = [5, 10, 20, 40, 80, 160, 320, 640, 1280, 2560, 3600].map(&:seconds)
backoff_periods.each do |period|
travel(period - 1.second)
expect(releases).to be_nil
+ expect(releases_by_minor).to be_nil
travel 1.second
expect(releases).to be_nil
+ expect(releases_by_minor).to be_nil
end
expect(http_call_timestamp_offsets).to eq([0, 5, 15, 35, 75, 155, 315, 635, 1275, 2555, 5115, 8715])
# Finally a successful HTTP request results in releases being returned
- allow(Gitlab::HTTP).to receive(:try_get).with('the release API URL').once { mock_http_response([{ 'name' => 'v14.9.1' }]) }
+ allow(Gitlab::HTTP).to receive(:get)
+ .with(runner_releases_url, anything)
+ .once { mock_http_response([{ 'name' => 'v14.9.1-beta1-ee' }]) }
travel 1.hour
expect(releases).not_to be_nil
+ expect(releases_by_minor).not_to be_nil
end
end
+ context 'when request results in timeout' do
+ let(:response) { }
+ let(:expected_releases) { nil }
+ let(:expected_releases_by_minor) { nil }
+
+ it_behaves_like 'requests that follow cache status', 5.seconds
+ it_behaves_like 'a service implementing exponential backoff', raise_timeout: true
+ end
+
+ context 'when response is nil' do
+ let(:response) { nil }
+ let(:expected_releases) { nil }
+ let(:expected_releases_by_minor) { nil }
+
+ it_behaves_like 'requests that follow cache status', 5.seconds
+ it_behaves_like 'a service implementing exponential backoff'
+ end
+
context 'when response is not nil' do
- let(:response) { [{ 'name' => 'v14.9.1' }, { 'name' => 'v14.9.0' }] }
- let(:expected_result) { [Gitlab::VersionInfo.new(14, 9, 0), Gitlab::VersionInfo.new(14, 9, 1)] }
+ let(:response) { [{ 'name' => 'v14.9.1-beta1-ee' }, { 'name' => 'v14.9.0' }] }
+ let(:expected_releases) do
+ [
+ Gitlab::VersionInfo.new(14, 9, 0),
+ Gitlab::VersionInfo.new(14, 9, 1, '-beta1-ee')
+ ]
+ end
+
+ let(:expected_releases_by_minor) do
+ {
+ Gitlab::VersionInfo.new(14, 9, 0) => Gitlab::VersionInfo.new(14, 9, 1, '-beta1-ee')
+ }
+ end
+
+ it_behaves_like 'requests that follow cache status', 1.day
+ end
+ end
+
+ describe '#releases', :use_clean_rails_memory_store_caching do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(runner_releases_url, anything).once { mock_http_response(response) }
+ end
+
+ context 'when response is nil' do
+ let(:response) { nil }
+ let(:expected_result) { nil }
+
+ it 'returns nil' do
+ expect(releases).to be_nil
+ end
+ end
+
+ context 'when response is not nil' do
+ let(:response) { [{ 'name' => 'v14.9.1-beta1-ee' }, { 'name' => 'v14.9.0' }] }
+ let(:expected_result) do
+ [
+ Gitlab::VersionInfo.new(14, 9, 0),
+ Gitlab::VersionInfo.new(14, 9, 1, '-beta1-ee')
+ ]
+ end
it 'returns parsed and sorted Gitlab::VersionInfo objects' do
expect(releases).to eq(expected_result)
end
+ end
- it_behaves_like 'requests that follow cache status', 1.day
+ context 'when response contains unexpected input type' do
+ let(:response) { 'error' }
+
+ it { expect(releases).to be_nil }
+ end
+
+ context 'when response contains unexpected input array' do
+ let(:response) { ['error'] }
+
+ it { expect(releases).to be_nil }
+ end
+ end
+
+ describe '#releases_by_minor', :use_clean_rails_memory_store_caching do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(runner_releases_url, anything).once { mock_http_response(response) }
end
- def mock_http_response(response)
- http_response = instance_double(HTTParty::Response)
+ context 'when response is nil' do
+ let(:response) { nil }
+ let(:expected_result) { nil }
- allow(http_response).to receive(:success?).and_return(response.present?)
- allow(http_response).to receive(:parsed_response).and_return(response)
+ it 'returns nil' do
+ expect(releases_by_minor).to be_nil
+ end
+ end
- http_response
+ context 'when response is not nil' do
+ let(:response) { [{ 'name' => 'v14.9.1-beta1-ee' }, { 'name' => 'v14.9.0' }, { 'name' => 'v14.8.1' }] }
+ let(:expected_result) do
+ {
+ Gitlab::VersionInfo.new(14, 8, 0) => Gitlab::VersionInfo.new(14, 8, 1),
+ Gitlab::VersionInfo.new(14, 9, 0) => Gitlab::VersionInfo.new(14, 9, 1, '-beta1-ee')
+ }
+ end
+
+ it 'returns parsed and grouped Gitlab::VersionInfo objects' do
+ expect(releases_by_minor).to eq(expected_result)
+ end
end
+
+ context 'when response contains unexpected input type' do
+ let(:response) { 'error' }
+
+ it { expect(releases_by_minor).to be_nil }
+ end
+
+ context 'when response contains unexpected input array' do
+ let(:response) { ['error'] }
+
+ it { expect(releases_by_minor).to be_nil }
+ end
+ end
+
+ def mock_http_response(response)
+ http_response = instance_double(HTTParty::Response)
+
+ allow(http_response).to receive(:success?).and_return(!response.nil?)
+ allow(http_response).to receive(:parsed_response).and_return(response)
+
+ http_response
end
end
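
The expected call offsets in the exponential backoff example are just cumulative sums of doubling wait periods; a quick plain-Ruby check of that arithmetic (the one-hour cap is inferred from the expected values, not read from the implementation):

# Inferred arithmetic only: waits double from 5 seconds and appear to cap at one hour.
waits = []
wait = 5
11.times do
  waits << [wait, 3600].min
  wait *= 2
end
waits   # => [5, 10, 20, 40, 80, 160, 320, 640, 1280, 2560, 3600]

offsets = waits.each_with_object([0]) { |w, acc| acc << acc.last + w }
offsets # => [0, 5, 15, 35, 75, 155, 315, 635, 1275, 2555, 5115, 8715]
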
diff --git a/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
index 0353432741b..f2507a24b10 100644
--- a/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
+++ b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
@@ -3,84 +3,156 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do
- include StubVersion
using RSpec::Parameterized::TableSyntax
describe '#check_runner_upgrade_status' do
subject(:result) { described_class.instance.check_runner_upgrade_status(runner_version) }
+ let(:gitlab_version) { '14.1.1' }
+ let(:parsed_runner_version) { ::Gitlab::VersionInfo.parse(runner_version, parse_suffix: true) }
+
before do
- runner_releases_double = instance_double(Gitlab::Ci::RunnerReleases)
+ allow(described_class.instance).to receive(:gitlab_version)
+ .and_return(::Gitlab::VersionInfo.parse(gitlab_version))
+ end
+
+ context 'with failing Gitlab::Ci::RunnerReleases request' do
+ let(:runner_version) { '14.1.123' }
+ let(:runner_releases_double) { instance_double(Gitlab::Ci::RunnerReleases) }
+
+ before do
+ allow(Gitlab::Ci::RunnerReleases).to receive(:instance).and_return(runner_releases_double)
+ allow(runner_releases_double).to receive(:releases).and_return(nil)
+ end
- allow(Gitlab::Ci::RunnerReleases).to receive(:instance).and_return(runner_releases_double)
- allow(runner_releases_double).to receive(:releases).and_return(available_runner_releases.map { |v| ::Gitlab::VersionInfo.parse(v) })
+ it 'returns :error' do
+ is_expected.to eq({ error: parsed_runner_version })
+ end
end
- context 'with available_runner_releases configured up to 14.1.1' do
- let(:available_runner_releases) { %w[13.9.0 13.9.1 13.9.2 13.10.0 13.10.1 14.0.0 14.0.1 14.0.2 14.1.0 14.1.1 14.1.1-rc3] }
+ context 'with available_runner_releases configured' do
+ before do
+ url = ::Gitlab::CurrentSettings.current_application_settings.public_runner_releases_url
- context 'with nil runner_version' do
- let(:runner_version) { nil }
+ WebMock.stub_request(:get, url).to_return(
+ body: available_runner_releases.map { |v| { name: v } }.to_json,
+ status: 200,
+ headers: { 'Content-Type' => 'application/json' }
+ )
+ end
- it 'returns :invalid' do
- is_expected.to eq(:invalid)
+ context 'with no available runner releases' do
+ let(:available_runner_releases) do
+ %w[]
end
- end
- context 'with invalid runner_version' do
- let(:runner_version) { 'junk' }
+ context 'with Gitlab::VERSION set to 14.1.1' do
+ let(:gitlab_version) { '14.1.1' }
- it 'raises ArgumentError' do
- expect { subject }.to raise_error(ArgumentError)
+ context 'with runner_version from last minor release' do
+ let(:runner_version) { 'v14.0.1' }
+
+ it 'returns :not_available' do
+ is_expected.to eq({ not_available: parsed_runner_version })
+ end
+ end
end
end
- context 'with Gitlab::VERSION set to 14.1.123' do
- before do
- stub_version('14.1.123', 'deadbeef')
+ context 'up to 14.1.1' do
+ let(:available_runner_releases) do
+ %w[13.9.0 13.9.1 13.9.2 13.10.0 13.10.1 14.0.0 14.0.1 14.0.2-rc1 14.0.2 14.1.0 14.1.1]
+ end
+
+ context 'with nil runner_version' do
+ let(:runner_version) { nil }
- described_class.instance.reset!
+ it 'returns :invalid_version' do
+ is_expected.to match({ invalid_version: anything })
+ end
end
- context 'with a runner_version that is too recent' do
- let(:runner_version) { 'v14.2.0' }
+ context 'with invalid runner_version' do
+ let(:runner_version) { 'junk' }
- it 'returns :not_available' do
- is_expected.to eq(:not_available)
+ it 'returns :invalid_version' do
+ is_expected.to match({ invalid_version: anything })
end
end
- end
- context 'with Gitlab::VERSION set to 14.0.1' do
- before do
- stub_version('14.0.1', 'deadbeef')
+ context 'with Gitlab::VERSION set to 14.1.123' do
+ let(:gitlab_version) { '14.1.123' }
+
+ context 'with a runner_version that is too recent' do
+ let(:runner_version) { 'v14.2.0' }
- described_class.instance.reset!
+ it 'returns :not_available' do
+ is_expected.to eq({ not_available: parsed_runner_version })
+ end
+ end
+ end
+
+ context 'with Gitlab::VERSION set to 14.0.1' do
+ let(:gitlab_version) { '14.0.1' }
+
+ context 'with valid params' do
+ where(:runner_version, :expected_result, :expected_suggested_version) do
+ 'v15.0.0' | :not_available | '15.0.0' # not available since the GitLab instance is still on 14.x, a major version might be incompatible, and a patch upgrade is not available
+ 'v14.1.0-rc3' | :recommended | '14.1.1' # recommended since even though the GitLab instance is still on 14.0.x, there is a patch release (14.1.1) available which might contain security fixes
+ 'v14.1.0~beta.1574.gf6ea9389' | :recommended | '14.1.1' # suffixes are correctly handled
+ 'v14.1.0/1.1.0' | :recommended | '14.1.1' # suffixes are correctly handled
+ 'v14.1.0' | :recommended | '14.1.1' # recommended since even though the GitLab instance is still on 14.0.x, there is a patch release (14.1.1) available which might contain security fixes
+ 'v14.0.1' | :recommended | '14.0.2' # recommended upgrade since 14.0.2 is available
+ 'v14.0.2-rc1' | :recommended | '14.0.2' # recommended upgrade since 14.0.2 is available and we'll move out of a release candidate
+ 'v14.0.2' | :not_available | '14.0.2' # not available since 14.0.2 is the latest 14.0.x release available within the instance's major.minor version
+ 'v13.10.1' | :available | '14.0.2' # available upgrade: 14.0.2
+ 'v13.10.1~beta.1574.gf6ea9389' | :recommended | '13.10.1' # suffixes are correctly handled, official 13.10.1 is available
+ 'v13.10.1/1.1.0' | :recommended | '13.10.1' # suffixes are correctly handled, official 13.10.1 is available
+ 'v13.10.0' | :recommended | '13.10.1' # recommended upgrade since 13.10.1 is available
+ 'v13.9.2' | :recommended | '14.0.2' # recommended upgrade since backports are no longer released for this version
+ 'v13.9.0' | :recommended | '14.0.2' # recommended upgrade since backports are no longer released for this version
+ 'v13.8.1' | :recommended | '14.0.2' # recommended upgrade since build is too old (missing in records)
+ 'v11.4.1' | :recommended | '14.0.2' # recommended upgrade since build is too old (missing in records)
+ end
+
+ with_them do
+ it { is_expected.to eq({ expected_result => Gitlab::VersionInfo.parse(expected_suggested_version) }) }
+ end
+ end
end
- context 'with valid params' do
- where(:runner_version, :expected_result) do
- 'v15.0.0' | :not_available # not available since the GitLab instance is still on 14.x and a major version might be incompatible
- 'v14.1.0-rc3' | :recommended # recommended since even though the GitLab instance is still on 14.0.x, there is a patch release (14.1.1) available which might contain security fixes
- 'v14.1.0~beta.1574.gf6ea9389' | :recommended # suffixes are correctly handled
- 'v14.1.0/1.1.0' | :recommended # suffixes are correctly handled
- 'v14.1.0' | :recommended # recommended since even though the GitLab instance is still on 14.0.x, there is a patch release (14.1.1) available which might contain security fixes
- 'v14.0.1' | :recommended # recommended upgrade since 14.0.2 is available
- 'v14.0.2' | :not_available # not available since 14.0.2 is the latest 14.0.x release available within the instance's major.minor version
- 'v13.10.1' | :available # available upgrade: 14.1.1
- 'v13.10.1~beta.1574.gf6ea9389' | :available # suffixes are correctly handled
- 'v13.10.1/1.1.0' | :available # suffixes are correctly handled
- 'v13.10.0' | :recommended # recommended upgrade since 13.10.1 is available
- 'v13.9.2' | :recommended # recommended upgrade since backports are no longer released for this version
- 'v13.9.0' | :recommended # recommended upgrade since backports are no longer released for this version
- 'v13.8.1' | :recommended # recommended upgrade since build is too old (missing in records)
- 'v11.4.1' | :recommended # recommended upgrade since build is too old (missing in records)
+ context 'with Gitlab::VERSION set to 13.9.0' do
+ let(:gitlab_version) { '13.9.0' }
+
+ context 'with valid params' do
+ where(:runner_version, :expected_result, :expected_suggested_version) do
+ 'v14.0.0' | :recommended | '14.0.2' # recommended upgrade since 14.0.2 is available, even though the GitLab instance is still on 13.x and a major version might be incompatible
+ 'v13.10.1' | :not_available | '13.10.1' # not available since 13.10.1 is already ahead of GitLab instance version and is the latest patch update for 13.10.x
+ 'v13.10.0' | :recommended | '13.10.1' # recommended upgrade since 13.10.1 is available
+ 'v13.9.2' | :not_available | '13.9.2' # not_available even though backports are no longer released for this version because the runner is already on the same version as the GitLab version
+ 'v13.9.0' | :recommended | '13.9.2' # recommended upgrade since backports are no longer released for this version
+ 'v13.8.1' | :recommended | '13.9.2' # recommended upgrade since build is too old (missing in records)
+ 'v11.4.1' | :recommended | '13.9.2' # recommended upgrade since build is too old (missing in records)
+ end
+
+ with_them do
+ it { is_expected.to eq({ expected_result => Gitlab::VersionInfo.parse(expected_suggested_version) }) }
+ end
end
+ end
+ end
+
+ context 'up to 15.1.0' do
+ let(:available_runner_releases) { %w[14.9.1 14.9.2 14.10.0 14.10.1 15.0.0 15.1.0] }
+
+ context 'with Gitlab::VERSION set to 15.2.0-pre' do
+ let(:gitlab_version) { '15.2.0-pre' }
+
+ context 'with unknown runner version' do
+ let(:runner_version) { '14.11.0~beta.29.gd0c550e3' }
- with_them do
- it 'returns symbol representing expected upgrade status' do
- is_expected.to be_a(Symbol)
- is_expected.to eq(expected_result)
+ it 'recommends 15.1.0 since 14.11 is an unknown release and 15.1.0 is available' do
+ is_expected.to eq({ recommended: Gitlab::VersionInfo.new(15, 1, 0) })
end
end
end
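
The rewritten expectations show the check now returning a one-entry hash of status to suggested version instead of a bare symbol; a hypothetical caller might unpack it like this (the `result` value is a stand-in, not an actual API call):

# `result` stands in for described_class.instance.check_runner_upgrade_status(...)
result = { recommended: Gem::Version.new('14.0.2') }
status, suggested = result.first

message =
  case status
  when :recommended   then "upgrade to #{suggested} is strongly recommended"
  when :available     then "an upgrade to #{suggested} is available"
  when :not_available then "no newer release than #{suggested} for this GitLab version"
  else                     "runner version could not be checked"
  end
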
diff --git a/spec/lib/gitlab/ci/status/stage/factory_spec.rb b/spec/lib/gitlab/ci/status/stage/factory_spec.rb
index e0f5531f370..35d44281072 100644
--- a/spec/lib/gitlab/ci/status/stage/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/stage/factory_spec.rb
@@ -7,9 +7,7 @@ RSpec.describe Gitlab::Ci::Status::Stage::Factory do
let(:project) { create(:project) }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
- let(:stage) do
- build(:ci_stage, pipeline: pipeline, name: 'test')
- end
+ let(:stage) { create(:ci_stage, pipeline: pipeline) }
subject do
described_class.new(stage, user)
@@ -26,11 +24,7 @@ RSpec.describe Gitlab::Ci::Status::Stage::Factory do
context 'when stage has a core status' do
(Ci::HasStatus::AVAILABLE_STATUSES - %w(manual skipped scheduled)).each do |core_status|
context "when core status is #{core_status}" do
- before do
- create(:ci_build, pipeline: pipeline, stage: 'test', status: core_status)
- create(:commit_status, pipeline: pipeline, stage: 'test', status: core_status)
- create(:ci_build, pipeline: pipeline, stage: 'build', status: :failed)
- end
+ let(:stage) { create(:ci_stage, pipeline: pipeline, status: core_status) }
it "fabricates a core status #{core_status}" do
expect(status).to be_a(
@@ -48,12 +42,12 @@ RSpec.describe Gitlab::Ci::Status::Stage::Factory do
context 'when stage has warnings' do
let(:stage) do
- build(:ci_stage, name: 'test', status: :success, pipeline: pipeline)
+ create(:ci_stage, status: :success, pipeline: pipeline)
end
before do
create(:ci_build, :allowed_to_fail, :failed,
- stage: 'test', pipeline: stage.pipeline)
+ stage_id: stage.id, pipeline: stage.pipeline)
end
it 'fabricates extended "success with warnings" status' do
@@ -70,11 +64,7 @@ RSpec.describe Gitlab::Ci::Status::Stage::Factory do
context 'when stage has manual builds' do
(Ci::HasStatus::BLOCKED_STATUS + ['skipped']).each do |core_status|
context "when status is #{core_status}" do
- before do
- create(:ci_build, pipeline: pipeline, stage: 'test', status: core_status)
- create(:commit_status, pipeline: pipeline, stage: 'test', status: core_status)
- create(:ci_build, pipeline: pipeline, stage: 'build', status: :manual)
- end
+ let(:stage) { create(:ci_stage, pipeline: pipeline, status: core_status) }
it 'fabricates a play manual status' do
expect(status).to be_a(Gitlab::Ci::Status::Stage::PlayManual)
diff --git a/spec/lib/gitlab/ci/status/stage/play_manual_spec.rb b/spec/lib/gitlab/ci/status/stage/play_manual_spec.rb
index 25b79ff2099..9fdaddc083e 100644
--- a/spec/lib/gitlab/ci/status/stage/play_manual_spec.rb
+++ b/spec/lib/gitlab/ci/status/stage/play_manual_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Ci::Status::Stage::PlayManual do
end
describe '#action_path' do
- let(:stage) { create(:ci_stage_entity, status: 'manual') }
+ let(:stage) { create(:ci_stage, status: 'manual') }
let(:pipeline) { stage.pipeline }
let(:play_manual) { stage.detailed_status(create(:user)) }
@@ -46,25 +46,25 @@ RSpec.describe Gitlab::Ci::Status::Stage::PlayManual do
subject { described_class.matches?(stage, user) }
context 'when stage is skipped' do
- let(:stage) { create(:ci_stage_entity, status: :skipped) }
+ let(:stage) { create(:ci_stage, status: :skipped) }
it { is_expected.to be_truthy }
end
context 'when stage is manual' do
- let(:stage) { create(:ci_stage_entity, status: :manual) }
+ let(:stage) { create(:ci_stage, status: :manual) }
it { is_expected.to be_truthy }
end
context 'when stage is scheduled' do
- let(:stage) { create(:ci_stage_entity, status: :scheduled) }
+ let(:stage) { create(:ci_stage, status: :scheduled) }
it { is_expected.to be_truthy }
end
context 'when stage is success' do
- let(:stage) { create(:ci_stage_entity, status: :success) }
+ let(:stage) { create(:ci_stage, status: :success) }
context 'and does not have manual builds' do
it { is_expected.to be_falsy }
diff --git a/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb b/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb
index 6c4f69fb036..5ab859241c6 100644
--- a/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb
+++ b/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::Ci::Tags::BulkInsert do
let(:error_message) do
<<~MESSAGE
A mechanism depending on internals of 'act-as-taggable-on` has been designed
- to bulk insert tags for Ci::Build records.
+ to bulk insert tags for Ci::Build/Ci::Runner records.
Please review the code carefully before updating the gem version
https://gitlab.com/gitlab-org/gitlab/-/issues/350053
MESSAGE
@@ -27,6 +27,21 @@ RSpec.describe Gitlab::Ci::Tags::BulkInsert do
it { expect(ActsAsTaggableOn::VERSION).to eq(acceptable_version), error_message }
end
+ describe '.bulk_insert_tags!' do
+ let(:inserter) { instance_double(described_class) }
+
+ it 'delegates to bulk insert class' do
+ expect(Gitlab::Ci::Tags::BulkInsert)
+ .to receive(:new)
+ .with(statuses)
+ .and_return(inserter)
+
+ expect(inserter).to receive(:insert!)
+
+ described_class.bulk_insert_tags!(statuses)
+ end
+ end
+
describe '#insert!' do
context 'without tags' do
it { expect(service.insert!).to be_falsey }
@@ -44,6 +59,50 @@ RSpec.describe Gitlab::Ci::Tags::BulkInsert do
expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
expect(other_job.reload.tag_list).to match_array(%w[tag2 tag3 tag4])
end
+
+ it 'persists taggings' do
+ service.insert!
+
+ expect(job.taggings.size).to eq(2)
+ expect(other_job.taggings.size).to eq(3)
+
+ expect(Ci::Build.tagged_with('tag1')).to include(job)
+ expect(Ci::Build.tagged_with('tag2')).to include(job, other_job)
+ expect(Ci::Build.tagged_with('tag3')).to include(other_job)
+ end
+
+ it 'strips tags' do
+ job.tag_list = [' taga', 'tagb ', ' tagc ']
+
+ service.insert!
+ expect(job.tags.map(&:name)).to match_array(%w[taga tagb tagc])
+ end
+
+ context 'when batching inserts for tags' do
+ before do
+ stub_const("#{described_class}::TAGS_BATCH_SIZE", 2)
+ end
+
+ it 'inserts tags in batches' do
+ recorder = ActiveRecord::QueryRecorder.new { service.insert! }
+ count = recorder.log.count { |query| query.include?('INSERT INTO "tags"') }
+
+ expect(count).to eq(2)
+ end
+ end
+
+ context 'when batching inserts for taggings' do
+ before do
+ stub_const("#{described_class}::TAGGINGS_BATCH_SIZE", 2)
+ end
+
+ it 'inserts taggings in batches' do
+ recorder = ActiveRecord::QueryRecorder.new { service.insert! }
+ count = recorder.log.count { |query| query.include?('INSERT INTO "taggings"') }
+
+ expect(count).to eq(3)
+ end
+ end
end
context 'with tags for only one job' do
@@ -57,6 +116,15 @@ RSpec.describe Gitlab::Ci::Tags::BulkInsert do
expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
expect(other_job.reload.tag_list).to be_empty
end
+
+ it 'persists taggings' do
+ service.insert!
+
+ expect(job.taggings.size).to eq(2)
+
+ expect(Ci::Build.tagged_with('tag1')).to include(job)
+ expect(Ci::Build.tagged_with('tag2')).to include(job)
+ end
end
end
end
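
The batch-count assertions above reduce to slice arithmetic; shown here in plain Ruby, with batch sizes of 2 taken from the stubbed constants and tag sets mirroring the two jobs in the spec:

distinct_tags = %w[tag1 tag2 tag3 tag4]                          # union of both jobs' tags
taggings      = [%w[tag1 tag2], %w[tag2 tag3 tag4]].sum(&:size)  # => 5 tagging rows

distinct_tags.each_slice(2).count # => 2 INSERT INTO "tags" batches
(taggings / 2.0).ceil             # => 3 INSERT INTO "taggings" batches
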
diff --git a/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
index 27de8324206..65fd2b016ac 100644
--- a/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
@@ -34,6 +34,16 @@ RSpec.describe 'Deploy-ECS.gitlab-ci.yml' do
expect(build_names).to include('production_ecs')
end
+ context 'when the DAST template is also included' do
+ let(:dast_template) { Gitlab::Template::GitlabCiYmlTemplate.find('Security/DAST') }
+
+ before do
+ stub_ci_pipeline_yaml_file(template.content + dast_template.content)
+ end
+
+ include_examples 'no pipeline yaml error'
+ end
+
context 'when running a pipeline for a branch' do
let(:pipeline_branch) { 'test_branch' }
diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb
index b0704ad7f50..8ec0846bdca 100644
--- a/spec/lib/gitlab/ci/variables/builder_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder_spec.rb
@@ -166,9 +166,8 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
allow(builder).to receive(:secret_instance_variables) { [var('J', 10), var('K', 10)] }
allow(builder).to receive(:secret_group_variables) { [var('K', 11), var('L', 11)] }
allow(builder).to receive(:secret_project_variables) { [var('L', 12), var('M', 12)] }
- allow(job).to receive(:trigger_request) { double(user_variables: [var('M', 13), var('N', 13)]) }
- allow(pipeline).to receive(:variables) { [var('N', 14), var('O', 14)] }
- allow(pipeline).to receive(:pipeline_schedule) { double(job_variables: [var('O', 15), var('P', 15)]) }
+ allow(pipeline).to receive(:variables) { [var('M', 13), var('N', 13)] }
+ allow(pipeline).to receive(:pipeline_schedule) { double(job_variables: [var('N', 14), var('O', 14)]) }
end
it 'returns variables in order depending on resource hierarchy' do
@@ -185,8 +184,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
var('K', 11), var('L', 11),
var('L', 12), var('M', 12),
var('M', 13), var('N', 13),
- var('N', 14), var('O', 14),
- var('O', 15), var('P', 15)])
+ var('N', 14), var('O', 14)])
end
it 'overrides duplicate keys depending on resource hierarchy' do
@@ -198,7 +196,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
'I' => '9', 'J' => '10',
'K' => '11', 'L' => '12',
'M' => '13', 'N' => '14',
- 'O' => '15', 'P' => '15')
+ 'O' => '14')
end
end
diff --git a/spec/lib/gitlab/ci/yaml_processor/feature_flags_spec.rb b/spec/lib/gitlab/ci/yaml_processor/feature_flags_spec.rb
new file mode 100644
index 00000000000..0bd9563d191
--- /dev/null
+++ b/spec/lib/gitlab/ci/yaml_processor/feature_flags_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::YamlProcessor::FeatureFlags do
+ let(:feature_flag) { :my_feature_flag }
+
+ context 'when the actor is set' do
+ let(:actor) { double }
+ let(:another_actor) { double }
+
+ it 'checks the feature flag using the given actor' do
+ described_class.with_actor(actor) do
+ expect(Feature).to receive(:enabled?).with(feature_flag, actor)
+
+ described_class.enabled?(feature_flag)
+ end
+ end
+
+ it 'returns the value of the block' do
+ result = described_class.with_actor(actor) do
+ :test
+ end
+
+ expect(result).to eq(:test)
+ end
+
+ it 'restores the existing actor if any' do
+ described_class.with_actor(actor) do
+ described_class.with_actor(another_actor) do
+ expect(Feature).to receive(:enabled?).with(feature_flag, another_actor)
+
+ described_class.enabled?(feature_flag)
+ end
+
+ expect(Feature).to receive(:enabled?).with(feature_flag, actor)
+ described_class.enabled?(feature_flag)
+ end
+ end
+
+ it 'restores the actor to nil after the block' do
+ described_class.with_actor(actor) do
+ expect(Thread.current[described_class::ACTOR_KEY]).to eq(actor)
+ end
+
+ expect(Thread.current[described_class::ACTOR_KEY]).to be nil
+ end
+ end
+
+ context 'when feature flag is checked outside the "with_actor" block' do
+ it 'raises an error on dev/test environment' do
+ expect { described_class.enabled?(feature_flag) }.to raise_error(described_class::NoActorError)
+ end
+
+ context 'when on production' do
+ before do
+ allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(false)
+ end
+
+ it 'checks the feature flag without actor' do
+ expect(Feature).to receive(:enabled?).with(feature_flag, nil)
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_for_dev_exception)
+ .and_call_original
+
+ described_class.enabled?(feature_flag)
+ end
+ end
+ end
+
+ context 'when actor is explicitly nil' do
+ it 'checks the feature flag without actor' do
+ described_class.with_actor(nil) do
+ expect(Feature).to receive(:enabled?).with(feature_flag, nil)
+
+ described_class.enabled?(feature_flag)
+ end
+ end
+ end
+end
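
The new spec exercises a thread-local actor: `with_actor` stores the actor for the duration of a block and restores whatever was set before, an explicitly nil actor is allowed, and checking a flag with no actor at all is an error. A rough, self-contained sketch of that pattern, assuming nothing beyond the behaviour asserted above; names other than ACTOR_KEY and NoActorError are hypothetical, and the flag check itself is stubbed out:

module ActorScopedFlags
  ACTOR_KEY = :actor_scoped_flag_actor   # hypothetical thread-local key
  NO_ACTOR = Object.new                  # sentinel for "actor explicitly nil"
  NoActorError = Class.new(StandardError)

  def self.with_actor(actor)
    previous = Thread.current[ACTOR_KEY]
    Thread.current[ACTOR_KEY] = actor.nil? ? NO_ACTOR : actor
    yield
  ensure
    # Restore the previous actor so nested blocks behave as the spec expects.
    Thread.current[ACTOR_KEY] = previous
  end

  def self.enabled?(flag)
    stored = Thread.current[ACTOR_KEY]
    raise NoActorError, "#{flag} checked outside with_actor" if stored.nil?

    actor = stored.equal?(NO_ACTOR) ? nil : stored
    # Stand-in for the Feature.enabled?(flag, actor) call the spec stubs.
    puts "checking #{flag} with actor=#{actor.inspect}"
  end
end

ActorScopedFlags.with_actor(:some_project) do
  ActorScopedFlags.enabled?(:my_feature_flag)
end
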
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 3dd9ca35881..22bc6b0db59 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -70,7 +70,7 @@ module Gitlab
options: { script: ['rspec'] },
rules: [
{ if: '$CI_COMMIT_REF_NAME == "master"' },
- { changes: %w[README.md] }
+ { changes: { paths: %w[README.md] } }
],
allow_failure: false,
when: 'on_success',
@@ -980,7 +980,7 @@ module Gitlab
it { is_expected.to be_valid }
- it "returns image and service when defined" do
+ it "returns with image" do
expect(processor.stage_builds_attributes("test")).to contain_exactly({
stage: "test",
stage_idx: 2,
@@ -1010,6 +1010,51 @@ module Gitlab
end
end
end
+
+ context 'when a service has pull_policy' do
+ let(:config) do
+ <<~YAML
+ services:
+ - name: postgres:11.9
+ pull_policy: if-not-present
+
+ test:
+ script: exit 0
+ YAML
+ end
+
+ it { is_expected.to be_valid }
+
+ it "returns with service" do
+ expect(processor.stage_builds_attributes("test")).to contain_exactly({
+ stage: "test",
+ stage_idx: 2,
+ name: "test",
+ only: { refs: %w[branches tags] },
+ options: {
+ script: ["exit 0"],
+ services: [{ name: "postgres:11.9", pull_policy: ["if-not-present"] }]
+ },
+ allow_failure: false,
+ when: "on_success",
+ job_variables: [],
+ root_variables_inheritance: true,
+ scheduling_type: :stage
+ })
+ end
+
+ context 'when the feature flag ci_docker_image_pull_policy is disabled' do
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: false)
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it "returns no job" do
+ expect(processor.jobs).to eq({})
+ end
+ end
+ end
end
describe 'Variables' do
@@ -2848,6 +2893,51 @@ module Gitlab
end
end
+ describe 'Rules' do
+ context 'changes' do
+ let(:config) do
+ <<~YAML
+ rspec:
+ script: exit 0
+ rules:
+ - changes: [README.md]
+ YAML
+ end
+
+ it 'returns builds with correct rules' do
+ expect(processor.builds.size).to eq(1)
+ expect(processor.builds[0]).to match(
+ hash_including(
+ name: "rspec",
+ rules: [{ changes: { paths: ["README.md"] } }]
+ )
+ )
+ end
+
+ context 'with paths' do
+ let(:config) do
+ <<~YAML
+ rspec:
+ script: exit 0
+ rules:
+ - changes:
+ paths: [README.md]
+ YAML
+ end
+
+ it 'returns builds with correct rules' do
+ expect(processor.builds.size).to eq(1)
+ expect(processor.builds[0]).to match(
+ hash_including(
+ name: "rspec",
+ rules: [{ changes: { paths: ["README.md"] } }]
+ )
+ )
+ end
+ end
+ end
+ end
+
describe '#execute' do
subject { Gitlab::Ci::YamlProcessor.new(content).execute }
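
Both Rules examples above expect the same normalised shape: a bare changes: array becomes the hash form with a paths: key, and an explicit paths: hash passes through unchanged. A one-line sketch of that normalisation under those assumptions (the method name is illustrative, not the processor's API):

def normalize_changes(changes)
  # Wrap the shorthand array form; keep the explicit hash form as-is.
  changes.is_a?(Array) ? { paths: changes } : changes
end

normalize_changes(['README.md'])         # => {:paths=>["README.md"]}
normalize_changes(paths: ['README.md'])  # => {:paths=>["README.md"]}
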
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index 109e83be294..616fe15c1a6 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -92,11 +92,11 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
context 'when sentry is configured' do
before do
stub_sentry_settings
- stub_config_setting(host: 'example.com')
+ stub_config_setting(host: 'gitlab.example.com')
end
it 'adds sentry path to CSP without user' do
- expect(directives['connect_src']).to eq("'self' ws://example.com dummy://example.com/43")
+ expect(directives['connect_src']).to eq("'self' ws://gitlab.example.com dummy://example.com")
end
end
@@ -146,7 +146,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
let(:snowplow_micro_url) { "http://#{snowplow_micro_hostname}/" }
before do
- stub_env('SNOWPLOW_MICRO_ENABLE', 1)
+ stub_config(snowplow_micro: { enabled: true })
allow(Gitlab::Tracking).to receive(:collector_hostname).and_return(snowplow_micro_hostname)
end
@@ -169,9 +169,9 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
expect(directives['connect_src']).to match(Regexp.new(snowplow_micro_url))
end
- context 'when not enabled using ENV[SNOWPLOW_MICRO_ENABLE]' do
+ context 'when not enabled using config' do
before do
- stub_env('SNOWPLOW_MICRO_ENABLE', nil)
+ stub_config(snowplow_micro: { enabled: false })
end
it 'does not add Snowplow Micro URL to connect-src' do
@@ -220,10 +220,11 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
expect(policy.directives['base-uri']).to be_nil
end
- it 'returns default values for directives not defined by the user' do
+ it 'returns default values for directives not defined by the user or with <default_value> and disables directives set to false' do
# Explicitly disabling script_src and setting report_uri
csp_config[:directives] = {
script_src: false,
+ style_src: '<default_value>',
report_uri: 'https://example.org'
}
diff --git a/spec/lib/gitlab/data_builder/deployment_spec.rb b/spec/lib/gitlab/data_builder/deployment_spec.rb
index e8fe80f75cb..8ee57542d43 100644
--- a/spec/lib/gitlab/data_builder/deployment_spec.rb
+++ b/spec/lib/gitlab/data_builder/deployment_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::DataBuilder::Deployment do
it 'returns the object kind for a deployment' do
deployment = build(:deployment, deployable: nil, environment: create(:environment))
- data = described_class.build(deployment, Time.current)
+ data = described_class.build(deployment, 'success', Time.current)
expect(data[:object_kind]).to eq('deployment')
end
@@ -23,7 +23,7 @@ RSpec.describe Gitlab::DataBuilder::Deployment do
expected_commit_url = Gitlab::UrlBuilder.build(commit)
status_changed_at = Time.current
- data = described_class.build(deployment, status_changed_at)
+ data = described_class.build(deployment, 'failed', status_changed_at)
expect(data[:status]).to eq('failed')
expect(data[:status_changed_at]).to eq(status_changed_at)
@@ -42,7 +42,7 @@ RSpec.describe Gitlab::DataBuilder::Deployment do
it 'does not include the deployable URL when there is no deployable' do
deployment = create(:deployment, status: :failed, deployable: nil)
- data = described_class.build(deployment, Time.current)
+ data = described_class.build(deployment, 'failed', Time.current)
expect(data[:deployable_url]).to be_nil
end
@@ -51,7 +51,7 @@ RSpec.describe Gitlab::DataBuilder::Deployment do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:deployment) { create(:deployment, project: project) }
- subject(:data) { described_class.build(deployment, Time.current) }
+ subject(:data) { described_class.build(deployment, 'created', Time.current) }
before(:all) do
project.repository.remove
@@ -69,7 +69,7 @@ RSpec.describe Gitlab::DataBuilder::Deployment do
context 'when deployed_by is nil' do
let_it_be(:deployment) { create(:deployment, user: nil, deployable: nil) }
- subject(:data) { described_class.build(deployment, Time.current) }
+ subject(:data) { described_class.build(deployment, 'created', Time.current) }
before(:all) do
deployment.user = nil
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index c2bd20798f1..469812c80fc 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -36,6 +36,7 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
expect(build_data).to be_a(Hash)
expect(build_data[:id]).to eq(build.id)
expect(build_data[:status]).to eq(build.status)
+ expect(build_data[:failure_reason]).to be_nil
expect(build_data[:allow_failure]).to eq(build.allow_failure)
expect(build_data[:environment]).to be_nil
expect(runner_data).to eq(nil)
@@ -197,4 +198,14 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
end
end
end
+
+ describe '.build failed' do
+ let(:build) { create(:ci_build, :failed, pipeline: pipeline, failure_reason: :script_failure) }
+ let(:data) { described_class.build(pipeline) }
+ let(:build_data) { data[:builds].last }
+
+ it 'has failure_reason' do
+ expect(build_data[:failure_reason]).to eq(build.failure_reason)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
index c39f6a78e93..a7b3670da7c 100644
--- a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
@@ -220,6 +220,12 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
expect(described_class.created_since(fixed_time)).to contain_exactly(stuck_job, failed_job, max_attempts_failed_job)
end
end
+
+ describe '.blocked_by_max_attempts' do
+ it 'returns blocked jobs' do
+ expect(described_class.blocked_by_max_attempts).to contain_exactly(max_attempts_failed_job)
+ end
+ end
end
describe 'delegated batched_migration attributes' do
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
index 97459d4a7be..b8ff78be333 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
@@ -14,6 +14,11 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
end
+ before do
+ allow(Gitlab::Database::BackgroundMigration::HealthStatus).to receive(:evaluate)
+ .and_return(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal)
+ end
+
describe '#run_migration_job' do
shared_examples_for 'it has completed the migration' do
it 'does not create and run a migration job' do
@@ -59,13 +64,48 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
sub_batch_size: migration.sub_batch_size)
end
- it 'optimizes the migration after executing the job' do
- migration.update!(min_value: event1.id, max_value: event2.id)
+ context 'migration health' do
+ let(:health_status) { Gitlab::Database::BackgroundMigration::HealthStatus }
+ let(:stop_signal) { health_status::Signals::Stop.new(:indicator, reason: 'Take a break') }
+ let(:normal_signal) { health_status::Signals::Normal.new(:indicator, reason: 'All good') }
+ let(:not_available_signal) { health_status::Signals::NotAvailable.new(:indicator, reason: 'Indicator is disabled') }
+ let(:unknown_signal) { health_status::Signals::Unknown.new(:indicator, reason: 'Something went wrong') }
- expect(migration_wrapper).to receive(:perform).ordered
- expect(migration).to receive(:optimize!).ordered
+ before do
+ migration.update!(min_value: event1.id, max_value: event2.id)
+ expect(migration_wrapper).to receive(:perform)
+ end
- runner.run_migration_job(migration)
+ it 'puts migration on hold on stop signal' do
+ expect(health_status).to receive(:evaluate).and_return(stop_signal)
+
+ expect { runner.run_migration_job(migration) }.to change { migration.on_hold? }
+ .from(false).to(true)
+ end
+
+ it 'optimizes migration on normal signal' do
+ expect(health_status).to receive(:evaluate).and_return(normal_signal)
+
+ expect(migration).to receive(:optimize!)
+
+ expect { runner.run_migration_job(migration) }.not_to change { migration.on_hold? }
+ end
+
+ it 'optimizes migration on no signal' do
+ expect(health_status).to receive(:evaluate).and_return(not_available_signal)
+
+ expect(migration).to receive(:optimize!)
+
+ expect { runner.run_migration_job(migration) }.not_to change { migration.on_hold? }
+ end
+
+ it 'optimizes migration on unknown signal' do
+ expect(health_status).to receive(:evaluate).and_return(unknown_signal)
+
+ expect(migration).to receive(:optimize!)
+
+ expect { runner.run_migration_job(migration) }.not_to change { migration.on_hold? }
+ end
end
end
@@ -362,6 +402,8 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
.with(gitlab_schemas, 'CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
.and_return(batched_migration)
+ expect(batched_migration).to receive(:reset_attempts_of_blocked_jobs!).and_call_original
+
expect(batched_migration).to receive(:finalize!).and_call_original
expect do
@@ -380,8 +422,15 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
context 'when migration fails to complete' do
+ let(:error_message) do
+ "Batched migration #{batched_migration.job_class_name} could not be completed and a manual action is required."\
+ "Check the admin panel at (`/admin/background_migrations`) for more details."
+ end
+
it 'raises an error' do
- batched_migration.batched_jobs.with_status(:failed).update_all(attempts: Gitlab::Database::BackgroundMigration::BatchedJob::MAX_ATTEMPTS)
+ allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration).and_return(batched_migration)
+
+ allow(batched_migration).to receive(:finished?).and_return(false)
expect do
runner.finalize(
@@ -390,7 +439,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
column_name,
job_arguments
)
- end.to raise_error described_class::FailedToFinalize
+ end.to raise_error(described_class::FailedToFinalize, error_message)
end
end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index 8819171cfd0..55f607c0cb0 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -157,6 +157,27 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
+ describe '#reset_attempts_of_blocked_jobs!' do
+ let!(:migration) { create(:batched_background_migration) }
+ let(:max_attempts) { Gitlab::Database::BackgroundMigration::BatchedJob::MAX_ATTEMPTS }
+
+ before do
+ create(:batched_background_migration_job, attempts: max_attempts - 1, batched_migration: migration)
+ create(:batched_background_migration_job, attempts: max_attempts + 1, batched_migration: migration)
+ create(:batched_background_migration_job, attempts: max_attempts + 1, batched_migration: migration)
+ end
+
+ it 'sets the number of attempts to zero for blocked jobs' do
+ migration.reset_attempts_of_blocked_jobs!
+
+ expect(migration.batched_jobs.size).to eq(3)
+
+ migration.batched_jobs.blocked_by_max_attempts.each do |job|
+ expect(job.attempts).to be_zero
+ end
+ end
+ end
+
describe '#interval_elapsed?' do
context 'when the migration has no last_job' do
let(:batched_migration) { build(:batched_background_migration) }
@@ -322,6 +343,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
describe '#retry_failed_jobs!' do
let(:batched_migration) { create(:batched_background_migration, status: 'failed') }
+ let(:job_class) { Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob }
subject(:retry_failed_jobs) { batched_migration.retry_failed_jobs! }
@@ -335,7 +357,8 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
anything,
batch_min_value: 6,
batch_size: 5,
- job_arguments: batched_migration.job_arguments
+ job_arguments: batched_migration.job_arguments,
+ job_class: job_class
).and_return([6, 10])
end
end
@@ -570,6 +593,30 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
+ describe '#on_hold?', :freeze_time do
+ subject { migration.on_hold? }
+
+ let(:migration) { create(:batched_background_migration) }
+
+ it 'returns false if no on_hold_until is set' do
+ migration.on_hold_until = nil
+
+ expect(subject).to be_falsey
+ end
+
+ it 'returns false if on_hold_until has passed' do
+ migration.on_hold_until = 1.minute.ago
+
+ expect(subject).to be_falsey
+ end
+
+ it 'returns true if on_hold_until is in the future' do
+ migration.on_hold_until = 1.minute.from_now
+
+ expect(subject).to be_truthy
+ end
+ end
+
describe '.for_configuration' do
let!(:attributes) do
{
diff --git a/spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb b/spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb
new file mode 100644
index 00000000000..21204814f17
--- /dev/null
+++ b/spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::AutovacuumActiveOnTable do
+ include Database::DatabaseHelpers
+
+ let(:connection) { Gitlab::Database.database_base_models[:main].connection }
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
+
+ describe '#evaluate' do
+ subject { described_class.new(context).evaluate }
+
+ before do
+ swapout_view_for_table(:postgres_autovacuum_activity)
+ end
+
+ let(:context) { Gitlab::Database::BackgroundMigration::HealthStatus::Context.new(tables) }
+ let(:tables) { [table] }
+ let(:table) { 'users' }
+
+ context 'without autovacuum activity' do
+ it 'returns Normal signal' do
+ expect(subject).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal)
+ end
+
+ it 'remembers the indicator class' do
+ expect(subject.indicator_class).to eq(described_class)
+ end
+ end
+
+ context 'with autovacuum activity' do
+ before do
+ create(:postgres_autovacuum_activity, table: table, table_identifier: "public.#{table}")
+ end
+
+ it 'returns Stop signal' do
+ expect(subject).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Stop)
+ end
+
+ it 'explains why' do
+ expect(subject.reason).to include('autovacuum running on: table public.users')
+ end
+
+ it 'remembers the indicator class' do
+ expect(subject.indicator_class).to eq(described_class)
+ end
+
+ it 'returns NotAvailable signal when the feature flag is disabled' do
+ stub_feature_flags(batched_migrations_health_status_autovacuum: false)
+
+ expect(subject).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::NotAvailable)
+ end
+ end
+ end
+end
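
Rough shape of the indicator behaviour asserted above: a stop result when autovacuum is active on any table in the context, a normal result otherwise, and a not-available result when the (assumed) kill switch is off. The activity lookup is injected so the sketch has no database dependency; every name here is an illustrative stand-in, not the real indicator class:

class AutovacuumIndicatorSketch
  Result = Struct.new(:signal, :reason, :indicator_class)

  def initialize(tables, active_tables:, enabled: true)
    @tables = tables
    @active_tables = active_tables
    @enabled = enabled
  end

  def evaluate
    return Result.new(:not_available, 'indicator disabled', self.class) unless @enabled

    # Any overlap between the migration's tables and tables with running autovacuum stops it.
    busy = @tables & @active_tables
    if busy.any?
      Result.new(:stop, "autovacuum running on: table #{busy.join(', ')}", self.class)
    else
      Result.new(:normal, 'no autovacuum activity', self.class)
    end
  end
end

AutovacuumIndicatorSketch.new(['users'], active_tables: ['users']).evaluate.signal # => :stop
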
diff --git a/spec/lib/gitlab/database/background_migration/health_status_spec.rb b/spec/lib/gitlab/database/background_migration/health_status_spec.rb
new file mode 100644
index 00000000000..6d0430dcbbb
--- /dev/null
+++ b/spec/lib/gitlab/database/background_migration/health_status_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus do
+ let(:connection) { Gitlab::Database.database_base_models[:main].connection }
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
+
+ describe '.evaluate' do
+ subject(:evaluate) { described_class.evaluate(migration, indicator_class) }
+
+ let(:migration) { build(:batched_background_migration, :active) }
+
+ let(:health_status) { 'Gitlab::Database::BackgroundMigration::HealthStatus' }
+ let(:indicator_class) { class_double("#{health_status}::Indicators::AutovacuumActiveOnTable") }
+ let(:indicator) { instance_double("#{health_status}::Indicators::AutovacuumActiveOnTable") }
+
+ before do
+ allow(indicator_class).to receive(:new).with(migration.health_context).and_return(indicator)
+ end
+
+ it 'returns a signal' do
+ signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
+
+ expect(indicator).to receive(:evaluate).and_return(signal)
+
+ expect(evaluate).to eq(signal)
+ end
+
+ it 'logs interesting signals' do
+ signal = instance_double("#{health_status}::Signals::Stop", log_info?: true)
+
+ expect(indicator).to receive(:evaluate).and_return(signal)
+ expect(described_class).to receive(:log_signal).with(signal, migration)
+
+ evaluate
+ end
+
+ it 'does not log signals of no interest' do
+ signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
+
+ expect(indicator).to receive(:evaluate).and_return(signal)
+ expect(described_class).not_to receive(:log_signal)
+
+ evaluate
+ end
+
+ context 'on indicator error' do
+ let(:error) { RuntimeError.new('everything broken') }
+
+ before do
+ expect(indicator).to receive(:evaluate).and_raise(error)
+ end
+
+ it 'does not fail' do
+ expect { evaluate }.not_to raise_error
+ end
+
+ it 'returns Unknown signal' do
+ expect(evaluate).to be_an_instance_of(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
+ expect(evaluate.reason).to eq("unexpected error: everything broken (RuntimeError)")
+ end
+
+ it 'reports the exception to error tracking' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(error, migration_id: migration.id, job_class_name: migration.job_class_name)
+
+ evaluate
+ end
+ end
+ end
+end
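
The behaviour pinned down above — pass the indicator's signal through, log only signals flagged as interesting, and fold indicator errors into an Unknown signal with a descriptive reason — can be sketched without any GitLab plumbing. Class and method names below are illustrative stand-ins, not the real Gitlab::Database::BackgroundMigration::HealthStatus API:

require 'logger'

module HealthSketch
  class Signal
    attr_reader :indicator_class, :reason

    def initialize(indicator_class, reason:)
      @indicator_class = indicator_class
      @reason = reason
    end

    # Most signals are uninteresting; subclasses opt in to logging.
    def log_info?
      false
    end
  end

  class Normal < Signal; end

  class Stop < Signal
    def log_info?
      true
    end
  end

  class Unknown < Signal
    def log_info?
      true
    end
  end

  def self.evaluate(context, indicator_class, logger: Logger.new($stdout))
    signal = indicator_class.new(context).evaluate
    logger.info("#{signal.class.name}: #{signal.reason}") if signal.log_info?
    signal
  rescue StandardError => e
    # As in the spec above, an indicator failure degrades to an Unknown signal
    # instead of aborting the caller.
    Unknown.new(indicator_class, reason: "unexpected error: #{e.message} (#{e.class})")
  end
end
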
diff --git a/spec/lib/gitlab/database/each_database_spec.rb b/spec/lib/gitlab/database/each_database_spec.rb
index 8345cdfb8fb..2a6eb8f779d 100644
--- a/spec/lib/gitlab/database/each_database_spec.rb
+++ b/spec/lib/gitlab/database/each_database_spec.rb
@@ -4,9 +4,10 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::EachDatabase do
describe '.each_database_connection', :add_ci_connection do
+ let(:database_base_models) { { main: ActiveRecord::Base, ci: Ci::ApplicationRecord }.with_indifferent_access }
+
before do
- allow(Gitlab::Database).to receive(:database_base_models)
- .and_return({ main: ActiveRecord::Base, ci: Ci::ApplicationRecord }.with_indifferent_access)
+ allow(Gitlab::Database).to receive(:database_base_models_with_gitlab_shared).and_return(database_base_models)
end
it 'yields each connection after connecting SharedModel' do
@@ -60,12 +61,20 @@ RSpec.describe Gitlab::Database::EachDatabase do
end
context 'when shared connections are not included' do
+ def clear_memoization(key)
+ Gitlab::Database.remove_instance_variable(key) if Gitlab::Database.instance_variable_defined?(key)
+ end
+
+ before do
+ allow(Gitlab::Database).to receive(:database_base_models).and_return(database_base_models)
+
+ # Clear the memoization because the return value of Gitlab::Database#schemas_to_base_models depends on the stubbed value
+ clear_memoization(:@schemas_to_base_models)
+ clear_memoization(:@schemas_to_base_models_ee)
+ end
+
it 'only yields the unshared connections' do
- if Gitlab::Database.has_config?(:ci)
- expect(Gitlab::Database).to receive(:db_config_share_with).exactly(3).times.and_return(nil, 'main', 'main')
- else
- expect(Gitlab::Database).to receive(:db_config_share_with).twice.and_return(nil, 'main')
- end
+ expect(Gitlab::Database).to receive(:db_config_share_with).exactly(3).times.and_return(nil, 'main', 'main')
expect { |b| described_class.each_database_connection(include_shared: false, &b) }
.to yield_successive_args([ActiveRecord::Base.connection, 'main'])
@@ -79,7 +88,7 @@ RSpec.describe Gitlab::Database::EachDatabase do
let(:model2) { Class.new(Gitlab::Database::SharedModel) }
before do
- allow(Gitlab::Database).to receive(:database_base_models)
+ allow(Gitlab::Database).to receive(:database_base_models_with_gitlab_shared)
.and_return({ main: ActiveRecord::Base, ci: Ci::ApplicationRecord }.with_indifferent_access)
end
@@ -136,7 +145,7 @@ RSpec.describe Gitlab::Database::EachDatabase do
let(:ci_model) { Class.new(Ci::ApplicationRecord) }
before do
- allow(Gitlab::Database).to receive(:database_base_models)
+ allow(Gitlab::Database).to receive(:database_base_models_with_gitlab_shared)
.and_return({ main: ActiveRecord::Base, ci: Ci::ApplicationRecord }.with_indifferent_access)
allow(main_model).to receive_message_chain('connection_db_config.name').and_return('main')
diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb
index 611b2fbad72..72950895022 100644
--- a/spec/lib/gitlab/database/gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb
@@ -3,26 +3,27 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::GitlabSchema do
describe '.tables_to_schema' do
- subject { described_class.tables_to_schema }
-
it 'all tables have assigned a known gitlab_schema' do
- is_expected.to all(
- match([be_a(String), be_in([:gitlab_internal, :gitlab_shared, :gitlab_main, :gitlab_ci])])
+ expect(described_class.tables_to_schema).to all(
+ match([be_a(String), be_in(Gitlab::Database.schemas_to_base_models.keys.map(&:to_sym))])
)
end
# This being run across different databases indirectly also tests
# a general consistency of structure across databases
- Gitlab::Database.database_base_models.each do |db_config_name, db_class|
- let(:db_data_sources) { db_class.connection.data_sources }
-
+ Gitlab::Database.database_base_models.select { |k, _| k != 'geo' }.each do |db_config_name, db_class|
context "for #{db_config_name} using #{db_class}" do
+ let(:db_data_sources) { db_class.connection.data_sources }
+
+ # The Geo database does not share the same structure as all decomposed databases
+ subject { described_class.tables_to_schema.select { |_, v| v != :gitlab_geo } }
+
it 'new data sources are added' do
missing_tables = db_data_sources.to_set - subject.keys
expect(missing_tables).to be_empty, \
"Missing table(s) #{missing_tables.to_a} not found in #{described_class}.tables_to_schema. " \
- "Any new tables must be added to lib/gitlab/database/gitlab_schemas.yml."
+ "Any new tables must be added to #{described_class::GITLAB_SCHEMAS_FILE}."
end
it 'non-existing data sources are removed' do
@@ -30,7 +31,7 @@ RSpec.describe Gitlab::Database::GitlabSchema do
expect(extra_tables).to be_empty, \
"Extra table(s) #{extra_tables.to_a} found in #{described_class}.tables_to_schema. " \
- "Any removed or renamed tables must be removed from lib/gitlab/database/gitlab_schemas.yml."
+ "Any removed or renamed tables must be removed from #{described_class::GITLAB_SCHEMAS_FILE}."
end
end
end
diff --git a/spec/lib/gitlab/database/loose_foreign_keys_spec.rb b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb
index ed11699e494..87a3e0f81e4 100644
--- a/spec/lib/gitlab/database/loose_foreign_keys_spec.rb
+++ b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb
@@ -63,19 +63,22 @@ RSpec.describe Gitlab::Database::LooseForeignKeys do
Gitlab::Database.schemas_to_base_models.fetch(parent_table_schema)
end
- it 'all `to_table` tables are present' do
+ it 'all `to_table` tables are present', :aggregate_failures do
definitions.each do |definition|
base_models_for(definition.to_table).each do |model|
- expect(model.connection).to be_table_exist(definition.to_table)
+ expect(model.connection).to be_table_exist(definition.to_table),
+ "Table #{definition.from_table} does not exist"
end
end
end
- it 'all `from_table` tables are present' do
+ it 'all `from_table` tables are present', :aggregate_failures do
definitions.each do |definition|
base_models_for(definition.from_table).each do |model|
- expect(model.connection).to be_table_exist(definition.from_table)
- expect(model.connection).to be_column_exist(definition.from_table, definition.column)
+ expect(model.connection).to be_table_exist(definition.from_table),
+ "Table #{definition.from_table} does not exist"
+ expect(model.connection).to be_column_exist(definition.from_table, definition.column),
+ "Column #{definition.column} in #{definition.from_table} does not exist"
end
end
end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index e09016b2b2b..3ccc3a17862 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -2477,6 +2477,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
describe '#backfill_iids' do
include MigrationsHelpers
+ let_it_be(:issue_base_type_enum) { 0 }
+ let_it_be(:issue_type) { table(:work_item_types).find_by(base_type: issue_base_type_enum) }
+
let(:issue_class) do
Class.new(ActiveRecord::Base) do
include AtomicInternalId
@@ -2490,6 +2493,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
scope: :project,
init: ->(s, _scope) { s&.project&.issues&.maximum(:iid) },
presence: false
+
+ before_validation -> { self.work_item_type_id = ::WorkItems::Type.default_issue_type.id }
end
end
@@ -2515,7 +2520,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'generates iids properly for models created after the migration when iids are backfilled' do
project = setup
- issue_a = issues.create!(project_id: project.id)
+ issue_a = issues.create!(project_id: project.id, work_item_type_id: issue_type.id)
model.backfill_iids('issues')
@@ -2528,14 +2533,14 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'generates iids properly for models created after the migration across multiple projects' do
project_a = setup
project_b = setup
- issues.create!(project_id: project_a.id)
- issues.create!(project_id: project_b.id)
- issues.create!(project_id: project_b.id)
+ issues.create!(project_id: project_a.id, work_item_type_id: issue_type.id)
+ issues.create!(project_id: project_b.id, work_item_type_id: issue_type.id)
+ issues.create!(project_id: project_b.id, work_item_type_id: issue_type.id)
model.backfill_iids('issues')
- issue_a = issue_class.create!(project_id: project_a.id)
- issue_b = issue_class.create!(project_id: project_b.id)
+ issue_a = issue_class.create!(project_id: project_a.id, work_item_type_id: issue_type.id)
+ issue_b = issue_class.create!(project_id: project_b.id, work_item_type_id: issue_type.id)
expect(issue_a.iid).to eq(2)
expect(issue_b.iid).to eq(3)
@@ -2545,7 +2550,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'generates an iid' do
project_a = setup
project_b = setup
- issue_a = issues.create!(project_id: project_a.id)
+ issue_a = issues.create!(project_id: project_a.id, work_item_type_id: issue_type.id)
model.backfill_iids('issues')
@@ -2559,8 +2564,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
context 'when a row already has an iid set in the database' do
it 'backfills iids' do
project = setup
- issue_a = issues.create!(project_id: project.id, iid: 1)
- issue_b = issues.create!(project_id: project.id, iid: 2)
+ issue_a = issues.create!(project_id: project.id, work_item_type_id: issue_type.id, iid: 1)
+ issue_b = issues.create!(project_id: project.id, work_item_type_id: issue_type.id, iid: 2)
model.backfill_iids('issues')
@@ -2571,9 +2576,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'backfills for multiple projects' do
project_a = setup
project_b = setup
- issue_a = issues.create!(project_id: project_a.id, iid: 1)
- issue_b = issues.create!(project_id: project_b.id, iid: 1)
- issue_c = issues.create!(project_id: project_a.id, iid: 2)
+ issue_a = issues.create!(project_id: project_a.id, work_item_type_id: issue_type.id, iid: 1)
+ issue_b = issues.create!(project_id: project_b.id, work_item_type_id: issue_type.id, iid: 1)
+ issue_c = issues.create!(project_id: project_a.id, work_item_type_id: issue_type.id, iid: 2)
model.backfill_iids('issues')
diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
index f3414727245..5bfb2516ba1 100644
--- a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
@@ -173,17 +173,6 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to be_finished
end
-
- context 'when within transaction' do
- before do
- allow(migration).to receive(:transaction_open?).and_return(true)
- end
-
- it 'does raise an exception' do
- expect { migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)}
- .to raise_error /`queue_batched_background_migration` cannot be run inside a transaction./
- end
- end
end
end
@@ -301,12 +290,8 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
end
describe '#delete_batched_background_migration' do
- let(:transaction_open) { false }
-
before do
expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
-
- allow(migration).to receive(:transaction_open?).and_return(transaction_open)
end
context 'when migration exists' do
@@ -360,15 +345,6 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
end.not_to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }
end
end
-
- context 'when within transaction' do
- let(:transaction_open) { true }
-
- it 'raises an exception' do
- expect { migration.delete_batched_background_migration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]]) }
- .to raise_error /`#delete_batched_background_migration` cannot be run inside a transaction./
- end
- end
end
describe '#gitlab_schema_from_context' do
diff --git a/spec/lib/gitlab/database/migrations/reestablished_connection_stack_spec.rb b/spec/lib/gitlab/database/migrations/reestablished_connection_stack_spec.rb
index d197f39be40..c6327de98d1 100644
--- a/spec/lib/gitlab/database/migrations/reestablished_connection_stack_spec.rb
+++ b/spec/lib/gitlab/database/migrations/reestablished_connection_stack_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::Database::Migrations::ReestablishedConnectionStack do
end
describe '#with_restored_connection_stack' do
- Gitlab::Database.database_base_models.each do |db_config_name, _|
+ Gitlab::Database.database_base_models_with_gitlab_shared.each do |db_config_name, _|
context db_config_name do
it_behaves_like "reconfigures connection stack", db_config_name do
it 'does restore connection hierarchy' do
diff --git a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
index 2f3d44f6f8f..f1f72d71e1a 100644
--- a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
@@ -68,10 +68,10 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
end
context 'with multiple jobs to run' do
- it 'runs all jobs created within the last 48 hours' do
+ it 'runs all jobs created within the last 3 hours' do
old_migration = define_background_migration(migration_name)
- travel 3.days
+ travel 4.hours
new_migration = define_background_migration('NewMigration') { travel 1.second }
migration.queue_batched_background_migration('NewMigration', table_name, :id,
diff --git a/spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb b/spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb
new file mode 100644
index 00000000000..c1ac8f0c9cd
--- /dev/null
+++ b/spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PostgresAutovacuumActivity, type: :model do
+ include Database::DatabaseHelpers
+
+ it { is_expected.to be_a Gitlab::Database::SharedModel }
+
+ describe '.for_tables' do
+ subject { described_class.for_tables(tables) }
+
+ let(:tables) { %w[foo test] }
+
+ before do
+ swapout_view_for_table(:postgres_autovacuum_activity)
+
+ # unrelated activity on a table outside the queried set; it must not be returned
+ create(:postgres_autovacuum_activity, table: 'bar')
+
+ tables.each do |table|
+ create(:postgres_autovacuum_activity, table: table)
+ end
+
+ expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary).and_yield
+ end
+
+ it 'returns autovacuum activity for the queried tables' do
+ expect(subject.map(&:table).sort).to eq(tables)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb
index 0c576505e07..976b9896dfa 100644
--- a/spec/lib/gitlab/database/reindexing_spec.rb
+++ b/spec/lib/gitlab/database/reindexing_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Database::Reindexing do
include Database::DatabaseHelpers
describe '.invoke' do
- let(:databases) { Gitlab::Database.database_base_models }
+ let(:databases) { Gitlab::Database.database_base_models_with_gitlab_shared }
let(:databases_count) { databases.count }
it 'cleans up any leftover indexes' do
diff --git a/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
index e676e5fe034..68c29bad287 100644
--- a/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
+++ b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
@@ -38,7 +38,11 @@ RSpec.describe Gitlab::DatabaseImporters::InstanceAdministrators::CreateGroup do
end
end
- context 'with application settings and admin users', :do_not_mock_admin_mode_setting do
+ context(
+ 'with application settings and admin users',
+ :do_not_mock_admin_mode_setting,
+ :do_not_stub_snowplow_by_default
+ ) do
let(:group) { result[:group] }
let(:application_setting) { Gitlab::CurrentSettings.current_application_settings }
@@ -109,7 +113,7 @@ RSpec.describe Gitlab::DatabaseImporters::InstanceAdministrators::CreateGroup do
admin2 = create(:user, :admin)
existing_group.add_owner(user)
- existing_group.add_users([admin1, admin2], Gitlab::Access::MAINTAINER)
+ existing_group.add_members([admin1, admin2], Gitlab::Access::MAINTAINER)
application_setting.instance_administrators_group_id = existing_group.id
end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 064613074cd..452a662bdcb 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -337,7 +337,7 @@ RSpec.describe Gitlab::Database do
let(:model2) { Class.new(base_model) }
before do
- allow(described_class).to receive(:database_base_models)
+ allow(described_class).to receive(:database_base_models_using_load_balancing)
.and_return({ model1: model1, model2: model2 }.with_indifferent_access)
end
diff --git a/spec/lib/gitlab/dependency_linker/base_linker_spec.rb b/spec/lib/gitlab/dependency_linker/base_linker_spec.rb
index 678d4a90e8d..2811bc859da 100644
--- a/spec/lib/gitlab/dependency_linker/base_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker/base_linker_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe Gitlab::DependencyLinker::BaseLinker do
it 'only converts valid links' do
expect(subject).to eq(
<<~CONTENT
- <span><span>#{link('http://')}</span><span>#{link('\n', url: '%5Cn')}</span><span>#{link('javascript:alert(1)', url: nil)}</span></span>
+ <span><span>#{link('http://', url: nil)}</span><span>#{link('\n', url: nil)}</span><span>#{link('javascript:alert(1)', url: nil)}</span></span>
<span><span>#{link('https://gitlab.com/gitlab-org/gitlab')}</span></span>
CONTENT
)
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index 34f4bdde3b5..28557aab830 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -129,6 +129,14 @@ RSpec.describe Gitlab::Diff::File do
expect(diff_file.rendered).to be_kind_of(Gitlab::Diff::Rendered::Notebook::DiffFile)
end
+ context 'when collapsed' do
+ it 'is nil' do
+ expect(diff).to receive(:collapsed?).and_return(true)
+
+ expect(diff_file.rendered).to be_nil
+ end
+ end
+
context 'when too large' do
it 'is nil' do
expect(diff).to receive(:too_large?).and_return(true)
diff --git a/spec/lib/gitlab/diff/formatters/image_formatter_spec.rb b/spec/lib/gitlab/diff/formatters/image_formatter_spec.rb
index 579776d44aa..73c0d0dba88 100644
--- a/spec/lib/gitlab/diff/formatters/image_formatter_spec.rb
+++ b/spec/lib/gitlab/diff/formatters/image_formatter_spec.rb
@@ -10,7 +10,6 @@ RSpec.describe Gitlab::Diff::Formatters::ImageFormatter do
head_sha: 789,
old_path: 'old_image.png',
new_path: 'new_image.png',
- file_identifier_hash: '777',
position_type: 'image'
}
end
diff --git a/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb b/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
index b6bdc5ff493..290585d0991 100644
--- a/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
+++ b/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
@@ -10,7 +10,6 @@ RSpec.describe Gitlab::Diff::Formatters::TextFormatter do
head_sha: 789,
old_path: 'old_path.txt',
new_path: 'new_path.txt',
- file_identifier_hash: '777',
line_range: nil
}
end
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index e643b58ee32..5350dda5fb2 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
- let(:merge_request) { create(:merge_request_with_diffs) }
+ let_it_be(:merge_request) { create(:merge_request_with_diffs) }
+
let(:diff_hash) do
{ ".gitignore-false-false-false" =>
[{ line_code: nil, rich_text: nil, text: "@@ -17,3 +17,4 @@ rerun.txt", type: "match", index: 0, old_pos: 17, new_pos: 17 },
@@ -229,10 +230,10 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
end
describe 'metrics' do
- let(:transaction) { Gitlab::Metrics::WebTransaction.new({} ) }
+ let(:transaction) { Gitlab::Metrics::WebTransaction.new({}) }
before do
- allow(cache).to receive(:current_transaction).and_return(transaction)
+ allow(::Gitlab::Metrics::WebTransaction).to receive(:current).and_return(transaction)
end
it 'observes :gitlab_redis_diff_caching_memory_usage_bytes' do
@@ -241,6 +242,18 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
cache.write_if_empty
end
+
+ it 'records hit ratio metrics' do
+ expect(transaction)
+ .to receive(:increment).with(:gitlab_redis_diff_caching_requests_total).exactly(5).times
+ expect(transaction)
+ .to receive(:increment).with(:gitlab_redis_diff_caching_hits_total).exactly(4).times
+
+ 5.times do
+ cache = described_class.new(merge_request.diffs)
+ cache.write_if_empty
+ end
+ end
end
describe '#key' do
diff --git a/spec/lib/gitlab/diff/position_spec.rb b/spec/lib/gitlab/diff/position_spec.rb
index c9a20f40462..bb3522eb579 100644
--- a/spec/lib/gitlab/diff/position_spec.rb
+++ b/spec/lib/gitlab/diff/position_spec.rb
@@ -574,86 +574,6 @@ RSpec.describe Gitlab::Diff::Position do
end
end
- describe '#find_diff_file_from' do
- context "position for a diff file that has changed from symlink to regular file" do
- let(:commit) { project.commit("81e6355ce4e1544a3524b230952c12455de0777b") }
-
- let(:old_symlink_file_identifier_hash) { "bfa430463f33619872d52a6b85ced59c973e42dc" }
- let(:new_regular_file_identifier_hash) { "e25b60c2e5ffb977d2b1431b96c6f7800c3c3529" }
- let(:file_identifier_hash) { new_regular_file_identifier_hash }
-
- let(:args) do
- {
- file_identifier_hash: file_identifier_hash,
- old_path: "symlink",
- new_path: "symlink",
- old_line: nil,
- new_line: 1,
- diff_refs: commit.diff_refs
- }
- end
-
- let(:diffable) { commit.diff_refs.compare_in(project) }
-
- subject(:diff_file) { described_class.new(args).find_diff_file_from(diffable) }
-
- context 'when file_identifier_hash is disabled' do
- before do
- stub_feature_flags(file_identifier_hash: false)
- end
-
- it "returns the first diff file" do
- expect(diff_file.file_identifier_hash).to eq(old_symlink_file_identifier_hash)
- end
- end
-
- context 'when file_identifier_hash is enabled' do
- before do
- stub_feature_flags(file_identifier_hash: true)
- end
-
- context 'for new regular file' do
- it "returns the correct diff file" do
- expect(diff_file.file_identifier_hash).to eq(new_regular_file_identifier_hash)
- end
- end
-
- context 'for old symlink file' do
- let(:args) do
- {
- file_identifier_hash: old_symlink_file_identifier_hash,
- old_path: "symlink",
- new_path: "symlink",
- old_line: 1,
- new_line: nil,
- diff_refs: commit.diff_refs
- }
- end
-
- it "returns the correct diff file" do
- expect(diff_file.file_identifier_hash).to eq(old_symlink_file_identifier_hash)
- end
- end
-
- context 'when file_identifier_hash is missing' do
- let(:file_identifier_hash) { nil }
-
- it "returns the first diff file" do
- expect(diff_file.file_identifier_hash).to eq(old_symlink_file_identifier_hash)
- end
- end
-
- context 'when file_identifier_hash cannot be found' do
- let(:file_identifier_hash) { "missingidentifier" }
-
- it "returns nil" do
- expect(diff_file).to be_nil
- end
- end
- end
- end
- end
-
describe '#==' do
let(:commit) { project.commit("570e7b2abdd848b95f2f578043fc23bd6f6fd24d") }
diff --git a/spec/lib/gitlab/diff/position_tracer/image_strategy_spec.rb b/spec/lib/gitlab/diff/position_tracer/image_strategy_spec.rb
index 1414056ad6a..563480d214b 100644
--- a/spec/lib/gitlab/diff/position_tracer/image_strategy_spec.rb
+++ b/spec/lib/gitlab/diff/position_tracer/image_strategy_spec.rb
@@ -234,118 +234,5 @@ RSpec.describe Gitlab::Diff::PositionTracer::ImageStrategy do
end
end
end
-
- describe 'symlink scenarios' do
- let(:new_file) { old_file_status == :new }
- let(:deleted_file) { old_file_status == :deleted }
- let(:renamed_file) { old_file_status == :renamed }
-
- let(:file_identifier) { "#{file_name}-#{new_file}-#{deleted_file}-#{renamed_file}" }
- let(:file_identifier_hash) { Digest::SHA1.hexdigest(file_identifier) }
- let(:old_position) { position(old_path: file_name, new_path: file_name, position_type: 'image', file_identifier_hash: file_identifier_hash) }
-
- let(:update_file_commit) do
- initial_commit
-
- update_file(
- branch_name,
- file_name,
- Base64.encode64('morecontent')
- )
- end
-
- let(:delete_file_commit) do
- initial_commit
-
- delete_file(branch_name, file_name)
- end
-
- let(:create_second_file_commit) do
- initial_commit
-
- create_file(
- branch_name,
- second_file_name,
- Base64.encode64('morecontent')
- )
- end
-
- before do
- stub_feature_flags(file_identifier_hash: true)
- end
-
- describe 'from symlink to image' do
- let(:initial_commit) { project.commit('a19c7f9a147e35e535c797cf148d29c24dac5544') }
- let(:symlink_to_image_commit) { project.commit('8cfca8420812e5bd7479aa32cf33e0c95a3ca576') }
- let(:branch_name) { 'diff-files-symlink-to-image' }
- let(:file_name) { 'symlink-to-image.png' }
-
- context "when the old position is on the new image file" do
- let(:old_file_status) { :new }
-
- context "when the image file's content was unchanged between the old and the new diff" do
- let(:old_diff_refs) { diff_refs(initial_commit, symlink_to_image_commit) }
- let(:new_diff_refs) { diff_refs(initial_commit, create_second_file_commit) }
-
- it "returns the new position" do
- expect_new_position(
- old_path: file_name,
- new_path: file_name
- )
- end
- end
-
- context "when the image file's content was changed between the old and the new diff" do
- let(:old_diff_refs) { diff_refs(initial_commit, symlink_to_image_commit) }
- let(:new_diff_refs) { diff_refs(initial_commit, update_file_commit) }
- let(:change_diff_refs) { diff_refs(symlink_to_image_commit, update_file_commit) }
-
- it "returns the position of the change" do
- expect_change_position(
- old_path: file_name,
- new_path: file_name
- )
- end
- end
-
- context "when the image file was removed between the old and the new diff" do
- let(:old_diff_refs) { diff_refs(initial_commit, symlink_to_image_commit) }
- let(:new_diff_refs) { diff_refs(initial_commit, delete_file_commit) }
- let(:change_diff_refs) { diff_refs(symlink_to_image_commit, delete_file_commit) }
-
- it "returns the position of the change" do
- expect_change_position(
- old_path: file_name,
- new_path: file_name
- )
- end
- end
- end
- end
-
- describe 'from image to symlink' do
- let(:initial_commit) { project.commit('d10dcdfbbb2b59a959a5f5d66a4adf28f0ea4008') }
- let(:image_to_symlink_commit) { project.commit('3e94fdaa60da8aed38401b91bc56be70d54ca424') }
- let(:branch_name) { 'diff-files-image-to-symlink' }
- let(:file_name) { 'image-to-symlink.png' }
-
- context "when the old position is on the added image file" do
- let(:old_file_status) { :new }
-
- context "when the image file gets changed to a symlink between the old and the new diff" do
- let(:old_diff_refs) { diff_refs(initial_commit.parent, initial_commit) }
- let(:new_diff_refs) { diff_refs(initial_commit.parent, image_to_symlink_commit) }
- let(:change_diff_refs) { diff_refs(initial_commit, image_to_symlink_commit) }
-
- it "returns the position of the change" do
- expect_change_position(
- old_path: file_name,
- new_path: file_name
- )
- end
- end
- end
- end
- end
end
end
diff --git a/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb b/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb
index ea56a87dec2..2b21084d8e5 100644
--- a/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb
+++ b/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb
@@ -1860,143 +1860,5 @@ RSpec.describe Gitlab::Diff::PositionTracer::LineStrategy, :clean_gitlab_redis_c
end
end
end
-
- describe 'symlink scenarios' do
- let(:new_file) { old_file_status == :new }
- let(:deleted_file) { old_file_status == :deleted }
- let(:renamed_file) { old_file_status == :renamed }
-
- let(:file_identifier) { "#{file_name}-#{new_file}-#{deleted_file}-#{renamed_file}" }
- let(:file_identifier_hash) { Digest::SHA1.hexdigest(file_identifier) }
-
- let(:update_line_commit) do
- update_file(
- branch_name,
- file_name,
- <<-CONTENT.strip_heredoc
- A
- BB
- C
- CONTENT
- )
- end
-
- let(:delete_file_commit) do
- delete_file(branch_name, file_name)
- end
-
- let(:create_second_file_commit) do
- create_file(
- branch_name,
- second_file_name,
- <<-CONTENT.strip_heredoc
- D
- E
- CONTENT
- )
- end
-
- before do
- stub_feature_flags(file_identifier_hash: true)
- end
-
- describe 'from symlink to text' do
- let(:initial_commit) { project.commit('0e5b363105e9176a77bac94d7ff6d8c4fb35c3eb') }
- let(:symlink_to_text_commit) { project.commit('689815e617abc6889f1fded4834d2dd7d942a58e') }
- let(:branch_name) { 'diff-files-symlink-to-text' }
- let(:file_name) { 'symlink-to-text.txt' }
- let(:old_position) { position(old_path: file_name, new_path: file_name, new_line: 3, file_identifier_hash: file_identifier_hash) }
-
- before do
- create_branch('diff-files-symlink-to-text-test', branch_name)
- end
-
- context "when the old position is on the new text file" do
- let(:old_file_status) { :new }
-
- context "when the text file's content was unchanged between the old and the new diff" do
- let(:old_diff_refs) { diff_refs(initial_commit, symlink_to_text_commit) }
- let(:new_diff_refs) { diff_refs(initial_commit, create_second_file_commit) }
-
- it "returns the new position" do
- expect_new_position(
- new_path: old_position.new_path,
- new_line: old_position.new_line
- )
- end
- end
-
- context "when the text file's content has change, but the line was unchanged between the old and the new diff" do
- let(:old_diff_refs) { diff_refs(initial_commit, symlink_to_text_commit) }
- let(:new_diff_refs) { diff_refs(initial_commit, update_line_commit) }
-
- it "returns the new position" do
- expect_new_position(
- new_path: old_position.new_path,
- new_line: old_position.new_line
- )
- end
- end
-
- context "when the text file's line was changed between the old and the new diff" do
- let(:old_position) { position(old_path: file_name, new_path: file_name, new_line: 2, file_identifier_hash: file_identifier_hash) }
-
- let(:old_diff_refs) { diff_refs(initial_commit, symlink_to_text_commit) }
- let(:new_diff_refs) { diff_refs(initial_commit, update_line_commit) }
- let(:change_diff_refs) { diff_refs(symlink_to_text_commit, update_line_commit) }
-
- it "returns the position of the change" do
- expect_change_position(
- old_path: file_name,
- new_path: file_name,
- old_line: 2,
- new_line: nil
- )
- end
- end
-
- context "when the text file was removed between the old and the new diff" do
- let(:old_diff_refs) { diff_refs(initial_commit, symlink_to_text_commit) }
- let(:new_diff_refs) { diff_refs(initial_commit, delete_file_commit) }
- let(:change_diff_refs) { diff_refs(symlink_to_text_commit, delete_file_commit) }
-
- it "returns the position of the change" do
- expect_change_position(
- old_path: file_name,
- new_path: file_name,
- old_line: 3,
- new_line: nil
- )
- end
- end
- end
-
- describe 'from text to symlink' do
- let(:initial_commit) { project.commit('3db7bd90bab8ce8f02c9818590b84739a2e97230') }
- let(:text_to_symlink_commit) { project.commit('5e2c2708c2e403dece5dd25759369150aac51644') }
- let(:branch_name) { 'diff-files-text-to-symlink' }
- let(:file_name) { 'text-to-symlink.txt' }
-
- context "when the position is on the added text file" do
- let(:old_file_status) { :new }
-
- context "when the text file gets changed to a symlink between the old and the new diff" do
- let(:old_diff_refs) { diff_refs(initial_commit.parent, initial_commit) }
- let(:new_diff_refs) { diff_refs(initial_commit.parent, text_to_symlink_commit) }
- let(:change_diff_refs) { diff_refs(initial_commit, text_to_symlink_commit) }
-
- it "returns the position of the change" do
- expect_change_position(
- old_path: file_name,
- new_path: file_name,
- old_line: 3,
- new_line: nil
- )
- end
- end
- end
- end
- end
- end
end
end
diff --git a/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb b/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
index cb046548880..42ab2d1d063 100644
--- a/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
+++ b/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFileHelper do
where(:case, :transformed_blocks, :result) do
'if transformed diff is empty' | [] | 0
'if the transformed line does not map to any in the original file' | [{ source_line: nil }] | 0
- 'if the transformed line maps to a line in the source file' | [{ source_line: 2 }] | 3
+ 'if the transformed line maps to a line in the source file' | [{ source_line: 3 }] | 3
end
with_them do
@@ -81,8 +81,8 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFileHelper do
let(:blocks) do
{
- from: [0, 2, 1, nil, nil, 3].map { |i| { source_line: i } },
- to: [0, 1, nil, 2, nil, 3].map { |i| { source_line: i } }
+ from: [1, 3, 2, nil, nil, 4].map { |i| { source_line: i } },
+ to: [1, 2, nil, 3, nil, 4].map { |i| { source_line: i } }
}
end
diff --git a/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
index c38684a6dc3..b5137f9db6b 100644
--- a/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
+++ b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
@@ -144,7 +144,7 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
context 'has image' do
it 'replaces rich text with img to the embedded image' do
- expect(nb_file.highlighted_diff_lines[58].rich_text).to include('<img')
+ expect(nb_file.highlighted_diff_lines[56].rich_text).to include('<img')
end
it 'adds image to src' do
@@ -159,11 +159,11 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
let(:commit) { project.commit("4963fefc990451a8ad34289ce266b757456fc88c") }
it 'prevents injected html to be rendered as html' do
- expect(nb_file.highlighted_diff_lines[45].rich_text).not_to include('<div>Hello')
+ expect(nb_file.highlighted_diff_lines[43].rich_text).not_to include('<div>Hello')
end
it 'keeps the injected html as part of the string' do
- expect(nb_file.highlighted_diff_lines[45].rich_text).to end_with('/div&gt;">')
+ expect(nb_file.highlighted_diff_lines[43].rich_text).to end_with('/div&gt;">')
end
end
end
diff --git a/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb b/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb
deleted file mode 100644
index f93c1aa1974..00000000000
--- a/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb
+++ /dev/null
@@ -1,97 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Elasticsearch::Logs::Lines do
- let(:client) { Elasticsearch::Transport::Client }
-
- let(:es_message_1) { { timestamp: "2019-12-13T14:35:34.034Z", pod: "production-6866bc8974-m4sk4", message: "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13" } }
- let(:es_message_2) { { timestamp: "2019-12-13T14:35:35.034Z", pod: "production-6866bc8974-m4sk4", message: "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13" } }
- let(:es_message_3) { { timestamp: "2019-12-13T14:35:36.034Z", pod: "production-6866bc8974-m4sk4", message: "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13" } }
- let(:es_message_4) { { timestamp: "2019-12-13T14:35:37.034Z", pod: "production-6866bc8974-m4sk4", message: "- -\u003e /" } }
-
- let(:es_response) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/logs_response.json')) }
-
- subject { described_class.new(client) }
-
- let(:namespace) { "autodevops-deploy-9-production" }
- let(:pod_name) { "production-6866bc8974-m4sk4" }
- let(:container_name) { "auto-deploy-app" }
- let(:search) { "foo +bar "}
- let(:start_time) { "2019-12-13T14:35:34.034Z" }
- let(:end_time) { "2019-12-13T14:35:34.034Z" }
- let(:cursor) { "9999934,1572449784442" }
-
- let(:body) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query.json')) }
- let(:body_with_container) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_container.json')) }
- let(:body_with_search) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_search.json')) }
- let(:body_with_times) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_times.json')) }
- let(:body_with_start_time) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_start_time.json')) }
- let(:body_with_end_time) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_end_time.json')) }
- let(:body_with_cursor) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_cursor.json')) }
- let(:body_with_filebeat_6) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_filebeat_6.json')) }
-
- RSpec::Matchers.define :a_hash_equal_to_json do |expected|
- match do |actual|
- actual.as_json == expected
- end
- end
-
- describe '#pod_logs' do
- it 'returns the logs as an array' do
- expect(client).to receive(:search).with(body: a_hash_equal_to_json(body)).and_return(es_response)
-
- result = subject.pod_logs(namespace, pod_name: pod_name)
- expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
- end
-
- it 'can further filter the logs by container name' do
- expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_container)).and_return(es_response)
-
- result = subject.pod_logs(namespace, pod_name: pod_name, container_name: container_name)
- expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
- end
-
- it 'can further filter the logs by search' do
- expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_search)).and_return(es_response)
-
- result = subject.pod_logs(namespace, pod_name: pod_name, search: search)
- expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
- end
-
- it 'can further filter the logs by start_time and end_time' do
- expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_times)).and_return(es_response)
-
- result = subject.pod_logs(namespace, pod_name: pod_name, start_time: start_time, end_time: end_time)
- expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
- end
-
- it 'can further filter the logs by only start_time' do
- expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_start_time)).and_return(es_response)
-
- result = subject.pod_logs(namespace, pod_name: pod_name, start_time: start_time)
- expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
- end
-
- it 'can further filter the logs by only end_time' do
- expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_end_time)).and_return(es_response)
-
- result = subject.pod_logs(namespace, pod_name: pod_name, end_time: end_time)
- expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
- end
-
- it 'can search after a cursor' do
- expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_cursor)).and_return(es_response)
-
- result = subject.pod_logs(namespace, pod_name: pod_name, cursor: cursor)
- expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
- end
-
- it 'can search on filebeat 6' do
- expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_filebeat_6)).and_return(es_response)
-
- result = subject.pod_logs(namespace, pod_name: pod_name, chart_above_v2: false)
- expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
- end
- end
-end
diff --git a/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb b/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb
deleted file mode 100644
index 07fa0980d36..00000000000
--- a/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Elasticsearch::Logs::Pods do
- let(:client) { Elasticsearch::Transport::Client }
-
- let(:es_query) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/pods_query.json'), symbolize_names: true) }
- let(:es_response) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/pods_response.json')) }
- let(:namespace) { "autodevops-deploy-9-production" }
-
- subject { described_class.new(client) }
-
- describe '#pods' do
- it 'returns the pods' do
- expect(client).to receive(:search).with(body: es_query).and_return(es_response)
-
- result = subject.pods(namespace)
- expect(result).to eq([
- {
- name: "runner-gitlab-runner-7bbfb5dcb5-p6smb",
- container_names: %w[runner-gitlab-runner]
- },
- {
- name: "elastic-stack-elasticsearch-master-1",
- container_names: %w[elasticsearch chown sysctl]
- },
- {
- name: "ingress-nginx-ingress-controller-76449bcc8d-8qgl6",
- container_names: %w[nginx-ingress-controller]
- }
- ])
- end
- end
-end
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 6e7806c5d53..d0aba70081b 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -52,14 +52,6 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
expect(new_issue.issue_email_participants.first.email).to eq(author_email)
end
- it 'attaches existing CRM contact' do
- contact = create(:contact, group: group, email: author_email)
- receiver.execute
- new_issue = Issue.last
-
- expect(new_issue.issue_customer_relations_contacts.last.contact).to eq(contact)
- end
-
it 'sends thank you email' do
expect { receiver.execute }.to have_enqueued_job.on_queue('mailers')
end
@@ -77,6 +69,16 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
context 'when everything is fine' do
it_behaves_like 'a new issue request'
+ it 'attaches existing CRM contacts' do
+ contact = create(:contact, group: group, email: author_email)
+ contact2 = create(:contact, group: group, email: "cc@example.com")
+ contact3 = create(:contact, group: group, email: "kk@example.org")
+ receiver.execute
+ new_issue = Issue.last
+
+ expect(new_issue.issue_customer_relations_contacts.map(&:contact)).to contain_exactly(contact, contact2, contact3)
+ end
+
context 'with legacy incoming email address' do
let(:email_raw) { fixture_file('emails/service_desk_legacy.eml') }
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb
index dfa18c27d5e..ab6b1cd6171 100644
--- a/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb
+++ b/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb
@@ -99,7 +99,6 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Base do
:verify | true
:trial | true
:team | true
- :experience | true
end
with_them do
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/experience_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/experience_spec.rb
deleted file mode 100644
index 8cd2345822e..00000000000
--- a/spec/lib/gitlab/email/message/in_product_marketing/experience_spec.rb
+++ /dev/null
@@ -1,115 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Email::Message::InProductMarketing::Experience do
- let_it_be(:group) { build(:group) }
- let_it_be(:user) { build(:user) }
-
- subject(:message) { described_class.new(group: group, user: user, series: series)}
-
- describe 'public methods' do
- context 'with series 0' do
- let(:series) { 0 }
-
- it 'returns value for series', :aggregate_failures do
- expect(message.subject_line).to be_present
- expect(message.tagline).to be_nil
- expect(message.title).to be_present
- expect(message.subtitle).to be_present
- expect(message.body_line1).to be_present
- expect(message.body_line2).to be_present
- expect(message.cta_text).to be_nil
- end
-
- describe 'feedback URL' do
- before do
- allow(message).to receive(:onboarding_progress).and_return(1)
- allow(message).to receive(:show_invite_link).and_return(true)
- end
-
- subject do
- message.feedback_link(1)
- end
-
- it { is_expected.to start_with(Gitlab::Saas.com_url) }
-
- context 'when in development' do
- let(:root_url) { 'http://example.com' }
-
- before do
- allow(message).to receive(:root_url).and_return(root_url)
- stub_rails_env('development')
- end
-
- it { is_expected.to start_with(root_url) }
- end
- end
-
- describe 'feedback URL show_invite_link query param' do
- let(:user_access) { GroupMember::DEVELOPER }
- let(:preferred_language) { 'en' }
-
- before do
- allow(message).to receive(:onboarding_progress).and_return(1)
- allow(group).to receive(:max_member_access_for_user).and_return(user_access)
- allow(user).to receive(:preferred_language).and_return(preferred_language)
- end
-
- subject do
- uri = URI.parse(message.feedback_link(1))
- Rack::Utils.parse_query(uri.query).with_indifferent_access[:show_invite_link]
- end
-
- it { is_expected.to eq('true') }
-
- context 'with less than developer access' do
- let(:user_access) { GroupMember::GUEST }
-
- it { is_expected.to eq('false') }
- end
-
- context 'with preferred language other than English' do
- let(:preferred_language) { 'nl' }
-
- it { is_expected.to eq('false') }
- end
- end
-
- describe 'feedback URL show_incentive query param' do
- let(:show_invite_link) { true }
- let(:member_count) { 2 }
- let(:query) do
- uri = URI.parse(message.feedback_link(1))
- Rack::Utils.parse_query(uri.query).with_indifferent_access
- end
-
- before do
- allow(message).to receive(:onboarding_progress).and_return(1)
- allow(message).to receive(:show_invite_link).and_return(show_invite_link)
- allow(group).to receive(:member_count).and_return(member_count)
- end
-
- subject { query[:show_incentive] }
-
- it { is_expected.to eq('true') }
-
- context 'with only one member' do
- let(:member_count) { 1 }
-
- it "is not present" do
- expect(query).not_to have_key(:show_incentive)
- end
- end
-
- context 'show_invite_link is false' do
- let(:show_invite_link) { false }
-
- it "is not present" do
- expect(query).not_to have_key(:show_incentive)
- end
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing_spec.rb
index 40351bef8b9..1c59d9c8208 100644
--- a/spec/lib/gitlab/email/message/in_product_marketing_spec.rb
+++ b/spec/lib/gitlab/email/message/in_product_marketing_spec.rb
@@ -17,7 +17,6 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing do
:verify | described_class::Verify
:trial | described_class::Trial
:team | described_class::Team
- :experience | described_class::Experience
end
with_them do
diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb
index 4c1fbb93c13..b0c67cdafe1 100644
--- a/spec/lib/gitlab/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/encoding_helper_spec.rb
@@ -45,16 +45,26 @@ RSpec.describe Gitlab::EncodingHelper do
end
context 'with corrupted diff' do
+ let(:project) { create(:project, :empty_repo) }
+ let(:repository) { project.repository }
+ let(:content) { fixture_file('encoding/Japanese.md') }
let(:corrupted_diff) do
- with_empty_bare_repository do |repo|
- content = File.read(Rails.root.join(
- 'spec/fixtures/encoding/Japanese.md').to_s)
- commit_a = commit(repo, 'Japanese.md', content)
- commit_b = commit(repo, 'Japanese.md',
- content.sub('[TODO: Link]', '[現在作業中です: Link]'))
-
- repo.diff(commit_a, commit_b).each_line.map(&:content).join
- end
+ commit_a = repository.create_file(
+ project.creator,
+ 'Japanese.md',
+ content,
+ branch_name: 'HEAD',
+ message: 'Create Japanese.md'
+ )
+ commit_b = repository.update_file(
+ project.creator,
+ 'Japanese.md',
+ content.sub('[TODO: Link]', '[現在作業中です: Link]'),
+ branch_name: 'HEAD',
+ message: 'Update Japanese.md'
+ )
+
+ repository.diff(commit_a, commit_b).map(&:diff).join
end
let(:cleaned_diff) do
@@ -69,26 +79,6 @@ RSpec.describe Gitlab::EncodingHelper do
it 'does not corrupt data but remove invalid characters' do
expect(encoded_diff).to eq(cleaned_diff)
end
-
- def commit(repo, path, content)
- oid = repo.write(content, :blob)
- index = repo.index
-
- index.read_tree(repo.head.target.tree) unless repo.empty?
-
- index.add(path: path, oid: oid, mode: 0100644)
- user = { name: 'Test', email: 'test@example.com' }
-
- Rugged::Commit.create(
- repo,
- tree: index.write_tree(repo),
- author: user,
- committer: user,
- message: "Update #{path}",
- parents: repo.empty? ? [] : [repo.head.target].compact,
- update_ref: 'HEAD'
- )
- end
end
end
diff --git a/spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb b/spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb
new file mode 100644
index 00000000000..81e2a410962
--- /dev/null
+++ b/spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb
@@ -0,0 +1,436 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ErrorTracking::ErrorRepository::OpenApiStrategy do
+ include AfterNextHelpers
+
+ let(:project) { build_stubbed(:project) }
+ let(:api_exception) { ErrorTrackingOpenAPI::ApiError.new(code: 500, response_body: 'b' * 101) }
+
+ subject(:repository) { Gitlab::ErrorTracking::ErrorRepository.build(project) }
+
+ before do
+ # Disabled in spec_helper by default, so we need to enable it here.
+ stub_feature_flags(use_click_house_database_for_error_tracking: true)
+ end
+
+ shared_examples 'exception logging' do
+ it 'logs error' do
+ expect(Gitlab::AppLogger).to receive(:error).with({
+ 'open_api.http_code' => api_exception.code,
+ 'open_api.response_body' => api_exception.response_body.truncate(100)
+ })
+
+ subject
+ end
+ end
+
+ shared_examples 'no logging' do
+ it 'does not log anything' do
+ expect(Gitlab::AppLogger).not_to receive(:debug)
+ expect(Gitlab::AppLogger).not_to receive(:info)
+ expect(Gitlab::AppLogger).not_to receive(:error)
+ end
+ end
+
+ describe '#report_error' do
+ let(:params) do
+ {
+ name: 'anything',
+ description: 'anything',
+ actor: 'anything',
+ platform: 'anything',
+ environment: 'anything',
+ level: 'anything',
+ occurred_at: Time.zone.now,
+ payload: {}
+ }
+ end
+
+ subject { repository.report_error(**params) }
+
+ it 'is not implemented' do
+ expect { subject }.to raise_error(NotImplementedError, 'Use ingestion endpoint')
+ end
+ end
+
+ describe '#find_error' do
+ let(:error) { build(:error_tracking_open_api_error, project_id: project.id) }
+
+ subject { repository.find_error(error.fingerprint) }
+
+ before do
+ allow_next_instance_of(ErrorTrackingOpenAPI::ErrorsApi) do |open_api|
+ allow(open_api).to receive(:get_error).with(project.id, error.fingerprint)
+ .and_return(error)
+
+ allow(open_api).to receive(:list_events)
+ .with(project.id, error.fingerprint, { sort: 'occurred_at_asc', limit: 1 })
+ .and_return(list_events_asc)
+
+ allow(open_api).to receive(:list_events)
+ .with(project.id, error.fingerprint, { sort: 'occurred_at_desc', limit: 1 })
+ .and_return(list_events_desc)
+ end
+ end
+
+ context 'when request succeeds' do
+ context 'without events returned' do
+ let(:list_events_asc) { [] }
+ let(:list_events_desc) { [] }
+
+ include_examples 'no logging'
+
+ it 'returns detailed error' do
+ is_expected.to have_attributes(
+ id: error.fingerprint.to_s,
+ title: error.name,
+ message: error.description,
+ culprit: error.actor,
+ first_seen: error.first_seen_at.to_s,
+ last_seen: error.last_seen_at.to_s,
+ count: error.event_count,
+ user_count: error.approximated_user_count,
+ project_id: error.project_id,
+ status: error.status,
+ tags: { level: nil, logger: nil },
+ external_url: "http://localhost/#{project.full_path}/-/error_tracking/#{error.fingerprint}/details",
+ external_base_url: "http://localhost/#{project.full_path}",
+ integrated: true
+ )
+ end
+
+ it 'returns no first and last release version' do
+ is_expected.to have_attributes(
+ first_release_version: nil,
+ last_release_version: nil
+ )
+ end
+ end
+
+ context 'with events returned' do
+ let(:first_event) { build(:error_tracking_open_api_error_event, project_id: project.id) }
+ let(:first_release) { parse_json(first_event.payload).fetch('release') }
+ let(:last_event) { build(:error_tracking_open_api_error_event, :golang, project_id: project.id) }
+ let(:last_release) { parse_json(last_event.payload).fetch('release') }
+
+ let(:list_events_asc) { [first_event] }
+ let(:list_events_desc) { [last_event] }
+
+ include_examples 'no logging'
+
+ it 'returns first and last release version' do
+ expect(first_release).to be_present
+ expect(last_release).to be_present
+
+ is_expected.to have_attributes(
+ first_release_version: first_release,
+ last_release_version: last_release
+ )
+ end
+
+ def parse_json(content)
+ Gitlab::Json.parse(content)
+ end
+ end
+ end
+
+ context 'when request fails' do
+ before do
+ allow_next(ErrorTrackingOpenAPI::ErrorsApi).to receive(:get_error)
+ .with(project.id, error.fingerprint)
+ .and_raise(api_exception)
+ end
+
+ include_examples 'exception logging'
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#list_errors' do
+ let(:errors) { [] }
+ let(:response_with_info) { [errors, 200, headers] }
+ let(:result_errors) { result.first }
+ let(:result_pagination) { result.last }
+
+ let(:headers) do
+ {
+ 'link' => [
+ '<url?cursor=next_cursor&param>; rel="next"',
+ '<url?cursor=prev_cursor&param>; rel="prev"'
+ ].join(', ')
+ }
+ end
+
+ subject(:result) { repository.list_errors(**params) }
+
+ before do
+ allow_next(ErrorTrackingOpenAPI::ErrorsApi).to receive(:list_errors_with_http_info)
+ .with(project.id, kind_of(Hash))
+ .and_return(response_with_info)
+ end
+
+ context 'with errors' do
+ let(:limit) { 3 }
+ let(:params) { { limit: limit } }
+ let(:errors_size) { limit }
+ let(:errors) { build_list(:error_tracking_open_api_error, errors_size, project_id: project.id) }
+
+ include_examples 'no logging'
+
+ it 'maps errors to models' do
+ # All errors are identical
+ error = errors.first
+
+ expect(result_errors).to all(
+ have_attributes(
+ id: error.fingerprint.to_s,
+ title: error.name,
+ message: error.description,
+ culprit: error.actor,
+ first_seen: error.first_seen_at,
+ last_seen: error.last_seen_at,
+ status: error.status,
+ count: error.event_count,
+ user_count: error.approximated_user_count
+ ))
+ end
+
+ context 'when n errors are returned' do
+ let(:errors_size) { limit }
+
+ include_examples 'no logging'
+
+ it 'returns the number of errors' do
+ expect(result_errors.size).to eq(3)
+ end
+
+ it 'cursor links are preserved' do
+ expect(result_pagination).to have_attributes(
+ prev: 'prev_cursor',
+ next: 'next_cursor'
+ )
+ end
+ end
+
+ context 'when fewer errors than requested are returned' do
+ let(:errors_size) { limit - 1 }
+
+ include_examples 'no logging'
+
+ it 'returns the number of errors' do
+ expect(result_errors.size).to eq(2)
+ end
+
+ it 'cursor link for next is removed' do
+ expect(result_pagination).to have_attributes(
+ prev: 'prev_cursor',
+ next: nil
+ )
+ end
+ end
+ end
+
+ context 'with params' do
+ let(:params) do
+ {
+ filters: { status: 'resolved', something: 'different' },
+ query: 'search term',
+ sort: 'first_seen',
+ limit: 2,
+ cursor: 'abc'
+ }
+ end
+
+ include_examples 'no logging'
+
+ it 'passes provided params to client' do
+ passed_params = {
+ sort: 'first_seen_desc',
+ status: 'resolved',
+ query: 'search term',
+ cursor: 'abc',
+ limit: 2
+ }
+
+ expect_next(ErrorTrackingOpenAPI::ErrorsApi).to receive(:list_errors_with_http_info)
+ .with(project.id, passed_params)
+ .and_return(response_with_info)
+
+ subject
+ end
+ end
+
+ context 'without explicit params' do
+ let(:params) { {} }
+
+ include_examples 'no logging'
+
+ it 'passes default params to client' do
+ passed_params = {
+ sort: 'last_seen_desc',
+ limit: 20,
+ cursor: {}
+ }
+
+ expect_next(ErrorTrackingOpenAPI::ErrorsApi).to receive(:list_errors_with_http_info)
+ .with(project.id, passed_params)
+ .and_return(response_with_info)
+
+ subject
+ end
+ end
+
+ context 'when request fails' do
+ let(:params) { {} }
+
+ before do
+ allow_next(ErrorTrackingOpenAPI::ErrorsApi).to receive(:list_errors_with_http_info)
+ .with(project.id, kind_of(Hash))
+ .and_raise(api_exception)
+ end
+
+ include_examples 'exception logging'
+
+ specify do
+ expect(result_errors).to eq([])
+ expect(result_pagination).to have_attributes(
+ next: nil,
+ prev: nil
+ )
+ end
+ end
+ end
+
+ describe '#last_event_for' do
+ let(:params) { { sort: 'occurred_at_desc', limit: 1 } }
+ let(:event) { build(:error_tracking_open_api_error_event, project_id: project.id) }
+ let(:error) { build(:error_tracking_open_api_error, project_id: project.id, fingerprint: event.fingerprint) }
+
+ subject { repository.last_event_for(error.fingerprint) }
+
+ context 'when both event and error are returned' do
+ before do
+ allow_next_instance_of(ErrorTrackingOpenAPI::ErrorsApi) do |open_api|
+ allow(open_api).to receive(:list_events).with(project.id, error.fingerprint, params)
+ .and_return([event])
+
+ allow(open_api).to receive(:get_error).with(project.id, error.fingerprint)
+ .and_return(error)
+ end
+ end
+
+ include_examples 'no logging'
+
+ it 'returns mapped error event' do
+ is_expected.to have_attributes(
+ issue_id: event.fingerprint.to_s,
+ date_received: error.last_seen_at,
+ stack_trace_entries: kind_of(Array)
+ )
+ end
+ end
+
+ context 'when event is not returned' do
+ before do
+ allow_next(ErrorTrackingOpenAPI::ErrorsApi).to receive(:list_events)
+ .with(project.id, event.fingerprint, params)
+ .and_return([])
+ end
+
+ include_examples 'no logging'
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when list_events request fails' do
+ before do
+ allow_next(ErrorTrackingOpenAPI::ErrorsApi).to receive(:list_events)
+ .with(project.id, event.fingerprint, params)
+ .and_raise(api_exception)
+ end
+
+ include_examples 'exception logging'
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when error is not returned' do
+ before do
+ allow_next_instance_of(ErrorTrackingOpenAPI::ErrorsApi) do |open_api|
+ allow(open_api).to receive(:list_events).with(project.id, error.fingerprint, params)
+ .and_return([event])
+
+ allow(open_api).to receive(:get_error).with(project.id, error.fingerprint)
+ .and_return(nil)
+ end
+ end
+
+ include_examples 'no logging'
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when get_error request fails' do
+ before do
+ allow_next_instance_of(ErrorTrackingOpenAPI::ErrorsApi) do |open_api|
+ allow(open_api).to receive(:list_events).with(project.id, error.fingerprint, params)
+ .and_return([event])
+
+ allow(open_api).to receive(:get_error).with(project.id, error.fingerprint)
+ .and_raise(api_exception)
+ end
+ end
+
+ include_examples 'exception logging'
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#update_error' do
+ let(:error) { build(:error_tracking_open_api_error, project_id: project.id) }
+ let(:update_params) { { status: 'resolved' } }
+ let(:passed_body) { ErrorTrackingOpenAPI::ErrorUpdatePayload.new(update_params) }
+
+ subject { repository.update_error(error.fingerprint, **update_params) }
+
+ before do
+ allow_next(ErrorTrackingOpenAPI::ErrorsApi).to receive(:update_error)
+ .with(project.id, error.fingerprint, passed_body)
+ .and_return(:anything)
+ end
+
+ context 'when update succeeds' do
+ include_examples 'no logging'
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when update fails' do
+ before do
+ allow_next(ErrorTrackingOpenAPI::ErrorsApi).to receive(:update_error)
+ .with(project.id, error.fingerprint, passed_body)
+ .and_raise(api_exception)
+ end
+
+ include_examples 'exception logging'
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#dsn_url' do
+ let(:public_key) { 'abc' }
+ let(:config) { ErrorTrackingOpenAPI::Configuration.default }
+
+ subject { repository.dsn_url(public_key) }
+
+ it do
+ is_expected
+ .to eq("#{config.scheme}://#{public_key}@#{config.host}/errortracking/api/v1/projects/api/#{project.id}")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/error_tracking/processor/sanitizer_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/sanitizer_processor_spec.rb
new file mode 100644
index 00000000000..9673bfc5cd3
--- /dev/null
+++ b/spec/lib/gitlab/error_tracking/processor/sanitizer_processor_spec.rb
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ErrorTracking::Processor::SanitizerProcessor, :sentry do
+ describe '.call' do
+ let(:event) { Sentry.get_current_client.event_from_exception(exception) }
+ let(:result_hash) { described_class.call(event).to_hash }
+
+ before do
+ data.each do |key, value|
+ event.send("#{key}=", value)
+ end
+ end
+
+ after do
+ Sentry.get_current_scope.clear
+ end
+
+ context 'when event attributes contains sensitive information' do
+ let(:exception) { RuntimeError.new }
+ let(:data) do
+ {
+ contexts: {
+ jwt: 'abcdef',
+ controller: 'GraphController#execute'
+ },
+ tags: {
+ variables: %w[some sensitive information],
+ deep_hash: {
+ sharedSecret: 'secret123'
+ }
+ },
+ user: {
+ email: 'a@a.com',
+ password: 'nobodyknows'
+ },
+ extra: {
+ issue_url: 'http://gitlab.com/gitlab-org/gitlab-foss/-/issues/1',
+ my_token: '[FILTERED]',
+ another_token: '[FILTERED]'
+ }
+ }
+ end
+
+ it 'filters sensitive attributes' do
+ expect_next_instance_of(ActiveSupport::ParameterFilter) do |instance|
+ expect(instance).to receive(:filter).exactly(4).times.and_call_original
+ end
+
+ expect(result_hash).to include(
+ contexts: {
+ jwt: '[FILTERED]',
+ controller: 'GraphController#execute'
+ },
+ tags: {
+ variables: '[FILTERED]',
+ deep_hash: {
+ sharedSecret: '[FILTERED]'
+ }
+ },
+ user: {
+ email: 'a@a.com',
+ password: '[FILTERED]'
+ },
+ extra: {
+ issue_url: 'http://gitlab.com/gitlab-org/gitlab-foss/-/issues/1',
+ my_token: '[FILTERED]',
+ another_token: '[FILTERED]'
+ }
+ )
+ end
+ end
+
+ context 'when request contains sensitive information' do
+ let(:exception) { RuntimeError.new }
+ let(:data) { {} }
+
+ before do
+ event.rack_env = {
+ 'HTTP_AUTHORIZATION' => 'Bearer 123456',
+ 'HTTP_PRIVATE_TOKEN' => 'abcdef',
+ 'HTTP_JOB_TOKEN' => 'secret123',
+ 'HTTP_GITLAB_WORKHORSE_PROXY_START' => 123456,
+ 'HTTP_COOKIE' => 'yummy_cookie=choco; tasty_cookie=strawberry',
+ 'QUERY_STRING' => 'token=secret&access_token=secret&job_token=secret&private_token=secret',
+ 'Content-Type' => 'application/json',
+ 'rack.input' => StringIO.new('{"name":"new_project", "some_token":"value"}')
+ }
+ end
+
+ it 'filters sensitive headers' do
+ expect(result_hash[:request][:headers]).to include(
+ 'Authorization' => '[FILTERED]',
+ 'Private-Token' => '[FILTERED]',
+ 'Job-Token' => '[FILTERED]',
+ 'Gitlab-Workhorse-Proxy-Start' => '123456'
+ )
+ end
+
+ it 'filters query string parameters' do
+ expect(result_hash[:request][:query_string]).not_to include('secret')
+ end
+
+ it 'removes cookies' do
+ expect(result_hash[:request][:cookies]).to be_empty
+ end
+
+ it 'removes data' do
+ expect(result_hash[:request][:data]).to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index 1ade3a51c55..fd859ae40fb 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -424,5 +424,25 @@ RSpec.describe Gitlab::ErrorTracking do
end
end
end
+
+ context 'when request contains sensitive information' do
+ before do
+ Sentry.get_current_scope.set_rack_env({
+ 'HTTP_AUTHORIZATION' => 'Bearer 123456',
+ 'HTTP_PRIVATE_TOKEN' => 'abcdef',
+ 'HTTP_JOB_TOKEN' => 'secret123'
+ })
+ end
+
+ it 'filters sensitive data' do
+ track_exception
+
+ expect(sentry_event.to_hash[:request][:headers]).to include(
+ 'Authorization' => '[FILTERED]',
+ 'Private-Token' => '[FILTERED]',
+ 'Job-Token' => '[FILTERED]'
+ )
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/git/attributes_at_ref_parser_spec.rb b/spec/lib/gitlab/git/attributes_at_ref_parser_spec.rb
index 96cd70b4ff1..a5115989e6b 100644
--- a/spec/lib/gitlab/git/attributes_at_ref_parser_spec.rb
+++ b/spec/lib/gitlab/git/attributes_at_ref_parser_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Git::AttributesAtRefParser, :seed_helper do
+RSpec.describe Gitlab::Git::AttributesAtRefParser do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
diff --git a/spec/lib/gitlab/git/attributes_parser_spec.rb b/spec/lib/gitlab/git/attributes_parser_spec.rb
index 4bc39921e85..295d62fa052 100644
--- a/spec/lib/gitlab/git/attributes_parser_spec.rb
+++ b/spec/lib/gitlab/git/attributes_parser_spec.rb
@@ -2,9 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::Git::AttributesParser, :seed_helper do
- let(:attributes_path) { File.join(SEED_STORAGE_PATH, 'with-git-attributes.git', 'info', 'attributes') }
- let(:data) { File.read(attributes_path) }
+RSpec.describe Gitlab::Git::AttributesParser do
+ let(:data) { fixture_file('gitlab/git/gitattributes') }
subject { described_class.new(data) }
@@ -141,11 +140,12 @@ RSpec.describe Gitlab::Git::AttributesParser, :seed_helper do
expect { |b| subject.each_line(&b) }.to yield_successive_args(*args)
end
- it 'does not yield when the attributes file has an unsupported encoding' do
- path = File.join(SEED_STORAGE_PATH, 'with-invalid-git-attributes.git', 'info', 'attributes')
- attrs = described_class.new(File.read(path))
+ context 'unsupported encoding' do
+ let(:data) { fixture_file('gitlab/git/gitattributes_invalid') }
- expect { |b| attrs.each_line(&b) }.not_to yield_control
+ it 'does not yield' do
+ expect { |b| subject.each_line(&b) }.not_to yield_control
+ end
end
end
end
diff --git a/spec/lib/gitlab/git/blame_spec.rb b/spec/lib/gitlab/git/blame_spec.rb
index 7dd7460b142..e514e128785 100644
--- a/spec/lib/gitlab/git/blame_spec.rb
+++ b/spec/lib/gitlab/git/blame_spec.rb
@@ -2,10 +2,10 @@
require "spec_helper"
-RSpec.describe Gitlab::Git::Blame, :seed_helper do
- let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
-
- let(:sha) { SeedRepo::Commit::ID }
+RSpec.describe Gitlab::Git::Blame do
+ let(:project) { create(:project, :repository) }
+ let(:repository) { project.repository.raw }
+ let(:sha) { TestEnv::BRANCH_SHA['master'] }
let(:path) { 'CONTRIBUTING.md' }
let(:range) { nil }
@@ -37,19 +37,17 @@ RSpec.describe Gitlab::Git::Blame, :seed_helper do
end
context "ISO-8859 encoding" do
- let(:sha) { SeedRepo::EncodingCommit::ID }
let(:path) { 'encoding/iso8859.txt' }
it 'converts to UTF-8' do
expect(result.size).to eq(1)
expect(result.first[:commit]).to be_kind_of(Gitlab::Git::Commit)
- expect(result.first[:line]).to eq("Ä ü")
+ expect(result.first[:line]).to eq("Äü")
expect(result.first[:line]).to be_utf8
end
end
context "unknown encoding" do
- let(:sha) { SeedRepo::EncodingCommit::ID }
let(:path) { 'encoding/iso8859.txt' }
it 'converts to UTF-8' do
@@ -59,14 +57,12 @@ RSpec.describe Gitlab::Git::Blame, :seed_helper do
expect(result.size).to eq(1)
expect(result.first[:commit]).to be_kind_of(Gitlab::Git::Commit)
- expect(result.first[:line]).to eq(" ")
+ expect(result.first[:line]).to eq("")
expect(result.first[:line]).to be_utf8
end
end
context "renamed file" do
- let(:project) { create(:project, :repository) }
- let(:repository) { project.repository.raw_repository }
let(:commit) { project.commit('blame-on-renamed') }
let(:sha) { commit.id }
let(:path) { 'files/plain_text/renamed' }
diff --git a/spec/lib/gitlab/git/branch_spec.rb b/spec/lib/gitlab/git/branch_spec.rb
index 3cc52863976..97cd4777b4d 100644
--- a/spec/lib/gitlab/git/branch_spec.rb
+++ b/spec/lib/gitlab/git/branch_spec.rb
@@ -4,9 +4,6 @@ require "spec_helper"
RSpec.describe Gitlab::Git::Branch, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
- let(:rugged) do
- Rugged::Repository.new(File.join(TestEnv.repos_path, repository.relative_path))
- end
subject { repository.branches }
@@ -81,20 +78,6 @@ RSpec.describe Gitlab::Git::Branch, :seed_helper do
end
let(:user) { create(:user) }
- let(:committer) { { email: user.email, name: user.name } }
- let(:params) do
- parents = [rugged.head.target]
- tree = parents.first.tree
-
- {
- message: +'commit message',
- author: committer,
- committer: committer,
- tree: tree,
- parents: parents
- }
- end
-
let(:stale_sha) { travel_to(Gitlab::Git::Branch::STALE_BRANCH_THRESHOLD.ago - 5.days) { create_commit } }
let(:active_sha) { travel_to(Gitlab::Git::Branch::STALE_BRANCH_THRESHOLD.ago + 5.days) { create_commit } }
let(:future_sha) { travel_to(100.days.since) { create_commit } }
@@ -137,7 +120,11 @@ RSpec.describe Gitlab::Git::Branch, :seed_helper do
it { expect(repository.branches.size).to eq(SeedRepo::Repo::BRANCHES.size) }
def create_commit
- params[:message].delete!(+"\r")
- Rugged::Commit.create(rugged, params.merge(committer: committer.merge(time: Time.now)))
+ repository.multi_action(
+ user,
+ branch_name: 'HEAD',
+ message: 'commit message',
+ actions: []
+ ).newrev
end
end
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index de342444c15..da77d8ee5d6 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -3,68 +3,8 @@
require "spec_helper"
RSpec.describe Gitlab::Git::Commit, :seed_helper do
- include GitHelpers
-
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
- let(:rugged_repo) do
- Rugged::Repository.new(File.join(TestEnv.repos_path, TEST_REPO_PATH))
- end
-
let(:commit) { described_class.find(repository, SeedRepo::Commit::ID) }
- let(:rugged_commit) { rugged_repo.lookup(SeedRepo::Commit::ID) }
-
- describe "Commit info" do
- before do
- @committer = {
- email: 'mike@smith.com',
- name: "Mike Smith",
- time: Time.new(2000, 1, 1, 0, 0, 0, "+08:00")
- }
-
- @author = {
- email: 'john@smith.com',
- name: "John Smith",
- time: Time.new(2000, 1, 1, 0, 0, 0, "-08:00")
- }
-
- @parents = [rugged_repo.head.target]
- @gitlab_parents = @parents.map { |c| described_class.find(repository, c.oid) }
- @tree = @parents.first.tree
-
- sha = Rugged::Commit.create(
- rugged_repo,
- author: @author,
- committer: @committer,
- tree: @tree,
- parents: @parents,
- message: "Refactoring specs",
- update_ref: "HEAD"
- )
-
- @raw_commit = rugged_repo.lookup(sha)
- @commit = described_class.find(repository, sha)
- end
-
- it { expect(@commit.short_id).to eq(@raw_commit.oid[0..10]) }
- it { expect(@commit.id).to eq(@raw_commit.oid) }
- it { expect(@commit.sha).to eq(@raw_commit.oid) }
- it { expect(@commit.safe_message).to eq(@raw_commit.message) }
- it { expect(@commit.created_at).to eq(@raw_commit.committer[:time]) }
- it { expect(@commit.date).to eq(@raw_commit.committer[:time]) }
- it { expect(@commit.author_email).to eq(@author[:email]) }
- it { expect(@commit.author_name).to eq(@author[:name]) }
- it { expect(@commit.committer_name).to eq(@committer[:name]) }
- it { expect(@commit.committer_email).to eq(@committer[:email]) }
- it { expect(@commit.different_committer?).to be_truthy }
- it { expect(@commit.parents).to eq(@gitlab_parents) }
- it { expect(@commit.parent_id).to eq(@parents.first.oid) }
- it { expect(@commit.no_commit_message).to eq("No commit message") }
-
- after do
- # Erase the new commit so other tests get the original repo
- rugged_repo.references.update("refs/heads/master", SeedRepo::LastCommit::ID)
- end
- end
describe "Commit info from gitaly commit" do
let(:subject) { (+"My commit").force_encoding('ASCII-8BIT') }
@@ -132,7 +72,7 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
shared_examples '.find' do
it "returns first head commit if without params" do
expect(described_class.last(repository).id).to eq(
- rugged_repo.head.target.oid
+ repository.commit.sha
)
end
@@ -622,19 +562,6 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
end
end
- skip 'move this test to gitaly-ruby' do
- RSpec.describe '#init_from_rugged' do
- let(:gitlab_commit) { described_class.new(repository, rugged_commit) }
-
- subject { gitlab_commit }
-
- describe '#id' do
- subject { super().id }
- it { is_expected.to eq(SeedRepo::Commit::ID) }
- end
- end
- end
-
describe '#init_from_hash' do
let(:commit) { described_class.new(repository, sample_commit_hash) }
diff --git a/spec/lib/gitlab/git/conflict/parser_spec.rb b/spec/lib/gitlab/git/conflict/parser_spec.rb
index 02b00f711b4..7d81af92412 100644
--- a/spec/lib/gitlab/git/conflict/parser_spec.rb
+++ b/spec/lib/gitlab/git/conflict/parser_spec.rb
@@ -86,43 +86,68 @@ RSpec.describe Gitlab::Git::Conflict::Parser do
CONFLICT
end
- let(:lines) do
- described_class.parse(text, our_path: 'files/ruby/regex.rb', their_path: 'files/ruby/regex.rb')
- end
+ shared_examples_for 'successful parsing' do
+ let(:lines) do
+ described_class.parse(content, our_path: 'files/ruby/regex.rb', their_path: 'files/ruby/regex.rb')
+ end
- let(:old_line_numbers) do
- lines.select { |line| line[:type] != 'new' }.map { |line| line[:line_old] }
- end
+ let(:old_line_numbers) do
+ lines.select { |line| line[:type] != 'new' }.map { |line| line[:line_old] }
+ end
- let(:new_line_numbers) do
- lines.select { |line| line[:type] != 'old' }.map { |line| line[:line_new] }
- end
+ let(:new_line_numbers) do
+ lines.select { |line| line[:type] != 'old' }.map { |line| line[:line_new] }
+ end
+
+ let(:line_indexes) { lines.map { |line| line[:line_obj_index] } }
- let(:line_indexes) { lines.map { |line| line[:line_obj_index] } }
+ it 'sets our lines as new lines' do
+ expect(lines[8..13]).to all(include(type: 'new'))
+ expect(lines[26..27]).to all(include(type: 'new'))
+ expect(lines[56..57]).to all(include(type: 'new'))
+ end
- it 'sets our lines as new lines' do
- expect(lines[8..13]).to all(include(type: 'new'))
- expect(lines[26..27]).to all(include(type: 'new'))
- expect(lines[56..57]).to all(include(type: 'new'))
+ it 'sets their lines as old lines' do
+ expect(lines[14..19]).to all(include(type: 'old'))
+ expect(lines[28..29]).to all(include(type: 'old'))
+ expect(lines[58..59]).to all(include(type: 'old'))
+ end
+
+ it 'sets non-conflicted lines as both' do
+ expect(lines[0..7]).to all(include(type: nil))
+ expect(lines[20..25]).to all(include(type: nil))
+ expect(lines[30..55]).to all(include(type: nil))
+ expect(lines[60..62]).to all(include(type: nil))
+ end
+
+ it 'sets consecutive line numbers for line_obj_index, line_old, and line_new' do
+ expect(line_indexes).to eq(0.upto(62).to_a)
+ expect(old_line_numbers).to eq(1.upto(53).to_a)
+ expect(new_line_numbers).to eq(1.upto(53).to_a)
+ end
end
- it 'sets their lines as old lines' do
- expect(lines[14..19]).to all(include(type: 'old'))
- expect(lines[28..29]).to all(include(type: 'old'))
- expect(lines[58..59]).to all(include(type: 'old'))
+ context 'content has LF endings' do
+ let(:content) { text }
+
+ it_behaves_like 'successful parsing'
end
- it 'sets non-conflicted lines as both' do
- expect(lines[0..7]).to all(include(type: nil))
- expect(lines[20..25]).to all(include(type: nil))
- expect(lines[30..55]).to all(include(type: nil))
- expect(lines[60..62]).to all(include(type: nil))
+ context 'content has CRLF endings' do
+ let(:content) { text.gsub("\n", "\r\n") }
+
+ it_behaves_like 'successful parsing'
end
- it 'sets consecutive line numbers for line_obj_index, line_old, and line_new' do
- expect(line_indexes).to eq(0.upto(62).to_a)
- expect(old_line_numbers).to eq(1.upto(53).to_a)
- expect(new_line_numbers).to eq(1.upto(53).to_a)
+ context 'content has mixed LF and CRLF endings' do
+ # Simulate mixed line endings by only changing some of the lines to CRLF
+ let(:content) do
+ text.each_line.map.with_index do |line, index|
+ index.odd? ? line.gsub("\n", "\r\n") : line
+ end.join
+ end
+
+ it_behaves_like 'successful parsing'
end
end
diff --git a/spec/lib/gitlab/git/object_pool_spec.rb b/spec/lib/gitlab/git/object_pool_spec.rb
index 91960ebbede..3b1eb0319f8 100644
--- a/spec/lib/gitlab/git/object_pool_spec.rb
+++ b/spec/lib/gitlab/git/object_pool_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe Gitlab::Git::ObjectPool do
- include RepoHelpers
-
let(:pool_repository) { create(:pool_repository) }
let(:source_repository) { pool_repository.source_project.repository }
@@ -80,8 +78,6 @@ RSpec.describe Gitlab::Git::ObjectPool do
end
describe '#fetch' do
- let(:source_repository_path) { File.join(TestEnv.repos_path, source_repository.relative_path) }
- let(:source_repository_rugged) { Rugged::Repository.new(source_repository_path) }
let(:commit_count) { source_repository.commit_count }
context "when the object's pool repository exists" do
@@ -106,7 +102,13 @@ RSpec.describe Gitlab::Git::ObjectPool do
end
it 'fetches objects from the source repository' do
- new_commit_id = new_commit_edit_old_file(source_repository_rugged).oid
+ new_commit_id = source_repository.create_file(
+ pool_repository.source_project.owner,
+ 'a.file',
+ 'This is a file',
+ branch_name: source_repository.root_ref,
+ message: 'Add a file'
+ )
expect(subject.repository.exists?).to be false
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 47688c4b3e6..e20d5b928c4 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -352,12 +352,30 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
repository.create_branch('left-branch')
repository.create_branch('right-branch')
- left.times do
- new_commit_edit_new_file_on_branch(repository_rugged, 'encoding/CHANGELOG', 'left-branch', 'some more content for a', 'some stuff')
+ left.times do |i|
+ repository.multi_action(
+ user,
+ branch_name: 'left-branch',
+ message: 'some more content for a',
+ actions: [{
+ action: i == 0 ? :create : :update,
+ file_path: 'encoding/CHANGELOG',
+ content: 'some stuff'
+ }]
+ )
end
- right.times do
- new_commit_edit_new_file_on_branch(repository_rugged, 'encoding/CHANGELOG', 'right-branch', 'some more content for b', 'some stuff')
+ right.times do |i|
+ repository.multi_action(
+ user,
+ branch_name: 'right-branch',
+ message: 'some more content for b',
+ actions: [{
+ action: i == 0 ? :create : :update,
+ file_path: 'encoding/CHANGELOG',
+ content: 'some stuff'
+ }]
+ )
end
end
@@ -367,8 +385,8 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
it 'returns the correct count bounding at max_count' do
- branch_a_sha = repository_rugged.branches['left-branch'].target.oid
- branch_b_sha = repository_rugged.branches['right-branch'].target.oid
+ branch_a_sha = repository.find_branch('left-branch').dereferenced_target.sha
+ branch_b_sha = repository.find_branch('right-branch').dereferenced_target.sha
count = repository.diverging_commit_count(branch_a_sha, branch_b_sha, max_count: 1000)
@@ -392,12 +410,30 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
repository.create_branch('left-branch')
repository.create_branch('right-branch')
- left.times do
- new_commit_edit_new_file_on_branch(repository_rugged, 'encoding/CHANGELOG', 'left-branch', 'some more content for a', 'some stuff')
+ left.times do |i|
+ repository.multi_action(
+ user,
+ branch_name: 'left-branch',
+ message: 'some more content for a',
+ actions: [{
+ action: i == 0 ? :create : :update,
+ file_path: 'encoding/CHANGELOG',
+ content: 'some stuff'
+ }]
+ )
end
- right.times do
- new_commit_edit_new_file_on_branch(repository_rugged, 'encoding/CHANGELOG', 'right-branch', 'some more content for b', 'some stuff')
+ right.times do |i|
+ repository.multi_action(
+ user,
+ branch_name: 'right-branch',
+ message: 'some more content for b',
+ actions: [{
+ action: i == 0 ? :create : :update,
+ file_path: 'encoding/CHANGELOG',
+ content: 'some stuff'
+ }]
+ )
end
end
@@ -407,8 +443,8 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
it 'returns the correct count bounding at max_count' do
- branch_a_sha = repository_rugged.branches['left-branch'].target.oid
- branch_b_sha = repository_rugged.branches['right-branch'].target.oid
+ branch_a_sha = repository.find_branch('left-branch').dereferenced_target.sha
+ branch_b_sha = repository.find_branch('right-branch').dereferenced_target.sha
results = repository.diverging_commit_count(branch_a_sha, branch_b_sha, max_count: max_count)
@@ -469,16 +505,14 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
it 'deletes the ref' do
repository.delete_refs('refs/heads/feature')
- expect(repository_rugged.references['refs/heads/feature']).to be_nil
+ expect(repository.find_branch('feature')).to be_nil
end
it 'deletes all refs' do
refs = %w[refs/heads/wip refs/tags/v1.1.0]
repository.delete_refs(*refs)
- refs.each do |ref|
- expect(repository_rugged.references[ref]).to be_nil
- end
+ expect(repository.list_refs(refs)).to be_empty
end
it 'does not fail when deleting an empty list of refs' do
@@ -491,7 +525,7 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
describe '#branch_names_contains_sha' do
- let(:head_id) { repository_rugged.head.target.oid }
+ let(:head_id) { repository.commit.id }
let(:new_branch) { head_id }
let(:utf8_branch) { 'branch-é' }
@@ -525,7 +559,7 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
it 'does not error when dereferenced_target is nil' do
blob_id = repository.blob_at('master', 'README.md').id
- repository_rugged.tags.create("refs/tags/blob-tag", blob_id)
+ repository.add_tag("refs/tags/blob-tag", user: user, target: blob_id)
expect { subject }.not_to raise_error
end
@@ -559,14 +593,31 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
describe '#search_files_by_content' do
let(:repository) { mutable_repository }
- let(:repository_rugged) { mutable_repository_rugged }
let(:ref) { 'search-files-by-content-branch' }
let(:content) { 'foobarbazmepmep' }
before do
repository.create_branch(ref)
- new_commit_edit_new_file_on_branch(repository_rugged, 'encoding/CHANGELOG', ref, 'committing something', content)
- new_commit_edit_new_file_on_branch(repository_rugged, 'anotherfile', ref, 'committing something', content)
+ repository.multi_action(
+ user,
+ branch_name: ref,
+ message: 'committing something',
+ actions: [{
+ action: :create,
+ file_path: 'encoding/CHANGELOG',
+ content: content
+ }]
+ )
+ repository.multi_action(
+ user,
+ branch_name: ref,
+ message: 'committing something',
+ actions: [{
+ action: :create,
+ file_path: 'anotherfile',
+ content: content
+ }]
+ )
end
after do
@@ -669,14 +720,42 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
before do
# Add new commits so that there's a renamed file in the commit history
- @commit_with_old_name_id = new_commit_edit_old_file(repository_rugged).oid
- @rename_commit_id = new_commit_move_file(repository_rugged).oid
- @commit_with_new_name_id = new_commit_edit_new_file(repository_rugged, "encoding/CHANGELOG", "Edit encoding/CHANGELOG", "I'm a new changelog with different text").oid
+ @commit_with_old_name_id = repository.multi_action(
+ user,
+ branch_name: repository.root_ref,
+ message: 'Update CHANGELOG',
+ actions: [{
+ action: :update,
+ file_path: 'CHANGELOG',
+ content: 'CHANGELOG'
+ }]
+ ).newrev
+ @rename_commit_id = repository.multi_action(
+ user,
+ branch_name: repository.root_ref,
+ message: 'Move CHANGELOG to encoding/',
+ actions: [{
+ action: :move,
+ previous_path: 'CHANGELOG',
+ file_path: 'encoding/CHANGELOG',
+ content: 'CHANGELOG'
+ }]
+ ).newrev
+ @commit_with_new_name_id = repository.multi_action(
+ user,
+ branch_name: repository.root_ref,
+ message: 'Edit encoding/CHANGELOG',
+ actions: [{
+ action: :update,
+ file_path: 'encoding/CHANGELOG',
+ content: "I'm a new changelog with different text"
+ }]
+ ).newrev
end
after do
# Erase our commits so other tests get the original repo
- repository_rugged.references.update("refs/heads/master", SeedRepo::LastCommit::ID)
+ repository.write_ref(repository.root_ref, SeedRepo::LastCommit::ID)
end
context "where 'follow' == true" do
@@ -1649,27 +1728,28 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
describe '#gitattribute' do
- let(:repository) { Gitlab::Git::Repository.new('default', TEST_GITATTRIBUTES_REPO_PATH, '', 'group/project') }
+ let(:project) { create(:project, :repository) }
+ let(:repository) { project.repository }
- after do
- ensure_seeds
- end
+ context 'with gitattributes' do
+ before do
+ repository.copy_gitattributes('gitattributes')
+ end
- it 'returns matching language attribute' do
- expect(repository.gitattribute("custom-highlighting/test.gitlab-custom", 'gitlab-language')).to eq('ruby')
- end
+ it 'returns matching language attribute' do
+ expect(repository.gitattribute("custom-highlighting/test.gitlab-custom", 'gitlab-language')).to eq('ruby')
+ end
- it 'returns matching language attribute with additional options' do
- expect(repository.gitattribute("custom-highlighting/test.gitlab-cgi", 'gitlab-language')).to eq('erb?parent=json')
- end
+ it 'returns matching language attribute with additional options' do
+ expect(repository.gitattribute("custom-highlighting/test.gitlab-cgi", 'gitlab-language')).to eq('erb?parent=json')
+ end
- it 'returns nil if nothing matches' do
- expect(repository.gitattribute("report.xslt", 'gitlab-language')).to eq(nil)
+ it 'returns nil if nothing matches' do
+ expect(repository.gitattribute("report.xslt", 'gitlab-language')).to eq(nil)
+ end
end
- context 'without gitattributes file' do
- let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
-
+ context 'without gitattributes' do
it 'returns nil' do
expect(repository.gitattribute("README.md", 'gitlab-language')).to eq(nil)
end
@@ -1760,25 +1840,13 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
describe '#languages' do
it 'returns exactly the expected results' do
languages = repository.languages('4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6')
- expected_languages = [
- { value: 66.63, label: "Ruby", color: "#701516", highlight: "#701516" },
- { value: 22.96, label: "JavaScript", color: "#f1e05a", highlight: "#f1e05a" },
- { value: 7.9, label: "HTML", color: "#e34c26", highlight: "#e34c26" },
- { value: 2.51, label: "CoffeeScript", color: "#244776", highlight: "#244776" }
- ]
-
- expect(languages.size).to eq(expected_languages.size)
-
- expected_languages.size.times do |i|
- a = expected_languages[i]
- b = languages[i]
- expect(a.keys.sort).to eq(b.keys.sort)
- expect(a[:value]).to be_within(0.1).of(b[:value])
-
- non_float_keys = a.keys - [:value]
- expect(a.values_at(*non_float_keys)).to eq(b.values_at(*non_float_keys))
- end
+ expect(languages).to match_array([
+ { value: a_value_within(0.1).of(66.7), label: "Ruby", color: "#701516", highlight: "#701516" },
+ { value: a_value_within(0.1).of(22.96), label: "JavaScript", color: "#f1e05a", highlight: "#f1e05a" },
+ { value: a_value_within(0.1).of(7.9), label: "HTML", color: "#e34c26", highlight: "#e34c26" },
+ { value: a_value_within(0.1).of(2.51), label: "CoffeeScript", color: "#244776", highlight: "#244776" }
+ ])
end
it "uses the repository's HEAD when no ref is passed" do
@@ -1818,12 +1886,18 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
context 'when the branch exists' do
context 'when the commit does not exist locally' do
let(:source_branch) { 'new-branch-for-fetch-source-branch' }
- let(:source_path) { File.join(TestEnv.repos_path, source_repository.relative_path) }
- let(:source_rugged) { Rugged::Repository.new(source_path) }
- let(:new_oid) { new_commit_edit_old_file(source_rugged).oid }
- before do
- source_rugged.branches.create(source_branch, new_oid)
+ let!(:new_oid) do
+ source_repository.multi_action(
+ user,
+ branch_name: source_branch,
+ message: 'Add a file',
+ actions: [{
+ action: :create,
+ file_path: 'a.file',
+ content: 'This is a file.'
+ }]
+ ).newrev
end
it 'writes the ref' do
@@ -1869,7 +1943,7 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
it "removes the branch from the repo" do
repository.rm_branch(branch_name, user: user)
- expect(repository_rugged.branches[branch_name]).to be_nil
+ expect(repository.find_branch(branch_name)).to be_nil
end
end
@@ -2290,11 +2364,23 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
context 'when the diff contains a rename' do
- let(:end_sha) { new_commit_move_file(repository_rugged).oid }
+ let(:end_sha) do
+ repository.multi_action(
+ user,
+ branch_name: repository.root_ref,
+ message: 'Move CHANGELOG to encoding/',
+ actions: [{
+ action: :move,
+ previous_path: 'CHANGELOG',
+ file_path: 'encoding/CHANGELOG',
+ content: 'CHANGELOG'
+ }]
+ ).newrev
+ end
after do
# Erase our commits so other tests get the original repo
- repository_rugged.references.update('refs/heads/master', SeedRepo::LastCommit::ID)
+ repository.write_ref(repository.root_ref, SeedRepo::LastCommit::ID)
end
it 'does not include the renamed file in the sparse checkout' do
@@ -2342,24 +2428,15 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
def create_remote_branch(remote_name, branch_name, source_branch_name)
- source_branch = repository.branches.find { |branch| branch.name == source_branch_name }
- repository_rugged.references.create("refs/remotes/#{remote_name}/#{branch_name}", source_branch.dereferenced_target.sha)
- end
-
- def refs(dir)
- IO.popen(%W[git -C #{dir} for-each-ref], &:read).split("\n").map do |line|
- line.split("\t").last
- end
+ source_branch = repository.find_branch(source_branch_name)
+ repository.write_ref("refs/remotes/#{remote_name}/#{branch_name}", source_branch.dereferenced_target.sha)
end
describe '#disconnect_alternates' do
let(:project) { create(:project, :repository) }
let(:pool_repository) { create(:pool_repository) }
let(:repository) { project.repository }
- let(:repository_path) { File.join(TestEnv.repos_path, repository.relative_path) }
let(:object_pool) { pool_repository.object_pool }
- let(:object_pool_path) { File.join(TestEnv.repos_path, object_pool.repository.relative_path) }
- let(:object_pool_rugged) { Rugged::Repository.new(object_pool_path) }
before do
object_pool.create # rubocop:disable Rails/SaveBang
@@ -2369,25 +2446,24 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
expect { repository.disconnect_alternates }.not_to raise_error
end
- it 'removes the alternates file' do
- object_pool.link(repository)
-
- alternates_file = File.join(repository_path, "objects", "info", "alternates")
- expect(File.exist?(alternates_file)).to be_truthy
-
- repository.disconnect_alternates
-
- expect(File.exist?(alternates_file)).to be_falsey
- end
-
it 'can still access objects in the object pool' do
object_pool.link(repository)
- new_commit = new_commit_edit_old_file(object_pool_rugged)
- expect(repository.commit(new_commit.oid).id).to eq(new_commit.oid)
+ new_commit_id = object_pool.repository.multi_action(
+ project.owner,
+ branch_name: object_pool.repository.root_ref,
+ message: 'Add a file',
+ actions: [{
+ action: :create,
+ file_path: 'a.file',
+ content: 'This is a file.'
+ }]
+ ).newrev
+
+ expect(repository.commit(new_commit_id).id).to eq(new_commit_id)
repository.disconnect_alternates
- expect(repository.commit(new_commit.oid).id).to eq(new_commit.oid)
+ expect(repository.commit(new_commit_id).id).to eq(new_commit_id)
end
end
@@ -2483,7 +2559,7 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
it 'mirrors the source repository' do
subject
- expect(refs(new_repository_path)).to eq(refs(repository_path))
+ expect(new_repository.list_refs(['refs/'])).to eq(repository.list_refs(['refs/']))
end
end
@@ -2495,7 +2571,7 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
it 'mirrors the source repository' do
subject
- expect(refs(new_repository_path)).to eq(refs(repository_path))
+ expect(new_repository.list_refs(['refs/'])).to eq(repository.list_refs(['refs/']))
end
context 'with keep-around refs' do
@@ -2511,8 +2587,8 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
it 'includes the temporary and keep-around refs' do
subject
- expect(refs(new_repository_path)).to include(keep_around_ref)
- expect(refs(new_repository_path)).to include(tmp_ref)
+ expect(new_repository.list_refs([keep_around_ref]).map(&:name)).to match_array([keep_around_ref])
+ expect(new_repository.list_refs([tmp_ref]).map(&:name)).to match_array([tmp_ref])
end
end
end
diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb
index 97ba177da71..172d7a3f27b 100644
--- a/spec/lib/gitlab/git/tree_spec.rb
+++ b/spec/lib/gitlab/git/tree_spec.rb
@@ -3,6 +3,8 @@
require "spec_helper"
RSpec.describe Gitlab::Git::Tree, :seed_helper do
+ let_it_be(:user) { create(:user) }
+
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
shared_examples :repo do
@@ -85,51 +87,29 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
context :flat_path do
let(:filename) { 'files/flat/path/correct/content.txt' }
- let(:sha) { create_file(filename) }
let(:path) { 'files/flat' }
# rubocop: disable Rails/FindBy
# This is not ActiveRecord where..first
let(:subdir_file) { entries.first }
# rubocop: enable Rails/FindBy
- let(:repository_rugged) { Rugged::Repository.new(File.join(SEED_STORAGE_PATH, TEST_REPO_PATH)) }
-
- it { expect(subdir_file.flat_path).to eq('files/flat/path/correct') }
- end
-
- def create_file(path)
- oid = repository_rugged.write('test', :blob)
- index = repository_rugged.index
- index.add(path: filename, oid: oid, mode: 0100644)
-
- options = commit_options(
- repository_rugged,
- index,
- repository_rugged.head.target,
- 'HEAD',
- 'Add new file')
+ let!(:sha) do
+ repository.multi_action(
+ user,
+ branch_name: 'HEAD',
+ message: "Create #{filename}",
+ actions: [{
+ action: :create,
+ file_path: filename,
+ content: 'test'
+ }]
+ ).newrev
+ end
- Rugged::Commit.create(repository_rugged, options)
- end
+ after do
+ ensure_seeds
+ end
- # Build the options hash that's passed to Rugged::Commit#create
- def commit_options(repo, index, target, ref, message)
- options = {}
- options[:tree] = index.write_tree(repo)
- options[:author] = {
- email: "test@example.com",
- name: "Test Author",
- time: Time.gm(2014, "mar", 3, 20, 15, 1)
- }
- options[:committer] = {
- email: "test@example.com",
- name: "Test Author",
- time: Time.gm(2014, "mar", 3, 20, 15, 1)
- }
- options[:message] ||= message
- options[:parents] = repo.empty? ? [] : [target].compact
- options[:update_ref] = ref
-
- options
+ it { expect(subdir_file.flat_path).to eq('files/flat/path/correct') }
end
end
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 3a34d39c722..d5d1bef7bff 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::GitalyClient::CommitService do
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:storage_name) { project.repository_storage }
let(:relative_path) { project.disk_path + '.git' }
let(:repository) { project.repository }
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index 4320c5460da..e04895d975f 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -2,9 +2,6 @@
require 'spec_helper'
-require 'google/rpc/status_pb'
-require 'google/protobuf/well_known_types'
-
RSpec.describe Gitlab::GitalyClient::OperationService do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
@@ -188,7 +185,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
end
shared_examples 'a failed branch deletion' do
- it 'raises a PreRecieveError' do
+ it 'raises a PreReceiveError' do
expect_any_instance_of(Gitaly::OperationService::Stub)
.to receive(:user_delete_branch).with(request, kind_of(Hash))
.and_raise(custom_hook_error)
@@ -288,7 +285,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
end
shared_examples 'a failed merge' do
- it 'raises a PreRecieveError' do
+ it 'raises a PreReceiveError' do
expect_any_instance_of(Gitaly::OperationService::Stub)
.to receive(:user_merge_branch).with(kind_of(Enumerator), kind_of(Hash))
.and_raise(custom_hook_error)
@@ -816,14 +813,4 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
end
end
end
-
- def new_detailed_error(error_code, error_message, details)
- status_error = Google::Rpc::Status.new(
- code: error_code,
- message: error_message,
- details: [Google::Protobuf::Any.pack(details)]
- )
-
- GRPC::BadStatus.new(error_code, error_message, { "grpc-status-details-bin" => Google::Rpc::Status.encode(status_error) })
- end
end
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index 2e37c98a591..566bdbacf4a 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -241,30 +241,70 @@ RSpec.describe Gitlab::GitalyClient::RefService do
end
end
- describe '#ref_exists?', :seed_helper do
- it 'finds the master branch ref' do
- expect(client.ref_exists?('refs/heads/master')).to eq(true)
- end
+ describe '#ref_exists?' do
+ let(:ref) { 'refs/heads/master' }
- it 'returns false for an illegal tag name ref' do
- expect(client.ref_exists?('refs/tags/.this-tag-name-is-illegal')).to eq(false)
- end
+ it 'sends a ref_exists message' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:ref_exists)
+ .with(gitaly_request_with_params(ref: ref), kind_of(Hash))
+ .and_return(double('ref_exists_response', value: true))
- it 'raises an argument error if the ref name parameter does not start with refs/' do
- expect { client.ref_exists?('reXXXXX') }.to raise_error(ArgumentError)
+ expect(client.ref_exists?(ref)).to be true
end
end
describe '#delete_refs' do
let(:prefixes) { %w(refs/heads refs/keep-around) }
+ subject(:delete_refs) { client.delete_refs(except_with_prefixes: prefixes) }
+
it 'sends a delete_refs message' do
expect_any_instance_of(Gitaly::RefService::Stub)
.to receive(:delete_refs)
.with(gitaly_request_with_params(except_with_prefix: prefixes), kind_of(Hash))
.and_return(double('delete_refs_response', git_error: ""))
- client.delete_refs(except_with_prefixes: prefixes)
+ delete_refs
+ end
+
+ context 'with a references locked error' do
+ let(:references_locked_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::FAILED_PRECONDITION,
+ "error message",
+ Gitaly::DeleteRefsError.new(references_locked: Gitaly::ReferencesLockedError.new))
+ end
+
+ it 'raises ReferencesLockedError' do
+ expect_any_instance_of(Gitaly::RefService::Stub).to receive(:delete_refs)
+ .with(gitaly_request_with_params(except_with_prefix: prefixes), kind_of(Hash))
+ .and_raise(references_locked_error)
+
+ expect { delete_refs }.to raise_error(Gitlab::Git::ReferencesLockedError)
+ end
+ end
+
+ context 'with an invalid format error' do
+ let(:invalid_refs) { ['\invali.\d/1', '\.invali/d/2'] }
+ let(:invalid_reference_format_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INVALID_ARGUMENT,
+ "error message",
+ Gitaly::DeleteRefsError.new(invalid_format: Gitaly::InvalidRefFormatError.new(refs: invalid_refs)))
+ end
+
+ it 'raises InvalidRefFormatError' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:delete_refs)
+ .with(gitaly_request_with_params(except_with_prefix: prefixes), kind_of(Hash))
+ .and_raise(invalid_reference_format_error)
+
+ expect { delete_refs }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::InvalidRefFormatError)
+ expect(error.message).to eq("references have an invalid format: #{invalid_refs.join(",")}")
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index ba4ea1069d8..a3840ca843f 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -358,11 +358,7 @@ RSpec.describe Gitlab::GitalyClient do
end
end
- context 'when RequestStore is enabled and the maximum number of calls is not enforced by a feature flag', :request_store do
- before do
- stub_feature_flags(gitaly_enforce_requests_limits: false)
- end
-
+ shared_examples 'enforces maximum allowed Gitaly calls' do
it 'allows up to the maximum number of allowed calls' do
expect { call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS) }.not_to raise_error
end
@@ -408,6 +404,18 @@ RSpec.describe Gitlab::GitalyClient do
end
end
+ context 'when RequestStore is enabled and the maximum number of calls is enforced by a feature flag', :request_store do
+ include_examples 'enforces maximum allowed Gitaly calls'
+ end
+
+ context 'when RequestStore is enabled and the maximum number of calls is not enforced by a feature flag', :request_store do
+ before do
+ stub_feature_flags(gitaly_enforce_requests_limits: false)
+ end
+
+ include_examples 'enforces maximum allowed Gitaly calls'
+ end
+
context 'in production and when RequestStore is enabled', :request_store do
before do
stub_rails_env('production')
@@ -537,4 +545,44 @@ RSpec.describe Gitlab::GitalyClient do
end
end
end
+
+ describe '.decode_detailed_error' do
+ let(:detailed_error) do
+ new_detailed_error(GRPC::Core::StatusCodes::INVALID_ARGUMENT,
+ "error message",
+ Gitaly::InvalidRefFormatError.new)
+ end
+
+ let(:error_without_details) do
+ error_code = GRPC::Core::StatusCodes::INVALID_ARGUMENT
+ error_message = "error message"
+
+ status_error = Google::Rpc::Status.new(
+ code: error_code,
+ message: error_message,
+ details: nil
+ )
+
+ GRPC::BadStatus.new(
+ error_code,
+ error_message,
+ { "grpc-status-details-bin" => Google::Rpc::Status.encode(status_error) })
+ end
+
+ context 'when decoding a structured error' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:error, :result) do
+ detailed_error | Gitaly::InvalidRefFormatError.new
+ error_without_details | nil
+ StandardError.new | nil
+ end
+
+ with_them do
+ it 'returns correct detailed error' do
+ expect(described_class.decode_detailed_error(error)).to eq(result)
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb b/spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb
new file mode 100644
index 00000000000..b773598853d
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedLabel do
+ subject(:importer) { described_class.new(project, user.id) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:issue) { create(:issue, project: project) }
+ let!(:label) { create(:label, project: project) }
+
+ let(:issue_event) do
+ Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
+ 'id' => 6501124486,
+ 'actor' => { 'id' => 4, 'login' => 'alice' },
+ 'event' => event_type,
+ 'commit_id' => nil,
+ 'label_title' => label.title,
+ 'issue_db_id' => issue.id,
+ 'created_at' => '2022-04-26 18:30:53 UTC'
+ )
+ end
+
+ let(:event_attrs) do
+ {
+ user_id: user.id,
+ issue_id: issue.id,
+ label_id: label.id,
+ created_at: issue_event.created_at
+ }.stringify_keys
+ end
+
+ shared_examples 'new event' do
+ it 'creates a new label event' do
+ expect { importer.execute(issue_event) }.to change { issue.resource_label_events.count }
+ .from(0).to(1)
+ expect(issue.resource_label_events.last)
+ .to have_attributes(expected_event_attrs)
+ end
+ end
+
+ before do
+ allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(label.id)
+ end
+
+ context 'when importing a labeled event' do
+ let(:event_type) { 'labeled' }
+ let(:expected_event_attrs) { event_attrs.merge(action: 'add') }
+
+ it_behaves_like 'new event'
+ end
+
+ context 'when importing an unlabeled event' do
+ let(:event_type) { 'unlabeled' }
+ let(:expected_event_attrs) { event_attrs.merge(action: 'remove') }
+
+ it_behaves_like 'new event'
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/events/closed_spec.rb b/spec/lib/gitlab/github_import/importer/events/closed_spec.rb
new file mode 100644
index 00000000000..116917d3e06
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/events/closed_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Events::Closed do
+ subject(:importer) { described_class.new(project, user.id) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:issue) { create(:issue, project: project) }
+ let(:commit_id) { nil }
+
+ let(:issue_event) do
+ Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
+ 'id' => 6501124486,
+ 'node_id' => 'CE_lADOHK9fA85If7x0zwAAAAGDf0mG',
+ 'url' => 'https://api.github.com/repos/elhowm/test-import/issues/events/6501124486',
+ 'actor' => { 'id' => 4, 'login' => 'alice' },
+ 'event' => 'closed',
+ 'created_at' => '2022-04-26 18:30:53 UTC',
+ 'commit_id' => commit_id,
+ 'issue_db_id' => issue.id
+ )
+ end
+
+ let(:expected_event_attrs) do
+ {
+ project_id: project.id,
+ author_id: user.id,
+ target_id: issue.id,
+ target_type: Issue.name,
+ action: 'closed',
+ created_at: issue_event.created_at,
+ updated_at: issue_event.created_at
+ }.stringify_keys
+ end
+
+ let(:expected_state_event_attrs) do
+ {
+ user_id: user.id,
+ issue_id: issue.id,
+ state: 'closed',
+ created_at: issue_event.created_at
+ }.stringify_keys
+ end
+
+ it 'creates expected event and state event' do
+ importer.execute(issue_event)
+
+ expect(issue.events.count).to eq 1
+ expect(issue.events[0].attributes)
+ .to include expected_event_attrs
+
+ expect(issue.resource_state_events.count).to eq 1
+ expect(issue.resource_state_events[0].attributes)
+ .to include expected_state_event_attrs
+ end
+
+ context 'when closed by commit' do
+ let!(:closing_commit) { create(:commit, project: project) }
+ let(:commit_id) { closing_commit.id }
+
+ it 'creates expected event and state event' do
+ importer.execute(issue_event)
+
+ expect(issue.events.count).to eq 1
+ state_event = issue.resource_state_events.last
+ expect(state_event.source_commit).to eq commit_id[0..40]
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/events/cross_referenced_spec.rb b/spec/lib/gitlab/github_import/importer/events/cross_referenced_spec.rb
new file mode 100644
index 00000000000..118c482a7d9
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/events/cross_referenced_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Events::CrossReferenced, :clean_gitlab_redis_cache do
+ subject(:importer) { described_class.new(project, user.id) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:sawyer_stub) { Struct.new(:iid, :issuable_type, keyword_init: true) }
+
+ let(:issue) { create(:issue, project: project) }
+ let(:referenced_in) { build_stubbed(:issue, project: project) }
+ let(:commit_id) { nil }
+
+ let(:issue_event) do
+ Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
+ 'id' => 6501124486,
+ 'node_id' => 'CE_lADOHK9fA85If7x0zwAAAAGDf0mG',
+ 'url' => 'https://api.github.com/repos/elhowm/test-import/issues/events/6501124486',
+ 'actor' => { 'id' => 4, 'login' => 'alice' },
+ 'event' => 'cross-referenced',
+ 'source' => {
+ 'type' => 'issue',
+ 'issue' => {
+ 'number' => referenced_in.iid,
+ 'pull_request' => pull_request_resource
+ }
+ },
+ 'created_at' => '2022-04-26 18:30:53 UTC',
+ 'issue_db_id' => issue.id
+ )
+ end
+
+ let(:pull_request_resource) { nil }
+ let(:expected_note_attrs) do
+ {
+ system: true,
+ noteable_type: Issue.name,
+ noteable_id: issue_event.issue_db_id,
+ project_id: project.id,
+ author_id: user.id,
+ note: expected_note_body,
+ created_at: issue_event.created_at
+ }.stringify_keys
+ end
+
+ context 'when referenced in other issue' do
+ let(:expected_note_body) { "mentioned in issue ##{issue.iid}" }
+
+ before do
+ other_issue_resource = sawyer_stub.new(iid: referenced_in.iid, issuable_type: 'Issue')
+ Gitlab::GithubImport::IssuableFinder.new(project, other_issue_resource)
+ .cache_database_id(referenced_in.iid)
+ end
+
+ it 'creates expected note' do
+ importer.execute(issue_event)
+
+ expect(issue.notes.count).to eq 1
+ expect(issue.notes[0]).to have_attributes expected_note_attrs
+ expect(issue.notes[0].system_note_metadata.action).to eq 'cross_reference'
+ end
+ end
+
+ context 'when referenced in pull request' do
+ let(:referenced_in) { build_stubbed(:merge_request, project: project) }
+ let(:pull_request_resource) { { 'id' => referenced_in.iid } }
+
+ let(:expected_note_body) { "mentioned in merge request !#{referenced_in.iid}" }
+
+ before do
+ other_issue_resource =
+ sawyer_stub.new(iid: referenced_in.iid, issuable_type: 'MergeRequest')
+ Gitlab::GithubImport::IssuableFinder.new(project, other_issue_resource)
+ .cache_database_id(referenced_in.iid)
+ end
+
+ it 'creates expected note' do
+ importer.execute(issue_event)
+
+ expect(issue.notes.count).to eq 1
+ expect(issue.notes[0]).to have_attributes expected_note_attrs
+ expect(issue.notes[0].system_note_metadata.action).to eq 'cross_reference'
+ end
+ end
+
+ context 'when referenced in out of project issue/pull_request' do
+ it 'creates expected note' do
+ importer.execute(issue_event)
+
+ expect(issue.notes.count).to eq 0
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/events/renamed_spec.rb b/spec/lib/gitlab/github_import/importer/events/renamed_spec.rb
new file mode 100644
index 00000000000..a8c3fbcb05d
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/events/renamed_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Events::Renamed do
+ subject(:importer) { described_class.new(project, user.id) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:issue) { create(:issue, project: project) }
+
+ let(:issue_event) do
+ Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
+ 'id' => 6501124486,
+ 'actor' => { 'id' => 4, 'login' => 'alice' },
+ 'event' => 'renamed',
+ 'commit_id' => nil,
+ 'created_at' => '2022-04-26 18:30:53 UTC',
+ 'old_title' => 'old title',
+ 'new_title' => 'new title',
+ 'issue_db_id' => issue.id
+ )
+ end
+
+ let(:expected_note_attrs) do
+ {
+ noteable_id: issue.id,
+ noteable_type: Issue.name,
+ project_id: project.id,
+ author_id: user.id,
+ note: "changed title from **{-old-} title** to **{+new+} title**",
+ system: true,
+ created_at: issue_event.created_at,
+ updated_at: issue_event.created_at
+ }.stringify_keys
+ end
+
+ let(:expected_system_note_metadata_attrs) do
+ {
+ action: "title",
+ created_at: issue_event.created_at,
+ updated_at: issue_event.created_at
+ }.stringify_keys
+ end
+
+ describe '#execute' do
+ it 'creates expected note' do
+ expect { importer.execute(issue_event) }.to change { issue.notes.count }
+ .from(0).to(1)
+
+ expect(issue.notes.last)
+ .to have_attributes(expected_note_attrs)
+ end
+
+ it 'creates expected system note metadata' do
+ expect { importer.execute(issue_event) }.to change { SystemNoteMetadata.count }
+ .from(0).to(1)
+
+ expect(SystemNoteMetadata.last)
+ .to have_attributes(
+ expected_system_note_metadata_attrs.merge(
+ note_id: Note.last.id
+ )
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/events/reopened_spec.rb b/spec/lib/gitlab/github_import/importer/events/reopened_spec.rb
new file mode 100644
index 00000000000..81653b0ecdc
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/events/reopened_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Events::Reopened, :aggregate_failures do
+ subject(:importer) { described_class.new(project, user.id) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:issue) { create(:issue, project: project) }
+
+ let(:issue_event) do
+ Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
+ 'id' => 6501124486,
+ 'node_id' => 'CE_lADOHK9fA85If7x0zwAAAAGDf0mG',
+ 'url' => 'https://api.github.com/repos/elhowm/test-import/issues/events/6501124486',
+ 'actor' => { 'id' => 4, 'login' => 'alice' },
+ 'event' => 'reopened',
+ 'created_at' => '2022-04-26 18:30:53 UTC',
+ 'issue_db_id' => issue.id
+ )
+ end
+
+ let(:expected_event_attrs) do
+ {
+ project_id: project.id,
+ author_id: user.id,
+ target_id: issue.id,
+ target_type: Issue.name,
+ action: 'reopened',
+ created_at: issue_event.created_at,
+ updated_at: issue_event.created_at
+ }.stringify_keys
+ end
+
+ let(:expected_state_event_attrs) do
+ {
+ user_id: user.id,
+ state: 'reopened',
+ created_at: issue_event.created_at
+ }.stringify_keys
+ end
+
+ it 'creates expected event and state event' do
+ importer.execute(issue_event)
+
+ expect(issue.events.count).to eq 1
+ expect(issue.events[0].attributes)
+ .to include expected_event_attrs
+
+ expect(issue.resource_state_events.count).to eq 1
+ expect(issue.resource_state_events[0].attributes)
+ .to include expected_state_event_attrs
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
new file mode 100644
index 00000000000..da32a3b3766
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab_redis_cache do
+ let(:importer) { described_class.new(issue_event, project, client) }
+
+ let(:project) { create(:project) }
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:user) { create(:user) }
+ let(:issue) { create(:issue, project: project) }
+
+ let(:issue_event) do
+ Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
+ 'id' => 6501124486,
+ 'node_id' => 'CE_lADOHK9fA85If7x0zwAAAAGDf0mG',
+ 'url' => 'https://api.github.com/repos/elhowm/test-import/issues/events/6501124486',
+ 'actor' => { 'id' => actor_id, 'login' => 'alice' },
+ 'event' => event_name,
+ 'commit_id' => '570e7b2abdd848b95f2f578043fc23bd6f6fd24d',
+ 'commit_url' =>
+ 'https://api.github.com/repos/octocat/Hello-World/commits/570e7b2abdd848b95f2f578043fc23bd6f6fd24d',
+ 'created_at' => '2022-04-26 18:30:53 UTC',
+ 'performed_via_github_app' => nil
+ )
+ end
+
+ let(:actor_id) { user.id }
+ let(:event_name) { 'closed' }
+
+ shared_examples 'triggers specific event importer' do |importer_class|
+ it importer_class.name do
+ specific_importer = double(importer_class.name) # rubocop:disable RSpec/VerifiedDoubles
+
+ expect(importer_class)
+ .to receive(:new).with(project, user.id)
+ .and_return(specific_importer)
+ expect(specific_importer).to receive(:execute).with(issue_event)
+
+ importer.execute
+ end
+ end
+
+ describe '#execute' do
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:author_id_for)
+ .with(issue_event, author_key: :actor)
+ .and_return(user.id, true)
+ end
+
+ issue_event.attributes[:issue_db_id] = issue.id
+ end
+
+ context "when it's closed issue event" do
+ let(:event_name) { 'closed' }
+
+ it_behaves_like 'triggers specific event importer',
+ Gitlab::GithubImport::Importer::Events::Closed
+ end
+
+ context "when it's reopened issue event" do
+ let(:event_name) { 'reopened' }
+
+ it_behaves_like 'triggers specific event importer',
+ Gitlab::GithubImport::Importer::Events::Reopened
+ end
+
+ context "when it's labeled issue event" do
+ let(:event_name) { 'labeled' }
+
+ it_behaves_like 'triggers specific event importer',
+ Gitlab::GithubImport::Importer::Events::ChangedLabel
+ end
+
+ context "when it's unlabeled issue event" do
+ let(:event_name) { 'unlabeled' }
+
+ it_behaves_like 'triggers specific event importer',
+ Gitlab::GithubImport::Importer::Events::ChangedLabel
+ end
+
+ context "when it's renamed issue event" do
+ let(:event_name) { 'renamed' }
+
+ it_behaves_like 'triggers specific event importer',
+ Gitlab::GithubImport::Importer::Events::Renamed
+ end
+
+ context "when it's cross-referenced issue event" do
+ let(:event_name) { 'cross-referenced' }
+
+ it_behaves_like 'triggers specific event importer',
+ Gitlab::GithubImport::Importer::Events::CrossReferenced
+ end
+
+ context "when it's unknown issue event" do
+ let(:event_name) { 'fake' }
+
+ it 'logs warning and skips' do
+ expect(Gitlab::GithubImport::Logger).to receive(:debug)
+ .with(
+ message: 'UNSUPPORTED_EVENT_TYPE',
+ event_type: issue_event.event,
+ event_github_id: issue_event.id
+ )
+
+ importer.execute
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
index 2a06983417d..570d26cdf2d 100644
--- a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
@@ -131,6 +131,7 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redi
title: 'My Issue',
author_id: user.id,
project_id: project.id,
+ namespace_id: project.project_namespace_id,
description: 'This is my issue',
milestone_id: milestone.id,
state_id: 1,
@@ -160,6 +161,7 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redi
title: 'My Issue',
author_id: project.creator_id,
project_id: project.id,
+ namespace_id: project.project_namespace_id,
description: "*Created by: alice*\n\nThis is my issue",
milestone_id: milestone.id,
state_id: 1,
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
new file mode 100644
index 00000000000..087faeffe02
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter do
+ let(:client) { double }
+
+ let_it_be(:project) { create(:project, :import_started, import_source: 'http://somegithub.com') }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ subject { described_class.new(project, client, parallel: parallel) }
+
+ let(:parallel) { true }
+
+ it { is_expected.to include_module(Gitlab::GithubImport::ParallelScheduling) }
+
+ describe '#importer_class' do
+ it { expect(subject.importer_class).to eq(Gitlab::GithubImport::Importer::IssueEventImporter) }
+ end
+
+ describe '#representation_class' do
+ it { expect(subject.representation_class).to eq(Gitlab::GithubImport::Representation::IssueEvent) }
+ end
+
+ describe '#sidekiq_worker_class' do
+ it { expect(subject.sidekiq_worker_class).to eq(Gitlab::GithubImport::ImportIssueEventWorker) }
+ end
+
+ describe '#object_type' do
+ it { expect(subject.object_type).to eq(:issue_event) }
+ end
+
+ describe '#collection_method' do
+ it { expect(subject.collection_method).to eq(:issue_timeline) }
+ end
+
+ describe '#page_counter_id' do
+ it { expect(subject.page_counter_id(issue)).to eq("issues/#{issue.iid}/issue_timeline") }
+ end
+
+ describe '#id_for_already_imported_cache' do
+ let(:event) { instance_double('Event', id: 1) }
+
+ it { expect(subject.id_for_already_imported_cache(event)).to eq(1) }
+ end
+
+ describe '#collection_options' do
+ it do
+ expect(subject.collection_options)
+ .to eq({ state: 'all', sort: 'created', direction: 'asc' })
+ end
+ end
+
+ describe '#each_object_to_import', :clean_gitlab_redis_cache do
+ let(:issue_event) do
+ struct = Struct.new(:id, :event, :created_at, :issue_db_id, keyword_init: true)
+ struct.new(id: rand(10), event: 'closed', created_at: '2022-04-26 18:30:53 UTC')
+ end
+
+ let(:page) do
+ instance_double(
+ Gitlab::GithubImport::Client::Page,
+ number: 1, objects: [issue_event]
+ )
+ end
+
+ let(:page_counter) { instance_double(Gitlab::GithubImport::PageCounter) }
+
+ before do
+ allow(client).to receive(:each_page)
+ .once
+ .with(
+ :issue_timeline,
+ project.import_source,
+ issue.iid,
+ { state: 'all', sort: 'created', direction: 'asc', page: 1 }
+ ).and_yield(page)
+ end
+
+ it 'imports each issue event page by page' do
+ counter = 0
+ subject.each_object_to_import do |object|
+ expect(object).to eq issue_event
+ expect(issue_event.issue_db_id).to eq issue.id
+ counter += 1
+ end
+ expect(counter).to eq 1
+ end
+
+ it 'triggers page number increment' do
+ expect(Gitlab::GithubImport::PageCounter)
+ .to receive(:new).with(project, 'issues/1/issue_timeline')
+ .and_return(page_counter)
+ expect(page_counter).to receive(:current).and_return(1)
+ expect(page_counter)
+ .to receive(:set).with(page.number).and_return(true)
+
+ counter = 0
+ subject.each_object_to_import { counter += 1 }
+ expect(counter).to eq 1
+ end
+
+ context 'when page is already processed' do
+ before do
+ page_counter = Gitlab::GithubImport::PageCounter.new(
+ project, subject.page_counter_id(issue)
+ )
+ page_counter.set(page.number)
+ end
+
+ it "doesn't process this page" do
+ counter = 0
+ subject.each_object_to_import { counter += 1 }
+ expect(counter).to eq 0
+ end
+ end
+
+ context 'when event is already processed' do
+ it "doesn't process this event" do
+ subject.mark_as_imported(issue_event)
+
+ counter = 0
+ subject.each_object_to_import { counter += 1 }
+ expect(counter).to eq 0
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/markdown_text_spec.rb b/spec/lib/gitlab/github_import/markdown_text_spec.rb
index 2d159580b5f..ad45469a4c3 100644
--- a/spec/lib/gitlab/github_import/markdown_text_spec.rb
+++ b/spec/lib/gitlab/github_import/markdown_text_spec.rb
@@ -12,6 +12,54 @@ RSpec.describe Gitlab::GithubImport::MarkdownText do
end
end
+ describe '.convert_ref_links' do
+ let_it_be(:project) { create(:project) }
+
+ let(:paragraph) { FFaker::Lorem.paragraph }
+ let(:sentence) { FFaker::Lorem.sentence }
+ let(:issue_id) { rand(100) }
+ let(:pull_id) { rand(100) }
+
+ let(:text_in) do
+ <<-TEXT
+ #{paragraph}
+ https://github.com/#{project.import_source}/issues/#{issue_id}
+ #{sentence}
+ https://github.com/#{project.import_source}/pull/#{pull_id}
+ TEXT
+ end
+
+ let(:text_out) do
+ <<-TEXT
+ #{paragraph}
+ http://localhost/#{project.full_path}/-/issues/#{issue_id}
+ #{sentence}
+ http://localhost/#{project.full_path}/-/merge_requests/#{pull_id}
+ TEXT
+ end
+
+ it { expect(described_class.convert_ref_links(text_in, project)).to eq text_out }
+
+ context 'when GitHub EE with a custom domain name' do
+ let(:github_domain) { 'https://custom.github.com/' }
+ let(:text_in) do
+ <<-TEXT
+ #{paragraph}
+ #{github_domain}#{project.import_source}/issues/#{issue_id}
+ #{sentence}
+ #{github_domain}#{project.import_source}/pull/#{pull_id}
+ TEXT
+ end
+
+ before do
+ allow(Gitlab::Auth::OAuth::Provider)
+ .to receive(:config_for).with('github').and_return({ 'url' => github_domain })
+ end
+
+ it { expect(described_class.convert_ref_links(text_in, project)).to eq text_out }
+ end
+ end
+
describe '#to_s' do
it 'returns the text when the author was found' do
author = double(:author, login: 'Alice')
diff --git a/spec/lib/gitlab/github_import/representation/issue_event_spec.rb b/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
new file mode 100644
index 00000000000..23da8276f64
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
@@ -0,0 +1,156 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
+ shared_examples 'an IssueEvent' do
+ it 'returns an instance of IssueEvent' do
+ expect(issue_event).to be_an_instance_of(described_class)
+ end
+
+ context 'the returned IssueEvent' do
+ it 'includes the issue event id' do
+ expect(issue_event.id).to eq(6501124486)
+ end
+
+ it 'includes the issue event "event"' do
+ expect(issue_event.event).to eq('closed')
+ end
+
+ it 'includes the issue event commit_id' do
+ expect(issue_event.commit_id).to eq('570e7b2abdd848b95f2f578043fc23bd6f6fd24d')
+ end
+
+ it 'includes the issue event source' do
+ expect(issue_event.source).to eq({ type: 'issue', id: 123456 })
+ end
+
+ it 'includes the issue_db_id' do
+ expect(issue_event.issue_db_id).to eq(100500)
+ end
+
+ context 'when actor data present' do
+ it 'includes the actor details' do
+ expect(issue_event.actor)
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+
+ expect(issue_event.actor.id).to eq(4)
+ expect(issue_event.actor.login).to eq('alice')
+ end
+ end
+
+ context 'when actor data is empty' do
+ let(:with_actor) { false }
+
+ it 'does not return such info' do
+ expect(issue_event.actor).to eq nil
+ end
+ end
+
+ context 'when label data is present' do
+ it 'includes the label_title' do
+ expect(issue_event.label_title).to eq('label title')
+ end
+ end
+
+ context 'when label data is empty' do
+ let(:with_label) { false }
+
+ it 'does not return such info' do
+ expect(issue_event.label_title).to eq nil
+ end
+ end
+
+ context 'when rename field is present' do
+ it 'includes the old_title and new_title fields' do
+ expect(issue_event.old_title).to eq('old title')
+ expect(issue_event.new_title).to eq('new title')
+ end
+ end
+
+ context 'when rename field is empty' do
+ let(:with_rename) { false }
+
+ it 'does not return such info' do
+ expect(issue_event.old_title).to eq nil
+ expect(issue_event.new_title).to eq nil
+ end
+ end
+
+ it 'includes the created timestamp' do
+ expect(issue_event.created_at).to eq('2022-04-26 18:30:53 UTC')
+ end
+ end
+
+ describe '#github_identifiers' do
+ it 'returns a hash with needed identifiers' do
+ expect(issue_event.github_identifiers).to eq({ id: 6501124486 })
+ end
+ end
+ end
+
+ describe '.from_api_response' do
+ let(:response) do
+ event_resource = Struct.new(
+ :id, :node_id, :url, :actor, :event, :commit_id, :commit_url, :label,
+ :rename, :issue_db_id, :created_at, :performed_via_github_app, :source,
+ keyword_init: true
+ )
+ user_resource = Struct.new(:id, :login, keyword_init: true)
+ event_resource.new(
+ id: 6501124486,
+ node_id: 'CE_lADOHK9fA85If7x0zwAAAAGDf0mG',
+ url: 'https://api.github.com/repos/elhowm/test-import/issues/events/6501124486',
+ actor: with_actor ? user_resource.new(id: 4, login: 'alice') : nil,
+ event: 'closed',
+ commit_id: '570e7b2abdd848b95f2f578043fc23bd6f6fd24d',
+ commit_url: 'https://api.github.com/repos/octocat/Hello-World/commits'\
+ '/570e7b2abdd848b95f2f578043fc23bd6f6fd24d',
+ rename: with_rename ? { from: 'old title', to: 'new title' } : nil,
+ source: { type: 'issue', id: 123456 },
+ issue_db_id: 100500,
+ label: with_label ? { name: 'label title' } : nil,
+ created_at: '2022-04-26 18:30:53 UTC',
+ performed_via_github_app: nil
+ )
+ end
+
+ let(:with_actor) { true }
+ let(:with_label) { true }
+ let(:with_rename) { true }
+
+ it_behaves_like 'an IssueEvent' do
+ let(:issue_event) { described_class.from_api_response(response) }
+ end
+ end
+
+ describe '.from_json_hash' do
+ it_behaves_like 'an IssueEvent' do
+ let(:hash) do
+ {
+ 'id' => 6501124486,
+ 'node_id' => 'CE_lADOHK9fA85If7x0zwAAAAGDf0mG',
+ 'url' => 'https://api.github.com/repos/elhowm/test-import/issues/events/6501124486',
+ 'actor' => (with_actor ? { 'id' => 4, 'login' => 'alice' } : nil),
+ 'event' => 'closed',
+ 'commit_id' => '570e7b2abdd848b95f2f578043fc23bd6f6fd24d',
+ 'commit_url' =>
+ 'https://api.github.com/repos/octocat/Hello-World/commits/570e7b2abdd848b95f2f578043fc23bd6f6fd24d',
+ 'label_title' => (with_label ? 'label title' : nil),
+ 'old_title' => with_rename ? 'old title' : nil,
+ 'new_title' => with_rename ? 'new title' : nil,
+ 'source' => { 'type' => 'issue', 'id' => 123456 },
+ "issue_db_id" => 100500,
+ 'created_at' => '2022-04-26 18:30:53 UTC',
+ 'performed_via_github_app' => nil
+ }
+ end
+
+ let(:with_actor) { true }
+ let(:with_label) { true }
+ let(:with_rename) { true }
+
+ let(:issue_event) { described_class.from_json_hash(hash) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/single_endpoint_notes_importing_spec.rb b/spec/lib/gitlab/github_import/single_endpoint_notes_importing_spec.rb
new file mode 100644
index 00000000000..64dbc939348
--- /dev/null
+++ b/spec/lib/gitlab/github_import/single_endpoint_notes_importing_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::SingleEndpointNotesImporting do
+ let(:importer_class) do
+ Class.new do
+ def self.name
+ 'MyImporter'
+ end
+
+ include(Gitlab::GithubImport::SingleEndpointNotesImporting)
+ end
+ end
+
+ let(:importer_instance) { importer_class.new }
+
+ describe '#parent_collection' do
+ it { expect { importer_instance.parent_collection }.to raise_error(NotImplementedError) }
+ end
+
+ describe '#parent_imported_cache_key' do
+ it { expect { importer_instance.parent_imported_cache_key }.to raise_error(NotImplementedError) }
+ end
+
+ describe '#page_counter_id' do
+ it { expect { importer_instance.page_counter_id(build(:merge_request)) }.to raise_error(NotImplementedError) }
+ end
+end
diff --git a/spec/lib/gitlab/gitlab_import/importer_spec.rb b/spec/lib/gitlab/gitlab_import/importer_spec.rb
index eb4c404e454..984c690add6 100644
--- a/spec/lib/gitlab/gitlab_import/importer_spec.rb
+++ b/spec/lib/gitlab/gitlab_import/importer_spec.rb
@@ -21,8 +21,8 @@ RSpec.describe Gitlab::GitlabImport::Importer do
'name' => 'John Doe'
}
}
- ])
- stub_request('issues/3/notes', [])
+ ].to_json)
+ stub_request('issues/3/notes', [].to_json)
end
it 'persists issues' do
diff --git a/spec/lib/gitlab/gpg/commit_spec.rb b/spec/lib/gitlab/gpg/commit_spec.rb
index 9c399e78d80..919335bc9fa 100644
--- a/spec/lib/gitlab/gpg/commit_spec.rb
+++ b/spec/lib/gitlab/gpg/commit_spec.rb
@@ -3,6 +3,34 @@
require 'spec_helper'
RSpec.describe Gitlab::Gpg::Commit do
+ let_it_be(:project) { create(:project, :repository, path: 'sample-project') }
+
+ let(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
+ let(:committer_email) { GpgHelpers::User1.emails.first }
+ let(:user_email) { committer_email }
+ let(:public_key) { GpgHelpers::User1.public_key }
+ let(:user) { create(:user, email: user_email) }
+ let(:commit) { create(:commit, project: project, sha: commit_sha, committer_email: committer_email) }
+ let(:crypto) { instance_double(GPGME::Crypto) }
+ let(:mock_signature_data?) { true }
+ # gpg_keys must be pre-loaded so that they can be found during signature verification.
+ let!(:gpg_key) { create(:gpg_key, key: public_key, user: user) }
+
+ let(:signature_data) do
+ [
+ GpgHelpers::User1.signed_commit_signature,
+ GpgHelpers::User1.signed_commit_base_data
+ ]
+ end
+
+ before do
+ if mock_signature_data?
+ allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
+ .with(Gitlab::Git::Repository, commit_sha)
+ .and_return(signature_data)
+ end
+ end
+
describe '#signature' do
shared_examples 'returns the cached signature on second call' do
it 'returns the cached signature on second call' do
@@ -17,11 +45,8 @@ RSpec.describe Gitlab::Gpg::Commit do
end
end
- let!(:project) { create :project, :repository, path: 'sample-project' }
- let!(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
-
context 'unsigned commit' do
- let!(:commit) { create :commit, project: project, sha: commit_sha }
+ let(:signature_data) { nil }
it 'returns nil' do
expect(described_class.new(commit).signature).to be_nil
@@ -29,20 +54,12 @@ RSpec.describe Gitlab::Gpg::Commit do
end
context 'invalid signature' do
- let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User1.emails.first }
-
- let!(:user) { create(:user, email: GpgHelpers::User1.emails.first) }
-
- before do
- allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
- .with(Gitlab::Git::Repository, commit_sha)
- .and_return(
- [
- # Corrupt the key
- GpgHelpers::User1.signed_commit_signature.tr('=', 'a'),
- GpgHelpers::User1.signed_commit_base_data
- ]
- )
+ let(:signature_data) do
+ [
+ # Corrupt the key
+ GpgHelpers::User1.signed_commit_signature.tr('=', 'a'),
+ GpgHelpers::User1.signed_commit_base_data
+ ]
end
it 'returns nil' do
@@ -53,25 +70,6 @@ RSpec.describe Gitlab::Gpg::Commit do
context 'known key' do
context 'user matches the key uid' do
context 'user email matches the email committer' do
- let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User1.emails.first }
-
- let!(:user) { create(:user, email: GpgHelpers::User1.emails.first) }
-
- let!(:gpg_key) do
- create :gpg_key, key: GpgHelpers::User1.public_key, user: user
- end
-
- before do
- allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
- .with(Gitlab::Git::Repository, commit_sha)
- .and_return(
- [
- GpgHelpers::User1.signed_commit_signature,
- GpgHelpers::User1.signed_commit_base_data
- ]
- )
- end
-
it 'returns a valid signature' do
signature = described_class.new(commit).signature
@@ -112,32 +110,13 @@ RSpec.describe Gitlab::Gpg::Commit do
end
context 'valid key signed using recent version of Gnupg' do
- let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User1.emails.first }
-
- let!(:user) { create(:user, email: GpgHelpers::User1.emails.first) }
-
- let!(:gpg_key) do
- create :gpg_key, key: GpgHelpers::User1.public_key, user: user
- end
-
- let!(:crypto) { instance_double(GPGME::Crypto) }
-
before do
- fake_signature = [
- GpgHelpers::User1.signed_commit_signature,
- GpgHelpers::User1.signed_commit_base_data
- ]
-
- allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
- .with(Gitlab::Git::Repository, commit_sha)
- .and_return(fake_signature)
- end
-
- it 'returns a valid signature' do
verified_signature = double('verified-signature', fingerprint: GpgHelpers::User1.fingerprint, valid?: true)
allow(GPGME::Crypto).to receive(:new).and_return(crypto)
allow(crypto).to receive(:verify).and_yield(verified_signature)
+ end
+ it 'returns a valid signature' do
signature = described_class.new(commit).signature
expect(signature).to have_attributes(
@@ -153,33 +132,14 @@ RSpec.describe Gitlab::Gpg::Commit do
end
context 'valid key signed using older version of Gnupg' do
- let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User1.emails.first }
-
- let!(:user) { create(:user, email: GpgHelpers::User1.emails.first) }
-
- let!(:gpg_key) do
- create :gpg_key, key: GpgHelpers::User1.public_key, user: user
- end
-
- let!(:crypto) { instance_double(GPGME::Crypto) }
-
before do
- fake_signature = [
- GpgHelpers::User1.signed_commit_signature,
- GpgHelpers::User1.signed_commit_base_data
- ]
-
- allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
- .with(Gitlab::Git::Repository, commit_sha)
- .and_return(fake_signature)
- end
-
- it 'returns a valid signature' do
keyid = GpgHelpers::User1.fingerprint.last(16)
verified_signature = double('verified-signature', fingerprint: keyid, valid?: true)
allow(GPGME::Crypto).to receive(:new).and_return(crypto)
allow(crypto).to receive(:verify).and_yield(verified_signature)
+ end
+ it 'returns a valid signature' do
signature = described_class.new(commit).signature
expect(signature).to have_attributes(
@@ -195,32 +155,13 @@ RSpec.describe Gitlab::Gpg::Commit do
end
context 'commit with multiple signatures' do
- let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User1.emails.first }
-
- let!(:user) { create(:user, email: GpgHelpers::User1.emails.first) }
-
- let!(:gpg_key) do
- create :gpg_key, key: GpgHelpers::User1.public_key, user: user
- end
-
- let!(:crypto) { instance_double(GPGME::Crypto) }
-
before do
- fake_signature = [
- GpgHelpers::User1.signed_commit_signature,
- GpgHelpers::User1.signed_commit_base_data
- ]
-
- allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
- .with(Gitlab::Git::Repository, commit_sha)
- .and_return(fake_signature)
- end
-
- it 'returns an invalid signatures error' do
verified_signature = double('verified-signature', fingerprint: GpgHelpers::User1.fingerprint, valid?: true)
allow(GPGME::Crypto).to receive(:new).and_return(crypto)
allow(crypto).to receive(:verify).and_yield(verified_signature).and_yield(verified_signature)
+ end
+ it 'returns an invalid signatures error' do
signature = described_class.new(commit).signature
expect(signature).to have_attributes(
@@ -236,27 +177,18 @@ RSpec.describe Gitlab::Gpg::Commit do
end
context 'commit signed with a subkey' do
- let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User3.emails.first }
-
- let!(:user) { create(:user, email: GpgHelpers::User3.emails.first) }
-
- let!(:gpg_key) do
- create :gpg_key, key: GpgHelpers::User3.public_key, user: user
- end
+ let(:committer_email) { GpgHelpers::User3.emails.first }
+ let(:public_key) { GpgHelpers::User3.public_key }
let(:gpg_key_subkey) do
gpg_key.subkeys.find_by(fingerprint: GpgHelpers::User3.subkey_fingerprints.last)
end
- before do
- allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
- .with(Gitlab::Git::Repository, commit_sha)
- .and_return(
- [
- GpgHelpers::User3.signed_commit_signature,
- GpgHelpers::User3.signed_commit_base_data
- ]
- )
+ let(:signature_data) do
+ [
+ GpgHelpers::User3.signed_commit_signature,
+ GpgHelpers::User3.signed_commit_base_data
+ ]
end
it 'returns a valid signature' do
@@ -275,7 +207,7 @@ RSpec.describe Gitlab::Gpg::Commit do
end
context 'user email does not match the committer email, but is the same user' do
- let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User2.emails.first }
+ let(:committer_email) { GpgHelpers::User2.emails.first }
let(:user) do
create(:user, email: GpgHelpers::User1.emails.first).tap do |user|
@@ -283,21 +215,6 @@ RSpec.describe Gitlab::Gpg::Commit do
end
end
- let!(:gpg_key) do
- create :gpg_key, key: GpgHelpers::User1.public_key, user: user
- end
-
- before do
- allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
- .with(Gitlab::Git::Repository, commit_sha)
- .and_return(
- [
- GpgHelpers::User1.signed_commit_signature,
- GpgHelpers::User1.signed_commit_base_data
- ]
- )
- end
-
it 'returns an invalid signature' do
expect(described_class.new(commit).signature).to have_attributes(
commit_sha: commit_sha,
@@ -314,24 +231,8 @@ RSpec.describe Gitlab::Gpg::Commit do
end
context 'user email does not match the committer email' do
- let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User2.emails.first }
-
- let(:user) { create(:user, email: GpgHelpers::User1.emails.first) }
-
- let!(:gpg_key) do
- create :gpg_key, key: GpgHelpers::User1.public_key, user: user
- end
-
- before do
- allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
- .with(Gitlab::Git::Repository, commit_sha)
- .and_return(
- [
- GpgHelpers::User1.signed_commit_signature,
- GpgHelpers::User1.signed_commit_base_data
- ]
- )
- end
+ let(:committer_email) { GpgHelpers::User2.emails.first }
+ let(:user_email) { GpgHelpers::User1.emails.first }
it 'returns an invalid signature' do
expect(described_class.new(commit).signature).to have_attributes(
@@ -350,24 +251,8 @@ RSpec.describe Gitlab::Gpg::Commit do
end
context 'user does not match the key uid' do
- let!(:commit) { create :commit, project: project, sha: commit_sha }
-
- let(:user) { create(:user, email: GpgHelpers::User2.emails.first) }
-
- let!(:gpg_key) do
- create :gpg_key, key: GpgHelpers::User1.public_key, user: user
- end
-
- before do
- allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
- .with(Gitlab::Git::Repository, commit_sha)
- .and_return(
- [
- GpgHelpers::User1.signed_commit_signature,
- GpgHelpers::User1.signed_commit_base_data
- ]
- )
- end
+ let(:user_email) { GpgHelpers::User2.emails.first }
+ let(:public_key) { GpgHelpers::User1.public_key }
it 'returns an invalid signature' do
expect(described_class.new(commit).signature).to have_attributes(
@@ -386,18 +271,7 @@ RSpec.describe Gitlab::Gpg::Commit do
end
context 'unknown key' do
- let!(:commit) { create :commit, project: project, sha: commit_sha }
-
- before do
- allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
- .with(Gitlab::Git::Repository, commit_sha)
- .and_return(
- [
- GpgHelpers::User1.signed_commit_signature,
- GpgHelpers::User1.signed_commit_base_data
- ]
- )
- end
+ let(:gpg_key) { nil }
it 'returns an invalid signature' do
expect(described_class.new(commit).signature).to have_attributes(
@@ -415,15 +289,15 @@ RSpec.describe Gitlab::Gpg::Commit do
end
context 'multiple commits with signatures' do
- let(:first_signature) { create(:gpg_signature) }
-
- let(:gpg_key) { create(:gpg_key, key: GpgHelpers::User2.public_key) }
- let(:second_signature) { create(:gpg_signature, gpg_key: gpg_key) }
+ let(:mock_signature_data?) { false }
+ let!(:first_signature) { create(:gpg_signature) }
+ let!(:gpg_key) { create(:gpg_key, key: GpgHelpers::User2.public_key) }
+ let!(:second_signature) { create(:gpg_signature, gpg_key: gpg_key) }
let!(:first_commit) { create(:commit, project: project, sha: first_signature.commit_sha) }
let!(:second_commit) { create(:commit, project: project, sha: second_signature.commit_sha) }
- let(:commits) do
+ let!(:commits) do
[first_commit, second_commit].map do |commit|
gpg_commit = described_class.new(commit)
@@ -442,4 +316,21 @@ RSpec.describe Gitlab::Gpg::Commit do
end
end
end
+
+ describe '#update_signature!' do
+ let!(:gpg_key) { nil }
+
+ let(:signature) { described_class.new(commit).signature }
+
+ it 'updates signature record' do
+ signature
+
+ create(:gpg_key, key: public_key, user: user)
+
+ stored_signature = CommitSignatures::GpgSignature.find_by_commit_sha(commit_sha)
+ expect { described_class.new(commit).update_signature!(stored_signature) }.to(
+ change { signature.reload.verification_status }.from('unknown_key').to('verified')
+ )
+ end
+ end
end
diff --git a/spec/lib/gitlab/grape_logging/loggers/response_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/response_logger_spec.rb
new file mode 100644
index 00000000000..94e880d979d
--- /dev/null
+++ b/spec/lib/gitlab/grape_logging/loggers/response_logger_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GrapeLogging::Loggers::ResponseLogger do
+ let(:logger) { described_class.new }
+
+ describe '#parameters' do
+ let(:response1) { 'response1' }
+ let(:response) { [response1] }
+
+ subject { logger.parameters(nil, response) }
+
+ it { expect(subject).to eq({ response_bytes: response1.bytesize }) }
+
+ context 'with multiple response parts' do
+ let(:response2) { 'response2' }
+ let(:response) { [response1, response2] }
+
+ it { expect(subject).to eq({ response_bytes: response1.bytesize + response2.bytesize }) }
+ end
+
+ context 'with log_response_length disabled' do
+ before do
+ stub_feature_flags(log_response_length: false)
+ end
+
+ it { expect(subject).to eq({}) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
index 97613edee5e..8a2b5ae0d38 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
@@ -79,7 +79,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_updated_at, column_order_created_at, column_order_id])) }
it 'returns the encoded value of the order' do
- expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.strftime('%Y-%m-%d %H:%M:%S.%N %Z'))
+ expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s(:inspect))
end
end
end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
index 61a79d90546..6574b3e3131 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
@@ -92,7 +92,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:nodes) { Project.order(:updated_at) }
it 'returns the encoded value of the order' do
- expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.strftime('%Y-%m-%d %H:%M:%S.%N %Z'))
+ expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s(:inspect))
end
it 'includes the :id even when not specified in the order' do
@@ -104,7 +104,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:nodes) { Project.order(:updated_at).order(:created_at) }
it 'returns the encoded value of the order' do
- expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.strftime('%Y-%m-%d %H:%M:%S.%N %Z'))
+ expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s(:inspect))
end
end
@@ -112,7 +112,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:nodes) { Project.order(Arel.sql('projects.updated_at IS NULL')).order(:updated_at).order(:id) }
it 'returns the encoded value of the order' do
- expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.strftime('%Y-%m-%d %H:%M:%S.%N %Z'))
+ expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s(:inspect))
end
end
end
diff --git a/spec/lib/gitlab/harbor/client_spec.rb b/spec/lib/gitlab/harbor/client_spec.rb
index bc5b593370a..4e80b8b53e3 100644
--- a/spec/lib/gitlab/harbor/client_spec.rb
+++ b/spec/lib/gitlab/harbor/client_spec.rb
@@ -3,12 +3,277 @@
require 'spec_helper'
RSpec.describe Gitlab::Harbor::Client do
- let(:harbor_integration) { build(:harbor_integration) }
+ let_it_be(:harbor_integration) { create(:harbor_integration) }
subject(:client) { described_class.new(harbor_integration) }
+ describe '#initialize' do
+ context 'when integration is nil' do
+ let(:harbor_integration) { nil }
+
+ it 'raises ConfigError' do
+ expect { client }.to raise_error(described_class::ConfigError)
+ end
+ end
+
+ context 'when integration is provided' do
+ it 'is initialized successfully' do
+ expect { client }.not_to raise_error
+ end
+ end
+ end
+
+ describe '#get_repositories' do
+ context 'with valid params' do
+ let(:mock_response) do
+ [
+ {
+ "artifact_count": 1,
+ "creation_time": "2022-03-13T09:36:43.240Z",
+ "id": 1,
+ "name": "jihuprivate/busybox",
+ "project_id": 4,
+ "pull_count": 0,
+ "update_time": "2022-03-13T09:36:43.240Z"
+ }
+ ]
+ end
+
+ let(:mock_repositories) do
+ {
+ body: mock_response,
+ total_count: 2
+ }
+ end
+
+ before do
+ stub_request(:get, "https://demo.goharbor.io/api/v2.0/projects/testproject/repositories")
+ .with(
+ headers: {
+ 'Authorization': 'Basic aGFyYm9ydXNlcm5hbWU6aGFyYm9ycGFzc3dvcmQ=',
+ 'Content-Type': 'application/json'
+ })
+ .to_return(status: 200, body: mock_response.to_json, headers: { "x-total-count": 2 })
+ end
+
+ it 'get repositories' do
+ expect(client.get_repositories({}).deep_stringify_keys).to eq(mock_repositories.deep_stringify_keys)
+ end
+ end
+
+ context 'when harbor project does not exist' do
+ before do
+ stub_request(:get, "https://demo.goharbor.io/api/v2.0/projects/testproject/repositories")
+ .with(
+ headers: {
+ 'Authorization': 'Basic aGFyYm9ydXNlcm5hbWU6aGFyYm9ycGFzc3dvcmQ=',
+ 'Content-Type': 'application/json'
+ })
+ .to_return(status: 404, body: {}.to_json)
+ end
+
+ it 'raises Gitlab::Harbor::Client::Error' do
+ expect do
+ client.get_repositories({})
+ end.to raise_error(Gitlab::Harbor::Client::Error, 'request error')
+ end
+ end
+
+ context 'with invalid response' do
+ before do
+ stub_request(:get, "https://demo.goharbor.io/api/v2.0/projects/testproject/repositories")
+ .with(
+ headers: {
+ 'Authorization': 'Basic aGFyYm9ydXNlcm5hbWU6aGFyYm9ycGFzc3dvcmQ=',
+ 'Content-Type': 'application/json'
+ })
+ .to_return(status: 200, body: '[not json}')
+ end
+
+ it 'raises Gitlab::Harbor::Client::Error' do
+ expect do
+ client.get_repositories({})
+ end.to raise_error(Gitlab::Harbor::Client::Error, 'invalid response format')
+ end
+ end
+ end
+
+ describe '#get_artifacts' do
+ context 'with valid params' do
+ let(:mock_response) do
+ [
+ {
+ "digest": "sha256:661e8e44e5d7290fbd42d0495ab4ff6fdf1ad251a9f358969b3264a22107c14d",
+ "icon": "sha256:0048162a053eef4d4ce3fe7518615bef084403614f8bca43b40ae2e762e11e06",
+ "id": 1,
+ "project_id": 1,
+ "pull_time": "0001-01-01T00:00:00.000Z",
+ "push_time": "2022-04-23T08:04:08.901Z",
+ "repository_id": 1,
+ "size": 126745886,
+ "tags": [
+ {
+ "artifact_id": 1,
+ "id": 1,
+ "immutable": false,
+ "name": "2",
+ "pull_time": "0001-01-01T00:00:00.000Z",
+ "push_time": "2022-04-23T08:04:08.920Z",
+ "repository_id": 1,
+ "signed": false
+ }
+ ],
+ "type": "IMAGE"
+ }
+ ]
+ end
+
+ let(:mock_artifacts) do
+ {
+ body: mock_response,
+ total_count: 1
+ }
+ end
+
+ before do
+ stub_request(:get, "https://demo.goharbor.io/api/v2.0/projects/testproject/repositories/test/artifacts")
+ .with(
+ headers: {
+ 'Authorization': 'Basic aGFyYm9ydXNlcm5hbWU6aGFyYm9ycGFzc3dvcmQ=',
+ 'Content-Type': 'application/json'
+ })
+ .to_return(status: 200, body: mock_response.to_json, headers: { "x-total-count": 1 })
+ end
+
+ it 'gets artifacts' do
+ expect(client.get_artifacts({ repository_name: 'test' })
+ .deep_stringify_keys).to eq(mock_artifacts.deep_stringify_keys)
+ end
+ end
+
+ context 'when harbor repository does not exist' do
+ before do
+ stub_request(:get, "https://demo.goharbor.io/api/v2.0/projects/testproject/repositories/test/artifacts")
+ .with(
+ headers: {
+ 'Authorization': 'Basic aGFyYm9ydXNlcm5hbWU6aGFyYm9ycGFzc3dvcmQ=',
+ 'Content-Type': 'application/json'
+ })
+ .to_return(status: 404, body: {}.to_json)
+ end
+
+ it 'raises Gitlab::Harbor::Client::Error' do
+ expect do
+ client.get_artifacts({ repository_name: 'test' })
+ end.to raise_error(Gitlab::Harbor::Client::Error, 'request error')
+ end
+ end
+
+ context 'with invalid response' do
+ before do
+ stub_request(:get, "https://demo.goharbor.io/api/v2.0/projects/testproject/repositories/test/artifacts")
+ .with(
+ headers: {
+ 'Authorization': 'Basic aGFyYm9ydXNlcm5hbWU6aGFyYm9ycGFzc3dvcmQ=',
+ 'Content-Type': 'application/json'
+ })
+ .to_return(status: 200, body: '[not json}')
+ end
+
+ it 'raises Gitlab::Harbor::Client::Error' do
+ expect do
+ client.get_artifacts({ repository_name: 'test' })
+ end.to raise_error(Gitlab::Harbor::Client::Error, 'invalid response format')
+ end
+ end
+ end
+
+ describe '#get_tags' do
+ context 'with valid params' do
+ let(:mock_response) do
+ [
+ {
+ "artifact_id": 1,
+ "id": 1,
+ "immutable": false,
+ "name": "2",
+ "pull_time": "0001-01-01T00:00:00.000Z",
+ "push_time": "2022-04-23T08:04:08.920Z",
+ "repository_id": 1,
+ "signed": false
+ }
+ ]
+ end
+
+ let(:mock_tags) do
+ {
+ body: mock_response,
+ total_count: 1
+ }
+ end
+
+ before do
+ stub_request(:get, "https://demo.goharbor.io/api/v2.0/projects/testproject/repositories/test/artifacts/1/tags")
+ .with(
+ headers: {
+ 'Authorization': 'Basic aGFyYm9ydXNlcm5hbWU6aGFyYm9ycGFzc3dvcmQ=',
+ 'Content-Type': 'application/json'
+ })
+ .to_return(status: 200, body: mock_response.to_json, headers: { "x-total-count": 1 })
+ end
+
+ it 'gets tags' do
+ expect(client.get_tags({ repository_name: 'test', artifact_name: '1' })
+ .deep_stringify_keys).to eq(mock_tags.deep_stringify_keys)
+ end
+ end
+
+ context 'when harbor artifact does not exist' do
+ before do
+ stub_request(:get, "https://demo.goharbor.io/api/v2.0/projects/testproject/repositories/test/artifacts/1/tags")
+ .with(
+ headers: {
+ 'Authorization': 'Basic aGFyYm9ydXNlcm5hbWU6aGFyYm9ycGFzc3dvcmQ=',
+ 'Content-Type': 'application/json'
+ })
+ .to_return(status: 404, body: {}.to_json)
+ end
+
+ it 'raises Gitlab::Harbor::Client::Error' do
+ expect do
+ client.get_tags({ repository_name: 'test', artifact_name: '1' })
+ end.to raise_error(Gitlab::Harbor::Client::Error, 'request error')
+ end
+ end
+
+ context 'with invalid response' do
+ before do
+ stub_request(:get, "https://demo.goharbor.io/api/v2.0/projects/testproject/repositories/test/artifacts/1/tags")
+ .with(
+ headers: {
+ 'Authorization': 'Basic aGFyYm9ydXNlcm5hbWU6aGFyYm9ycGFzc3dvcmQ=',
+ 'Content-Type': 'application/json'
+ })
+ .to_return(status: 200, body: '[not json}')
+ end
+
+ it 'raises Gitlab::Harbor::Client::Error' do
+ expect do
+ client.get_tags({ repository_name: 'test', artifact_name: '1' })
+ end.to raise_error(Gitlab::Harbor::Client::Error, 'invalid response format')
+ end
+ end
+ end
+
describe '#ping' do
- let!(:harbor_ping_request) { stub_harbor_request("https://demo.goharbor.io/api/v2.0/ping") }
+ before do
+ stub_request(:get, "https://demo.goharbor.io/api/v2.0/ping")
+ .with(
+ headers: {
+ 'Content-Type': 'application/json'
+ })
+ .to_return(status: 200, body: 'pong')
+ end
it "calls api/v2.0/ping successfully" do
expect(client.ping).to eq(success: true)
diff --git a/spec/lib/gitlab/harbor/query_spec.rb b/spec/lib/gitlab/harbor/query_spec.rb
new file mode 100644
index 00000000000..dcb9a16b27b
--- /dev/null
+++ b/spec/lib/gitlab/harbor/query_spec.rb
@@ -0,0 +1,375 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Harbor::Query do
+ let_it_be(:harbor_integration) { create(:harbor_integration) }
+
+ let(:params) { {} }
+
+ subject(:query) { described_class.new(harbor_integration, ActionController::Parameters.new(params)) }
+
+ describe 'Validations' do
+ context 'page' do
+ context 'with valid page' do
+ let(:params) { { page: 1 } }
+
+ it 'initializes successfully' do
+ expect(query.valid?).to eq(true)
+ end
+ end
+
+ context 'with invalid page' do
+ let(:params) { { page: -1 } }
+
+ it 'fails to initialize' do
+ expect(query.valid?).to eq(false)
+ end
+ end
+ end
+
+ context 'limit' do
+ context 'with valid limit' do
+ let(:params) { { limit: 1 } }
+
+ it 'initializes successfully' do
+ expect(query.valid?).to eq(true)
+ end
+ end
+
+ context 'with invalid limit' do
+ context 'with limit less than 0' do
+ let(:params) { { limit: -1 } }
+
+ it 'fails to initialize' do
+ expect(query.valid?).to eq(false)
+ end
+ end
+
+ context 'with limit greater than 25' do
+ let(:params) { { limit: 26 } }
+
+ it 'fails to initialize' do
+ expect(query.valid?).to eq(false)
+ end
+ end
+ end
+ end
+
+ context 'repository_id' do
+ context 'with valid repository_id' do
+ let(:params) { { repository_id: 'test' } }
+
+ it 'initializes successfully' do
+ expect(query.valid?).to eq(true)
+ end
+ end
+
+ context 'with invalid repository_id' do
+ let(:params) { { repository_id: 'test@@' } }
+
+ it 'fails to initialize' do
+ expect(query.valid?).to eq(false)
+ end
+ end
+ end
+
+ context 'artifact_id' do
+ context 'with valid artifact_id' do
+ let(:params) { { artifact_id: 'test' } }
+
+ it 'initializes successfully' do
+ expect(query.valid?).to eq(true)
+ end
+ end
+
+ context 'with invalid artifact_id' do
+ let(:params) { { artifact_id: 'test@@' } }
+
+ it 'fails to initialize' do
+ expect(query.valid?).to eq(false)
+ end
+ end
+ end
+
+ context 'sort' do
+ context 'with valid sort' do
+ let(:params) { { sort: 'creation_time desc' } }
+
+ it 'initializes successfully' do
+ expect(query.valid?).to eq(true)
+ end
+ end
+
+ context 'with invalid sort' do
+ let(:params) { { sort: 'blabla desc' } }
+
+ it 'fails to initialize' do
+ expect(query.valid?).to eq(false)
+ end
+ end
+ end
+
+ context 'search' do
+ context 'with valid search' do
+ let(:params) { { search: 'name=desc' } }
+
+ it 'initializes successfully' do
+ expect(query.valid?).to eq(true)
+ end
+ end
+
+ context 'with invalid search' do
+ let(:params) { { search: 'blabla' } }
+
+ it 'fails to initialize' do
+ expect(query.valid?).to eq(false)
+ end
+ end
+ end
+ end
+
+ describe '#repositories' do
+ let(:response) { { total_count: 0, repositories: [] } }
+
+ def expect_query_option_include(expected_params)
+ expect_next_instance_of(Gitlab::Harbor::Client) do |client|
+ expect(client).to receive(:get_repositories)
+ .with(hash_including(expected_params))
+ .and_return(response)
+ end
+
+ query.repositories
+ end
+
+ context 'when params is {}' do
+ it 'fills default params' do
+ expect_query_option_include(page_size: 10, page: 1)
+ end
+ end
+
+ context 'when params contains options' do
+ let(:params) { { search: 'name=bu', sort: 'creation_time desc', limit: 20, page: 3 } }
+
+ it 'translates params into Harbor API conventions' do
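+ # GitLab-style params are translated to Harbor's format: search 'name=bu' becomes
+ # q 'name=~bu' (Harbor's fuzzy-match syntax), 'creation_time desc' becomes '-creation_time',
+ # and limit maps to page_size.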
+ expect_query_option_include(q: 'name=~bu', sort: '-creation_time', page_size: 20, page: 3)
+ end
+ end
+
+ context 'when params contains invalid sort option' do
+ let(:params) { { search: 'name=bu', sort: 'blabla desc', limit: 20, page: 3 } }
+
+ it 'marks the query as invalid' do
+ expect(query.valid?).to eq(false)
+ end
+ end
+
+ context 'when client.get_repositories returns data' do
+ let(:response_with_data) do
+ {
+ total_count: 1,
+ body:
+ [
+ {
+ "id": 3,
+ "name": "testproject/thirdbusybox",
+ "artifact_count": 1,
+ "creation_time": "2022-03-15T07:12:14.479Z",
+ "update_time": "2022-03-15T07:12:14.479Z",
+ "project_id": 3,
+ "pull_count": 0
+ }.with_indifferent_access
+ ]
+ }
+ end
+
+ it 'returns the right repositories data' do
+ expect_next_instance_of(Gitlab::Harbor::Client) do |client|
+ expect(client).to receive(:get_repositories)
+ .with(hash_including(page_size: 10, page: 1))
+ .and_return(response_with_data)
+ end
+
+ expect(query.repositories.first).to include(
+ "name": "testproject/thirdbusybox",
+ "artifact_count": 1
+ )
+ end
+ end
+ end
+
+ describe '#artifacts' do
+ let(:response) { { total_count: 0, artifacts: [] } }
+
+ def expect_query_option_include(expected_params)
+ expect_next_instance_of(Gitlab::Harbor::Client) do |client|
+ expect(client).to receive(:get_artifacts)
+ .with(hash_including(expected_params))
+ .and_return(response)
+ end
+
+ query.artifacts
+ end
+
+ context 'when params is {}' do
+ it 'fills default params' do
+ expect_query_option_include(page_size: 10, page: 1)
+ end
+ end
+
+ context 'when params contains options' do
+ let(:params) do
+ { search: 'tags=1', repository_id: 'jihuprivate', sort: 'creation_time desc', limit: 20, page: 3 }
+ end
+
+ it 'translates params into Harbor API conventions' do
+ expect_query_option_include(q: 'tags=~1', sort: '-creation_time', page_size: 20, page: 3)
+ end
+ end
+
+ context 'when params contains invalid sort option' do
+ let(:params) { { search: 'tags=1', repository_id: 'jihuprivate', sort: 'blabla desc', limit: 20, page: 3 } }
+
+ it 'marks the query as invalid' do
+ expect(query.valid?).to eq(false)
+ end
+ end
+
+ context 'when client.get_artifacts returns data' do
+ let(:response_with_data) do
+ {
+ total_count: 1,
+ body:
+ [
+ {
+ "digest": "sha256:14d4f50961544fdb669075c442509f194bdc4c0e344bde06e35dbd55af842a38",
+ "icon": "sha256:0048162a053eef4d4ce3fe7518615bef084403614f8bca43b40ae2e762e11e06",
+ "id": 5,
+ "project_id": 14,
+ "push_time": "2022-03-22T09:04:56.170Z",
+ "repository_id": 5,
+ "size": 774790,
+ "tags": [
+ {
+ "artifact_id": 5,
+ "id": 7,
+ "immutable": false,
+ "name": "2",
+ "pull_time": "0001-01-01T00:00:00.000Z",
+ "push_time": "2022-03-22T09:05:04.844Z",
+ "repository_id": 5
+ },
+ {
+ "artifact_id": 5,
+ "id": 6,
+ "immutable": false,
+ "name": "1",
+ "pull_time": "0001-01-01T00:00:00.000Z",
+ "push_time": "2022-03-22T09:04:56.186Z",
+ "repository_id": 5
+ }
+ ],
+ "type": "IMAGE"
+ }.with_indifferent_access
+ ]
+ }
+ end
+
+ it 'returns the right artifacts data' do
+ expect_next_instance_of(Gitlab::Harbor::Client) do |client|
+ expect(client).to receive(:get_artifacts)
+ .with(hash_including(page_size: 10, page: 1))
+ .and_return(response_with_data)
+ end
+
+ artifact = query.artifacts.first
+
+ expect(artifact).to include(
+ "digest": "sha256:14d4f50961544fdb669075c442509f194bdc4c0e344bde06e35dbd55af842a38",
+ "push_time": "2022-03-22T09:04:56.170Z"
+ )
+ expect(artifact["tags"].size).to eq(2)
+ end
+ end
+ end
+
+ describe '#tags' do
+ let(:response) { { total_count: 0, tags: [] } }
+
+ def expect_query_option_include(expected_params)
+ expect_next_instance_of(Gitlab::Harbor::Client) do |client|
+ expect(client).to receive(:get_tags)
+ .with(hash_including(expected_params))
+ .and_return(response)
+ end
+
+ query.tags
+ end
+
+ context 'when params is {}' do
+ it 'fills default params' do
+ expect_query_option_include(page_size: 10, page: 1)
+ end
+ end
+
+ context 'when params contains options' do
+ let(:params) { { repository_id: 'jihuprivate', sort: 'creation_time desc', limit: 20, page: 3 } }
+
+ it 'translates params into Harbor API conventions' do
+ expect_query_option_include(sort: '-creation_time', page_size: 20, page: 3)
+ end
+ end
+
+ context 'when params contains invalid sort option' do
+ let(:params) { { repository_id: 'jihuprivate', artifact_id: 'test', sort: 'blabla desc', limit: 20, page: 3 } }
+
+ it 'marks the query as invalid' do
+ expect(query.valid?).to eq(false)
+ end
+ end
+
+ context 'when client.get_tags returns data' do
+ let(:response_with_data) do
+ {
+ total_count: 2,
+ body:
+ [
+ {
+ "artifact_id": 5,
+ "id": 7,
+ "immutable": false,
+ "name": "2",
+ "pull_time": "0001-01-01T00:00:00.000Z",
+ "push_time": "2022-03-22T09:05:04.844Z",
+ "repository_id": 5
+ },
+ {
+ "artifact_id": 5,
+ "id": 6,
+ "immutable": false,
+ "name": "1",
+ "pull_time": "0001-01-01T00:00:00.000Z",
+ "push_time": "2022-03-22T09:04:56.186Z",
+ "repository_id": 5
+ }.with_indifferent_access
+ ]
+ }
+ end
+
+ it 'returns the right tags data' do
+ expect_next_instance_of(Gitlab::Harbor::Client) do |client|
+ expect(client).to receive(:get_tags)
+ .with(hash_including(page_size: 10, page: 1))
+ .and_return(response_with_data)
+ end
+
+ tag = query.tags.first
+
+ expect(tag).to include(
+ "immutable": false,
+ "push_time": "2022-03-22T09:05:04.844Z"
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/hash_digest/facade_spec.rb b/spec/lib/gitlab/hash_digest/facade_spec.rb
deleted file mode 100644
index b352744513e..00000000000
--- a/spec/lib/gitlab/hash_digest/facade_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::HashDigest::Facade do
- describe '.hexdigest' do
- let(:plaintext) { 'something that is plaintext' }
-
- let(:sha256_hash) { OpenSSL::Digest::SHA256.hexdigest(plaintext) }
- let(:md5_hash) { Digest::MD5.hexdigest(plaintext) } # rubocop:disable Fips/MD5
-
- it 'uses SHA256' do
- expect(described_class.hexdigest(plaintext)).to eq(sha256_hash)
- end
-
- context 'when feature flags is not available' do
- before do
- allow(Feature).to receive(:feature_flags_available?).and_return(false)
- end
-
- it 'uses MD5' do
- expect(described_class.hexdigest(plaintext)).to eq(md5_hash)
- end
- end
-
- context 'when active_support_hash_digest_sha256 FF is disabled' do
- before do
- stub_feature_flags(active_support_hash_digest_sha256: false)
- end
-
- it 'uses MD5' do
- expect(described_class.hexdigest(plaintext)).to eq(md5_hash)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
index 771fc0218e2..25b84a67ab2 100644
--- a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
@@ -7,13 +7,12 @@ RSpec.describe Gitlab::HookData::MergeRequestBuilder do
let(:builder) { described_class.new(merge_request) }
- describe '#build' do
- let(:data) { builder.build }
+ describe '.safe_hook_attributes' do
+ let(:safe_attribute_keys) { described_class.safe_hook_attributes }
it 'includes safe attribute' do
- %w[
+ expected_safe_attribute_keys = %i[
assignee_id
- assignee_ids
author_id
blocking_discussions_resolved
created_at
@@ -32,17 +31,21 @@ RSpec.describe Gitlab::HookData::MergeRequestBuilder do
milestone_id
source_branch
source_project_id
- state
+ state_id
target_branch
target_project_id
time_estimate
title
updated_at
updated_by_id
- ].each do |key|
- expect(data).to include(key)
- end
+ ].freeze
+
+ expect(safe_attribute_keys).to match_array(expected_safe_attribute_keys)
end
+ end
+
+ describe '#build' do
+ let(:data) { builder.build }
%i[source target].each do |key|
describe "#{key} key" do
@@ -52,17 +55,30 @@ RSpec.describe Gitlab::HookData::MergeRequestBuilder do
end
end
+ it 'includes safe attributes' do
+ expect(data).to include(*described_class.safe_hook_attributes)
+ end
+
it 'includes additional attrs' do
- expect(data).to include(:source)
- expect(data).to include(:target)
- expect(data).to include(:last_commit)
- expect(data).to include(:work_in_progress)
- expect(data).to include(:total_time_spent)
- expect(data).to include(:time_change)
- expect(data).to include(:human_time_estimate)
- expect(data).to include(:human_total_time_spent)
- expect(data).to include(:human_time_change)
- expect(data).to include(:labels)
+ expected_additional_attributes = %w[
+ description
+ url
+ last_commit
+ work_in_progress
+ total_time_spent
+ time_change
+ human_total_time_spent
+ human_time_change
+ human_time_estimate
+ assignee_ids
+ assignee_id
+ labels
+ state
+ blocking_discussions_resolved
+ first_contribution
+ ].freeze
+
+ expect(data).to include(*expected_additional_attributes)
end
context 'when the MR has an image in the description' do
diff --git a/spec/lib/gitlab/http_connection_adapter_spec.rb b/spec/lib/gitlab/http_connection_adapter_spec.rb
index cde8376febd..a241a4b6490 100644
--- a/spec/lib/gitlab/http_connection_adapter_spec.rb
+++ b/spec/lib/gitlab/http_connection_adapter_spec.rb
@@ -15,18 +15,6 @@ RSpec.describe Gitlab::HTTPConnectionAdapter do
stub_all_dns('https://example.org', ip_address: '93.184.216.34')
end
- context 'with use_read_total_timeout option' do
- let(:options) { { use_read_total_timeout: true } }
-
- it 'sets up the connection using the Gitlab::NetHttpAdapter' do
- expect(connection).to be_a(Gitlab::NetHttpAdapter)
- expect(connection.address).to eq('93.184.216.34')
- expect(connection.hostname_override).to eq('example.org')
- expect(connection.addr_port).to eq('example.org')
- expect(connection.port).to eq(443)
- end
- end
-
context 'when local requests are allowed' do
let(:options) { { allow_local_requests: true } }
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index c2fb987d195..929fd37ee40 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -83,67 +83,25 @@ RSpec.describe Gitlab::HTTP do
subject(:request_slow_responder) { described_class.post('http://example.org', **options) }
- shared_examples 'tracks the timeout but does not raise an error' do
- specify :aggregate_failures do
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
- an_instance_of(Gitlab::HTTP::ReadTotalTimeout)
- ).once
-
- expect { request_slow_responder }.not_to raise_error
- end
-
- it 'still calls the block' do
- expect { |b| described_class.post('http://example.org', **options, &b) }.to yield_successive_args('a', 'b')
- end
- end
-
- shared_examples 'does not track or raise timeout error' do
- specify :aggregate_failures do
- expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
-
- expect { request_slow_responder }.not_to raise_error
- end
- end
-
- it_behaves_like 'tracks the timeout but does not raise an error'
-
- context 'and use_read_total_timeout option is truthy' do
- let(:options) { { use_read_total_timeout: true } }
-
- it 'raises an error' do
- expect { request_slow_responder }.to raise_error(Gitlab::HTTP::ReadTotalTimeout, /Request timed out after ?([0-9]*[.])?[0-9]+ seconds/)
- end
+ it 'raises an error' do
+ expect { request_slow_responder }.to raise_error(Gitlab::HTTP::ReadTotalTimeout, /Request timed out after ?([0-9]*[.])?[0-9]+ seconds/)
end
context 'and timeout option is greater than DEFAULT_READ_TOTAL_TIMEOUT' do
let(:options) { { timeout: 10.seconds } }
- it_behaves_like 'does not track or raise timeout error'
+ it 'does not raise an error' do
+ expect { request_slow_responder }.not_to raise_error
+ end
end
context 'and stream_body option is truthy' do
let(:options) { { stream_body: true } }
- it_behaves_like 'does not track or raise timeout error'
-
- context 'but skip_read_total_timeout option is falsey' do
- let(:options) { { stream_body: true, skip_read_total_timeout: false } }
-
- it_behaves_like 'tracks the timeout but does not raise an error'
+ it 'does not raise an error' do
+ expect { request_slow_responder }.not_to raise_error
end
end
-
- context 'and skip_read_total_timeout option is truthy' do
- let(:options) { { skip_read_total_timeout: true } }
-
- it_behaves_like 'does not track or raise timeout error'
- end
-
- context 'and skip_read_total_timeout option is falsely' do
- let(:options) { { skip_read_total_timeout: false } }
-
- it_behaves_like 'tracks the timeout but does not raise an error'
- end
end
it 'calls a block' do
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 9d516c8d7ac..af910b08fae 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -420,6 +420,8 @@ project:
- zentao_integration
# dingtalk_integration JiHu-specific, see https://jihulab.com/gitlab-cn/gitlab/-/merge_requests/417
- dingtalk_integration
+# feishu_integration JiHu-specific, see https://jihulab.com/gitlab-cn/gitlab/-/merge_requests/640
+- feishu_integration
- redmine_integration
- youtrack_integration
- custom_issue_tracker_integration
@@ -557,7 +559,6 @@ project:
- packages
- package_files
- packages_cleanup_policy
-- tracing_setting
- alerting_setting
- project_setting
- webide_pipelines
@@ -604,6 +605,7 @@ project:
- incident_management_oncall_schedules
- incident_management_oncall_rotations
- incident_management_escalation_policies
+- incident_management_issuable_escalation_statuses
- debian_distributions
- merge_request_metrics
- security_orchestration_policy_configuration
@@ -695,8 +697,6 @@ epic_issues:
feature_flag_issues:
- issue
- feature_flag
-tracing_setting:
-- project
reviews:
- project
- merge_request
diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
index 03f522ae490..3f73a730744 100644
--- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
@@ -171,4 +171,27 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer do
expect(described_class.batch_size(exportable)).to eq(described_class::BATCH_SIZE)
end
end
+
+ describe '#serialize_relation' do
+ context 'when record is a merge request' do
+ let(:json_writer) do
+ Class.new do
+ def write_relation_array(_, _, enumerator)
+ enumerator.each { _1 }
+ end
+ end.new
+ end
+
+ it 'removes cached external diff' do
+ merge_request = create(:merge_request, source_project: exportable, target_project: exportable)
+ cache_dir = merge_request.merge_request_diff.send(:external_diff_cache_dir)
+
+ expect(subject).to receive(:remove_cached_external_diff).with(merge_request).twice
+
+ subject.serialize_relation({ merge_requests: { include: [] } })
+
+ expect(Dir.exist?(cache_dir)).to eq(false)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/members_mapper_spec.rb b/spec/lib/gitlab/import_export/members_mapper_spec.rb
index 87ca899a87d..d7ad34255c1 100644
--- a/spec/lib/gitlab/import_export/members_mapper_spec.rb
+++ b/spec/lib/gitlab/import_export/members_mapper_spec.rb
@@ -258,7 +258,7 @@ RSpec.describe Gitlab::ImportExport::MembersMapper do
end
before do
- group.add_users([user, user2], GroupMember::DEVELOPER)
+ group.add_members([user, user2], GroupMember::DEVELOPER)
end
it 'maps the project member' do
@@ -281,7 +281,7 @@ RSpec.describe Gitlab::ImportExport::MembersMapper do
end
before do
- group.add_users([user, user2], GroupMember::DEVELOPER)
+ group.add_members([user, user2], GroupMember::DEVELOPER)
end
it 'maps the importer' do
@@ -315,7 +315,7 @@ RSpec.describe Gitlab::ImportExport::MembersMapper do
shared_examples_for 'it fetches the access level from parent group' do
before do
- group.add_users([user], group_access_level)
+ group.add_members([user], group_access_level)
end
it "and resolves it correctly" do
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index d3397e89f1f..157cd408da9 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -383,21 +383,52 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
end
end
- it 'restores releases with links & milestones' do
- release = @project.releases.last
- link = release.links.last
+ context 'restores releases' do
+ it 'with links & milestones' do
+ release = @project.releases.last
+ link = release.links.last
+
+ aggregate_failures do
+ expect(release.tag).to eq('release-1.2')
+ expect(release.description).to eq('Some release notes')
+ expect(release.name).to eq('release-1.2')
+ expect(release.sha).to eq('903de3a8bd5573f4a049b1457d28bc1592ba6bf9')
+ expect(release.released_at).to eq('2019-12-27T10:17:14.615Z')
+ expect(release.milestone_releases.count).to eq(1)
+ expect(release.milestone_releases.first.milestone.title).to eq('test milestone')
+
+ expect(link.url).to eq('http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download')
+ expect(link.name).to eq('release-1.2.dmg')
+ end
+ end
- aggregate_failures do
- expect(release.tag).to eq('release-1.1')
- expect(release.description).to eq('Some release notes')
- expect(release.name).to eq('release-1.1')
- expect(release.sha).to eq('901de3a8bd5573f4a049b1457d28bc1592ba6bf9')
- expect(release.released_at).to eq('2019-12-26T10:17:14.615Z')
- expect(release.milestone_releases.count).to eq(1)
- expect(release.milestone_releases.first.milestone.title).to eq('test milestone')
-
- expect(link.url).to eq('http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download')
- expect(link.name).to eq('release-1.1.dmg')
+ context 'with author' do
+ it 'as ghost user when imported release author is empty' do
+ release = @project.releases.first
+
+ aggregate_failures do
+ expect(release.tag).to eq('release-1.0')
+ expect(release.author_id).to eq(User.select(:id).ghost.id)
+ end
+ end
+
+ it 'as existing member when imported release author is matched with existing user' do
+ release = @project.releases.second
+
+ aggregate_failures do
+ expect(release.tag).to eq('release-1.1')
+ expect(release.author_id).to eq(@existing_members.first.id)
+ end
+ end
+
+ it 'as import user when imported release author cannot be matched' do
+ release = @project.releases.last
+
+ aggregate_failures do
+ expect(release.tag).to eq('release-1.2')
+ expect(release.author_id).to eq(@user.id)
+ end
+ end
end
end
@@ -441,7 +472,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
end
it 'has a new CI build token' do
- expect(Ci::Build.where(token: 'abcd')).to be_empty
+ expect(Ci::Build.find_by_token('abcd')).to be_nil
end
end
@@ -568,20 +599,10 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
context 'when there is an existing build with build token' do
before do
- create(:ci_build, token: 'abcd')
- end
-
- it_behaves_like 'restores project successfully',
- issues: 1,
- labels: 2,
- label_with_priorities: 'A project label',
- milestones: 1,
- first_issue_labels: 1
- end
-
- context 'when there is an existing build with build token' do
- before do
- create(:ci_build, token: 'abcd')
+ create(:ci_build).tap do |job|
+ job.set_token('abcd')
+ job.save!
+ end
end
it_behaves_like 'restores project successfully',
@@ -885,7 +906,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
context 'with group visibility' do
before do
group = create(:group, visibility_level: group_visibility)
- group.add_users([user], GroupMember::MAINTAINER)
+ group.add_members([user], GroupMember::MAINTAINER)
project.update!(group: group)
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index d7f07a1eadf..bd60bb53d49 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -564,8 +564,6 @@ Project:
- suggestion_commit_message
- merge_commit_template
- squash_commit_template
-ProjectTracingSetting:
-- external_url
Author:
- name
ProjectFeature:
diff --git a/spec/lib/gitlab/issuable/clone/attributes_rewriter_spec.rb b/spec/lib/gitlab/issuable/clone/attributes_rewriter_spec.rb
new file mode 100644
index 00000000000..dbb753d5b9f
--- /dev/null
+++ b/spec/lib/gitlab/issuable/clone/attributes_rewriter_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Issuable::Clone::AttributesRewriter do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project1) { create(:project, :public, group: group) }
+ let_it_be(:project2) { create(:project, :public, group: group) }
+ let_it_be(:original_issue) { create(:issue, project: project1) }
+
+ let(:new_attributes) { described_class.new(user, original_issue, project2).execute }
+
+ context 'with missing target parent' do
+ it 'raises an ArgumentError' do
+ expect { described_class.new(user, original_issue, nil) }.to raise_error ArgumentError
+ end
+ end
+
+ context 'setting labels' do
+ it 'sets labels present in the new project and group labels' do
+ project1_label_1 = create(:label, title: 'label1', project: project1)
+ project1_label_2 = create(:label, title: 'label2', project: project1)
+ project2_label_1 = create(:label, title: 'label1', project: project2)
+ group_label = create(:group_label, title: 'group_label', group: group)
+ create(:label, title: 'label3', project: project2)
+
+ original_issue.update!(labels: [project1_label_1, project1_label_2, group_label])
+
+ expect(new_attributes[:label_ids]).to match_array([project2_label_1.id, group_label.id])
+ end
+
+ it 'does not set any labels when not used on the original issue' do
+ expect(new_attributes[:label_ids]).to be_empty
+ end
+ end
+
+ context 'setting milestones' do
+ it 'sets milestone to nil when old issue milestone is not in the new project' do
+ milestone = create(:milestone, title: 'milestone', project: project1)
+
+ original_issue.update!(milestone: milestone)
+
+ expect(new_attributes[:milestone_id]).to be_nil
+ end
+
+ it 'copies the milestone when old issue milestone title is in the new project' do
+ milestone_project1 = create(:milestone, title: 'milestone', project: project1)
+ milestone_project2 = create(:milestone, title: 'milestone', project: project2)
+
+ original_issue.update!(milestone: milestone_project1)
+
+ expect(new_attributes[:milestone_id]).to eq(milestone_project2.id)
+ end
+
+ it 'copies the milestone when old issue milestone is a group milestone' do
+ milestone = create(:milestone, title: 'milestone', group: group)
+
+ original_issue.update!(milestone: milestone)
+
+ expect(new_attributes[:milestone_id]).to eq(milestone.id)
+ end
+
+ context 'when include_milestone is false' do
+ let(:new_attributes) { described_class.new(user, original_issue, project2).execute(include_milestone: false) }
+
+ it 'does not return any milestone' do
+ milestone = create(:milestone, title: 'milestone', group: group)
+
+ original_issue.update!(milestone: milestone)
+
+ expect(new_attributes[:milestone_id]).to be_nil
+ end
+ end
+ end
+
+ context 'when target parent is a group' do
+ let(:new_attributes) { described_class.new(user, original_issue, group).execute }
+
+ context 'setting labels' do
+ let(:project_label1) { create(:label, title: 'label1', project: project1) }
+ let!(:project_label2) { create(:label, title: 'label2', project: project1) }
+ let(:group_label1) { create(:group_label, title: 'group_label', group: group) }
+ let!(:group_label2) { create(:group_label, title: 'label2', group: group) }
+
+ it 'keeps group labels and merges project labels where possible' do
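+ # project_label2 shares a title with group_label2, so it maps to the group label;
+ # project_label1 has no group counterpart and is dropped when the target parent is a group.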
+ original_issue.update!(labels: [project_label1, project_label2, group_label1])
+
+ expect(new_attributes[:label_ids]).to match_array([group_label1.id, group_label2.id])
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/issuable/clone/copy_resource_events_service_spec.rb b/spec/lib/gitlab/issuable/clone/copy_resource_events_service_spec.rb
new file mode 100644
index 00000000000..1700939f49e
--- /dev/null
+++ b/spec/lib/gitlab/issuable/clone/copy_resource_events_service_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Issuable::Clone::CopyResourceEventsService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project1) { create(:project, :public, group: group) }
+ let_it_be(:project2) { create(:project, :public, group: group) }
+ let_it_be(:new_issue) { create(:issue, project: project2) }
+ let_it_be_with_reload(:original_issue) { create(:issue, project: project1) }
+
+ subject { described_class.new(user, original_issue, new_issue) }
+
+ it 'copies the resource label events' do
+ resource_label_events = create_list(:resource_label_event, 2, issue: original_issue)
+
+ subject.execute
+
+ expected = resource_label_events.map(&:label_id)
+
+ expect(new_issue.resource_label_events.map(&:label_id)).to match_array(expected)
+ end
+
+ context 'with existing milestone events' do
+ let!(:milestone1_project1) { create(:milestone, title: 'milestone1', project: project1) }
+ let!(:milestone2_project1) { create(:milestone, title: 'milestone2', project: project1) }
+ let!(:milestone3_project1) { create(:milestone, title: 'milestone3', project: project1) }
+
+ let!(:milestone1_project2) { create(:milestone, title: 'milestone1', project: project2) }
+ let!(:milestone2_project2) { create(:milestone, title: 'milestone2', project: project2) }
+
+ before do
+ original_issue.update!(milestone: milestone2_project1)
+
+ create_event(milestone1_project1)
+ create_event(milestone2_project1)
+ create_event(nil, 'remove')
+ create_event(milestone3_project1)
+ end
+
+ it 'copies existing resource milestone events' do
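+ # milestone3_project1 has no same-title milestone in project2, so its event is not copied
+ # and only three of the four original events carry over.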
+ subject.execute
+
+ new_issue_milestone_events = new_issue.reload.resource_milestone_events
+ expect(new_issue_milestone_events.count).to eq(3)
+
+ expect_milestone_event(
+ new_issue_milestone_events.first, milestone: milestone1_project2, action: 'add', state: 'opened'
+ )
+ expect_milestone_event(
+ new_issue_milestone_events.second, milestone: milestone2_project2, action: 'add', state: 'opened'
+ )
+ expect_milestone_event(
+ new_issue_milestone_events.third, milestone: nil, action: 'remove', state: 'opened'
+ )
+ end
+
+ def create_event(milestone, action = 'add')
+ create(:resource_milestone_event, issue: original_issue, milestone: milestone, action: action)
+ end
+
+ def expect_milestone_event(event, expected_attrs)
+ expect(event.milestone_id).to eq(expected_attrs[:milestone]&.id)
+ expect(event.action).to eq(expected_attrs[:action])
+ expect(event.state).to eq(expected_attrs[:state])
+ end
+ end
+
+ context 'with existing state events' do
+ let!(:event1) { create(:resource_state_event, issue: original_issue, state: 'opened') }
+ let!(:event2) { create(:resource_state_event, issue: original_issue, state: 'closed') }
+ let!(:event3) { create(:resource_state_event, issue: original_issue, state: 'reopened') }
+
+ it 'copies existing state events as expected' do
+ subject.execute
+
+ state_events = new_issue.reload.resource_state_events
+ expect(state_events.size).to eq(3)
+
+ expect_state_event(state_events.first, issue: new_issue, state: 'opened')
+ expect_state_event(state_events.second, issue: new_issue, state: 'closed')
+ expect_state_event(state_events.third, issue: new_issue, state: 'reopened')
+ end
+
+ def expect_state_event(event, expected_attrs)
+ expect(event.issue_id).to eq(expected_attrs[:issue]&.id)
+ expect(event.state).to eq(expected_attrs[:state])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/jira_import/issue_serializer_spec.rb b/spec/lib/gitlab/jira_import/issue_serializer_spec.rb
index 198d2db234c..30ad24472b4 100644
--- a/spec/lib/gitlab/jira_import/issue_serializer_spec.rb
+++ b/spec/lib/gitlab/jira_import/issue_serializer_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Gitlab::JiraImport::IssueSerializer do
let_it_be(:group_label) { create(:group_label, group: group, title: 'dev') }
let_it_be(:current_user) { create(:user) }
let_it_be(:user) { create(:user) }
+ let_it_be(:issue_type_id) { WorkItems::Type.default_issue_type.id }
let(:iid) { 5 }
let(:key) { 'PROJECT-5' }
@@ -54,7 +55,7 @@ RSpec.describe Gitlab::JiraImport::IssueSerializer do
let(:params) { { iid: iid } }
- subject { described_class.new(project, jira_issue, current_user.id, params).execute }
+ subject { described_class.new(project, jira_issue, current_user.id, issue_type_id, params).execute }
let(:expected_description) do
<<~MD
@@ -74,6 +75,7 @@ RSpec.describe Gitlab::JiraImport::IssueSerializer do
expect(subject).to eq(
iid: iid,
project_id: project.id,
+ namespace_id: project.project_namespace_id,
description: expected_description.strip,
title: "[#{key}] #{summary}",
state_id: 1,
@@ -81,7 +83,8 @@ RSpec.describe Gitlab::JiraImport::IssueSerializer do
created_at: created_at,
author_id: current_user.id,
assignee_ids: nil,
- label_ids: [project_label.id, group_label.id] + Label.reorder(id: :asc).last(2).pluck(:id)
+ label_ids: [project_label.id, group_label.id] + Label.reorder(id: :asc).last(2).pluck(:id),
+ work_item_type_id: issue_type_id
)
end
diff --git a/spec/lib/gitlab/jira_import/issues_importer_spec.rb b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
index 565a9ad17e1..1bc052ee0b6 100644
--- a/spec/lib/gitlab/jira_import/issues_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Gitlab::JiraImport::IssuesImporter do
let_it_be(:project) { create(:project) }
let_it_be(:jira_import) { create(:jira_import_state, project: project, user: current_user) }
let_it_be(:jira_integration) { create(:jira_integration, project: project) }
+ let_it_be(:default_issue_type_id) { WorkItems::Type.default_issue_type.id }
subject { described_class.new(project) }
@@ -47,12 +48,22 @@ RSpec.describe Gitlab::JiraImport::IssuesImporter do
count.times do |i|
if raise_exception_on_even_mocks && i.even?
- expect(Gitlab::JiraImport::IssueSerializer).to receive(:new)
- .with(project, jira_issues[i], current_user.id, { iid: next_iid + 1 }).and_raise('Some error')
+ expect(Gitlab::JiraImport::IssueSerializer).to receive(:new).with(
+ project,
+ jira_issues[i],
+ current_user.id,
+ default_issue_type_id,
+ { iid: next_iid + 1 }
+ ).and_raise('Some error')
else
next_iid += 1
- expect(Gitlab::JiraImport::IssueSerializer).to receive(:new)
- .with(project, jira_issues[i], current_user.id, { iid: next_iid }).and_return(serializer)
+ expect(Gitlab::JiraImport::IssueSerializer).to receive(:new).with(
+ project,
+ jira_issues[i],
+ current_user.id,
+ default_issue_type_id,
+ { iid: next_iid }
+ ).and_return(serializer)
end
end
end
diff --git a/spec/lib/gitlab/lograge/custom_options_spec.rb b/spec/lib/gitlab/lograge/custom_options_spec.rb
index 58b05be6ff9..090b79c5d3c 100644
--- a/spec/lib/gitlab/lograge/custom_options_spec.rb
+++ b/spec/lib/gitlab/lograge/custom_options_spec.rb
@@ -25,7 +25,8 @@ RSpec.describe Gitlab::Lograge::CustomOptions do
remote_ip: '192.168.1.2',
ua: 'Nyxt',
queue_duration_s: 0.2,
- etag_route: '/etag'
+ etag_route: '/etag',
+ response_bytes: 1234
}
end
@@ -55,6 +56,20 @@ RSpec.describe Gitlab::Lograge::CustomOptions do
expect(subject[:user_id]).to eq('test')
end
+ it 'adds the response length' do
+ expect(subject[:response_bytes]).to eq(1234)
+ end
+
+ context 'with log_response_length disabled' do
+ before do
+ stub_feature_flags(log_response_length: false)
+ end
+
+ it 'does not add the response length' do
+ expect(subject).not_to include(:response_bytes)
+ end
+ end
+
it 'adds Cloudflare headers' do
expect(subject[:cf_ray]).to eq(event.payload[:cf_ray])
expect(subject[:cf_request_id]).to eq(event.payload[:cf_request_id])
diff --git a/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb b/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
index d22bef5bda9..81910773dfa 100644
--- a/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
+++ b/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
@@ -11,6 +11,8 @@ RSpec.describe Gitlab::MarkdownCache::ActiveRecord::Extension do
attribute :author
attribute :project
+
+ before_validation -> { self.work_item_type_id = ::WorkItems::Type.default_issue_type.id }
end
end
diff --git a/spec/lib/gitlab/memory/watchdog_spec.rb b/spec/lib/gitlab/memory/watchdog_spec.rb
new file mode 100644
index 00000000000..8b82078bcb9
--- /dev/null
+++ b/spec/lib/gitlab/memory/watchdog_spec.rb
@@ -0,0 +1,308 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, :prometheus do
+ context 'watchdog' do
+ let(:logger) { instance_double(::Logger) }
+ let(:handler) { instance_double(described_class::NullHandler) }
+
+ let(:heap_frag_limit_gauge) { instance_double(::Prometheus::Client::Gauge) }
+ let(:heap_frag_violations_counter) { instance_double(::Prometheus::Client::Counter) }
+ let(:heap_frag_violations_handled_counter) { instance_double(::Prometheus::Client::Counter) }
+
+ let(:sleep_time) { 0.1 }
+ let(:max_heap_fragmentation) { 0.2 }
+
+ subject(:watchdog) do
+ described_class.new(handler: handler, logger: logger, sleep_time_seconds: sleep_time,
+ max_strikes: max_strikes, max_heap_fragmentation: max_heap_fragmentation)
+ end
+
+ before do
+ allow(handler).to receive(:on_high_heap_fragmentation).and_return(true)
+
+ allow(logger).to receive(:warn)
+ allow(logger).to receive(:info)
+
+ allow(Gitlab::Metrics::Memory).to receive(:gc_heap_fragmentation).and_return(fragmentation)
+ end
+
+ after do
+ watchdog.stop
+ end
+
+ context 'when starting up' do
+ let(:fragmentation) { 0 }
+ let(:max_strikes) { 0 }
+
+ it 'sets the heap fragmentation limit gauge' do
+ allow(Gitlab::Metrics).to receive(:gauge).and_return(heap_frag_limit_gauge)
+
+ expect(heap_frag_limit_gauge).to receive(:set).with({}, max_heap_fragmentation)
+ end
+
+ context 'when no settings are set in the environment' do
+ it 'initializes with defaults' do
+ watchdog = described_class.new(handler: handler, logger: logger)
+
+ expect(watchdog.max_heap_fragmentation).to eq(described_class::DEFAULT_HEAP_FRAG_THRESHOLD)
+ expect(watchdog.max_strikes).to eq(described_class::DEFAULT_MAX_STRIKES)
+ expect(watchdog.sleep_time_seconds).to eq(described_class::DEFAULT_SLEEP_TIME_SECONDS)
+ end
+ end
+
+ context 'when settings are passed through the environment' do
+ before do
+ stub_env('GITLAB_MEMWD_MAX_HEAP_FRAG', 1)
+ stub_env('GITLAB_MEMWD_MAX_STRIKES', 2)
+ stub_env('GITLAB_MEMWD_SLEEP_TIME_SEC', 3)
+ end
+
+ it 'initializes with these settings' do
+ watchdog = described_class.new(handler: handler, logger: logger)
+
+ expect(watchdog.max_heap_fragmentation).to eq(1)
+ expect(watchdog.max_strikes).to eq(2)
+ expect(watchdog.sleep_time_seconds).to eq(3)
+ end
+ end
+ end
+
+ context 'when process does not exceed heap fragmentation threshold' do
+ let(:fragmentation) { max_heap_fragmentation - 0.1 }
+ let(:max_strikes) { 0 } # To rule out that we were granting too many strikes.
+
+ it 'does not signal the handler' do
+ expect(handler).not_to receive(:on_high_heap_fragmentation)
+
+ watchdog.start
+
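+ # Give the watchdog thread a few polling cycles to run before the example ends.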
+ sleep sleep_time * 3
+ end
+ end
+
+ context 'when process exceeds heap fragmentation threshold permanently' do
+ let(:fragmentation) { max_heap_fragmentation + 0.1 }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:counter)
+ .with(:gitlab_memwd_heap_frag_violations_total, anything, anything)
+ .and_return(heap_frag_violations_counter)
+ allow(Gitlab::Metrics).to receive(:counter)
+ .with(:gitlab_memwd_heap_frag_violations_handled_total, anything, anything)
+ .and_return(heap_frag_violations_handled_counter)
+ allow(heap_frag_violations_counter).to receive(:increment)
+ allow(heap_frag_violations_handled_counter).to receive(:increment)
+ end
+
+ context 'when process has not exceeded allowed number of strikes' do
+ let(:max_strikes) { 10 }
+
+ it 'does not signal the handler' do
+ expect(handler).not_to receive(:on_high_heap_fragmentation)
+
+ watchdog.start
+
+ sleep sleep_time * 3
+ end
+
+ it 'does not log any events' do
+ expect(logger).not_to receive(:warn)
+
+ watchdog.start
+
+ sleep sleep_time * 3
+ end
+
+ it 'increments the violations counter' do
+ expect(heap_frag_violations_counter).to receive(:increment)
+
+ watchdog.start
+
+ sleep sleep_time * 3
+ end
+
+ it 'does not increment violations handled counter' do
+ expect(heap_frag_violations_handled_counter).not_to receive(:increment)
+
+ watchdog.start
+
+ sleep sleep_time * 3
+ end
+ end
+
+ context 'when process exceeds the allowed number of strikes' do
+ let(:max_strikes) { 1 }
+
+ it 'signals the handler and resets strike counter' do
+ expect(handler).to receive(:on_high_heap_fragmentation).and_return(true)
+
+ watchdog.start
+
+ sleep sleep_time * 3
+
+ expect(watchdog.strikes).to eq(0)
+ end
+
+ it 'logs the event' do
+ expect(::Prometheus::PidProvider).to receive(:worker_id).at_least(:once).and_return('worker_1')
+ expect(Gitlab::Metrics::System).to receive(:memory_usage_rss).at_least(:once).and_return(1024)
+ expect(logger).to receive(:warn).with({
+ message: 'heap fragmentation limit exceeded',
+ pid: Process.pid,
+ worker_id: 'worker_1',
+ memwd_handler_class: 'RSpec::Mocks::InstanceVerifyingDouble',
+ memwd_sleep_time_s: sleep_time,
+ memwd_max_heap_frag: max_heap_fragmentation,
+ memwd_cur_heap_frag: fragmentation,
+ memwd_max_strikes: max_strikes,
+ memwd_cur_strikes: max_strikes + 1,
+ memwd_rss_bytes: 1024
+ })
+
+ watchdog.start
+
+ sleep sleep_time * 3
+ end
+
+ it 'increments both the violations and violations handled counters' do
+ expect(heap_frag_violations_counter).to receive(:increment)
+ expect(heap_frag_violations_handled_counter).to receive(:increment)
+
+ watchdog.start
+
+ sleep sleep_time * 3
+ end
+
+ context 'when enforce_memory_watchdog ops toggle is off' do
+ before do
+ stub_feature_flags(enforce_memory_watchdog: false)
+ end
+
+ it 'always uses the NullHandler' do
+ expect(handler).not_to receive(:on_high_heap_fragmentation)
+ expect(described_class::NullHandler.instance).to(
+ receive(:on_high_heap_fragmentation).with(fragmentation).and_return(true)
+ )
+
+ watchdog.start
+
+ sleep sleep_time * 3
+ end
+ end
+ end
+
+ context 'when handler result is true' do
+ let(:max_strikes) { 1 }
+
+ it 'considers the event handled and stops itself' do
+ expect(handler).to receive(:on_high_heap_fragmentation).once.and_return(true)
+
+ watchdog.start
+
+ sleep sleep_time * 3
+ end
+ end
+
+ context 'when handler result is false' do
+ let(:max_strikes) { 1 }
+
+ it 'keeps running' do
+ # Return true the third time to terminate the daemon.
+ expect(handler).to receive(:on_high_heap_fragmentation).and_return(false, false, true)
+
+ watchdog.start
+
+ sleep sleep_time * 4
+ end
+ end
+ end
+
+ context 'when process exceeds heap fragmentation threshold temporarily' do
+ let(:fragmentation) { max_heap_fragmentation }
+ let(:max_strikes) { 1 }
+
+ before do
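+ # Fragmentation alternates around the limit, so the strike counter (presumably reset whenever
+ # the value drops back below the threshold) never exceeds max_strikes and the handler stays quiet.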
+ allow(Gitlab::Metrics::Memory).to receive(:gc_heap_fragmentation).and_return(
+ fragmentation - 0.1,
+ fragmentation + 0.2,
+ fragmentation - 0.1,
+ fragmentation + 0.1
+ )
+ end
+
+ it 'does not signal the handler' do
+ expect(handler).not_to receive(:on_high_heap_fragmentation)
+
+ watchdog.start
+
+ sleep sleep_time * 4
+ end
+ end
+
+ context 'when gitlab_memory_watchdog ops toggle is off' do
+ let(:fragmentation) { 0 }
+ let(:max_strikes) { 0 }
+
+ before do
+ stub_feature_flags(gitlab_memory_watchdog: false)
+ end
+
+ it 'does not monitor heap fragmentation' do
+ expect(Gitlab::Metrics::Memory).not_to receive(:gc_heap_fragmentation)
+
+ watchdog.start
+
+ sleep sleep_time * 3
+ end
+ end
+ end
+
+ context 'handlers' do
+ context 'NullHandler' do
+ subject(:handler) { described_class::NullHandler.instance }
+
+ describe '#on_high_heap_fragmentation' do
+ it 'does nothing' do
+ expect(handler.on_high_heap_fragmentation(1.0)).to be(false)
+ end
+ end
+ end
+
+ context 'TermProcessHandler' do
+ subject(:handler) { described_class::TermProcessHandler.new(42) }
+
+ describe '#on_high_heap_fragmentation' do
+ it 'sends SIGTERM to the current process' do
+ expect(Process).to receive(:kill).with(:TERM, 42)
+
+ expect(handler.on_high_heap_fragmentation(1.0)).to be(true)
+ end
+ end
+ end
+
+ context 'PumaHandler' do
+ # rubocop: disable RSpec/VerifiedDoubles
+ # In tests, the Puma constant is not loaded so we cannot make this an instance_double.
+ let(:puma_worker_handle_class) { double('Puma::Cluster::WorkerHandle') }
+ let(:puma_worker_handle) { double('worker') }
+ # rubocop: enable RSpec/VerifiedDoubles
+
+ subject(:handler) { described_class::PumaHandler.new({}) }
+
+ before do
+ stub_const('::Puma::Cluster::WorkerHandle', puma_worker_handle_class)
+ end
+
+ describe '#on_high_heap_fragmentation' do
+ it 'invokes orderly termination via Puma API' do
+ expect(puma_worker_handle_class).to receive(:new).and_return(puma_worker_handle)
+ expect(puma_worker_handle).to receive(:term)
+
+ expect(handler.on_high_heap_fragmentation(1.0)).to be(true)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb b/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb
index 66fba7ab683..dc5c7eb2e55 100644
--- a/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb
+++ b/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Gitlab::Metrics::Exporter::BaseExporter do
allow(settings).to receive(:enabled).and_return(true)
allow(settings).to receive(:port).and_return(0)
allow(settings).to receive(:address).and_return('127.0.0.1')
+ allow(settings).to receive(:[]).with('tls_enabled').and_return(false)
end
after do
@@ -88,6 +89,51 @@ RSpec.describe Gitlab::Metrics::Exporter::BaseExporter do
exporter
end
end
+
+ context 'with TLS enabled' do
+ let(:test_cert) { Rails.root.join('spec/fixtures/x509_certificate.crt').to_s }
+ let(:test_key) { Rails.root.join('spec/fixtures/x509_certificate_pk.key').to_s }
+
+ before do
+ allow(settings).to receive(:[]).with('tls_enabled').and_return(true)
+ allow(settings).to receive(:[]).with('tls_cert_path').and_return(test_cert)
+ allow(settings).to receive(:[]).with('tls_key_path').and_return(test_key)
+ end
+
+ it 'injects the necessary OpenSSL config for WEBrick' do
+ expect(::WEBrick::HTTPServer).to receive(:new).with(
+ a_hash_including(
+ SSLEnable: true,
+ SSLCertificate: an_instance_of(OpenSSL::X509::Certificate),
+ SSLPrivateKey: an_instance_of(OpenSSL::PKey::RSA),
+ SSLStartImmediately: true,
+ SSLExtraChainCert: []
+ ))
+
+ exporter.start
+ end
+
+ context 'with intermediate certificates' do
+ let(:test_cert) { Rails.root.join('spec/fixtures/clusters/chain_certificates.pem').to_s }
+ let(:test_key) { Rails.root.join('spec/fixtures/clusters/sample_key.key').to_s }
+
+ it 'injects them in the extra chain' do
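+ # chain_certificates.pem presumably bundles a leaf certificate plus two intermediates:
+ # the leaf becomes SSLCertificate and the two intermediates land in SSLExtraChainCert.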
+ expect(::WEBrick::HTTPServer).to receive(:new).with(
+ a_hash_including(
+ SSLEnable: true,
+ SSLCertificate: an_instance_of(OpenSSL::X509::Certificate),
+ SSLPrivateKey: an_instance_of(OpenSSL::PKey::RSA),
+ SSLStartImmediately: true,
+ SSLExtraChainCert: [
+ an_instance_of(OpenSSL::X509::Certificate),
+ an_instance_of(OpenSSL::X509::Certificate)
+ ]
+ ))
+
+ exporter.start
+ end
+ end
+ end
end
describe 'when thread is not alive' do
@@ -159,6 +205,7 @@ RSpec.describe Gitlab::Metrics::Exporter::BaseExporter do
allow(settings).to receive(:enabled).and_return(true)
allow(settings).to receive(:port).and_return(0)
allow(settings).to receive(:address).and_return('127.0.0.1')
+ allow(settings).to receive(:[]).with('tls_enabled').and_return(false)
stub_const('Gitlab::Metrics::Exporter::MetricsMiddleware', fake_collector)
diff --git a/spec/lib/gitlab/metrics/memory_spec.rb b/spec/lib/gitlab/metrics/memory_spec.rb
new file mode 100644
index 00000000000..fd8ca3b37c6
--- /dev/null
+++ b/spec/lib/gitlab/metrics/memory_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Metrics::Memory do
+ describe '.gc_heap_fragmentation' do
+ subject(:call) do
+ described_class.gc_heap_fragmentation(
+ heap_live_slots: gc_stat_heap_live_slots,
+ heap_eden_pages: gc_stat_heap_eden_pages
+ )
+ end
+
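+ # The expectations below imply fragmentation = 1 - (live_slots / (eden_pages * HEAP_SLOTS_PER_PAGE)):
+ # 0 for a fully packed single page, 0.5 for two half-empty pages, and close to 1 for one object per page.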
+ context 'when the Ruby heap is perfectly utilized' do
+ # All objects are located in a single heap page.
+ let(:gc_stat_heap_live_slots) { described_class::HEAP_SLOTS_PER_PAGE }
+ let(:gc_stat_heap_eden_pages) { 1 }
+
+ it { is_expected.to eq(0) }
+ end
+
+ context 'when the Ruby heap is greatly fragmented' do
+ # There is one object per heap page.
+ let(:gc_stat_heap_live_slots) { described_class::HEAP_SLOTS_PER_PAGE }
+ let(:gc_stat_heap_eden_pages) { described_class::HEAP_SLOTS_PER_PAGE }
+
+ # The heap can never be "perfectly fragmented" because that would require
+ # zero objects per page.
+ it { is_expected.to be > 0.99 }
+ end
+
+ context 'when the Ruby heap is semi-fragmented' do
+ # All objects are spread over two pages i.e. each page is 50% utilized.
+ let(:gc_stat_heap_live_slots) { described_class::HEAP_SLOTS_PER_PAGE }
+ let(:gc_stat_heap_eden_pages) { 2 }
+
+ it { is_expected.to eq(0.5) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
index dfae5aa6784..b1566ffa7b4 100644
--- a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
@@ -125,5 +125,11 @@ RSpec.describe Gitlab::Metrics::Samplers::RubySampler do
sampler.sample
end
+
+ it 'adds a heap fragmentation metric' do
+ expect(sampler.metrics[:heap_fragmentation]).to receive(:set).with({}, anything)
+
+ sampler.sample
+ end
end
end
diff --git a/spec/lib/gitlab/metrics/sli_spec.rb b/spec/lib/gitlab/metrics/sli_spec.rb
index 102ea442b3a..d100f66be19 100644
--- a/spec/lib/gitlab/metrics/sli_spec.rb
+++ b/spec/lib/gitlab/metrics/sli_spec.rb
@@ -172,11 +172,11 @@ RSpec.describe Gitlab::Metrics::Sli do
fake_counter
end
- def fake_total_counter(name)
- fake_prometheus_counter("gitlab_sli:#{name}:total")
+ def fake_total_counter(name, separator = '_')
+ fake_prometheus_counter(['gitlab_sli', name, 'total'].join(separator))
end
- def fake_numerator_counter(name, numerator_name)
- fake_prometheus_counter("gitlab_sli:#{name}:#{numerator_name}_total")
+ def fake_numerator_counter(name, numerator_name, separator = '_')
+ fake_prometheus_counter(["gitlab_sli", name, "#{numerator_name}_total"].join(separator))
end
end
diff --git a/spec/lib/gitlab/pages/cache_control_spec.rb b/spec/lib/gitlab/pages/cache_control_spec.rb
new file mode 100644
index 00000000000..6ed823427fb
--- /dev/null
+++ b/spec/lib/gitlab/pages/cache_control_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pages::CacheControl do
+ it 'fails with invalid type' do
+ expect { described_class.new(type: :unknown, id: nil) }
+ .to raise_error(ArgumentError, "type must be :namespace or :project")
+ end
+
+ describe '.for_namespace' do
+ let(:subject) { described_class.for_namespace(1) }
+
+ it { expect(subject.cache_key).to eq('pages_domain_for_namespace_1') }
+
+ describe '#clear_cache' do
+ it 'clears the cache' do
+ expect(Rails.cache)
+ .to receive(:delete)
+ .with('pages_domain_for_namespace_1')
+
+ subject.clear_cache
+ end
+ end
+ end
+
+ describe '.for_project' do
+ let(:subject) { described_class.for_project(1) }
+
+ it { expect(subject.cache_key).to eq('pages_domain_for_project_1') }
+
+ describe '#clear_cache' do
+ it 'clears the cache' do
+ expect(Rails.cache)
+ .to receive(:delete)
+ .with('pages_domain_for_project_1')
+
+ subject.clear_cache
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pages/deployment_update_spec.rb b/spec/lib/gitlab/pages/deployment_update_spec.rb
new file mode 100644
index 00000000000..cf109248f36
--- /dev/null
+++ b/spec/lib/gitlab/pages/deployment_update_spec.rb
@@ -0,0 +1,140 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pages::DeploymentUpdate do
+ let_it_be(:project, refind: true) { create(:project, :repository) }
+
+ let_it_be(:old_pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
+
+ let(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
+ let(:invalid_file) { fixture_file_upload('spec/fixtures/dk.png') }
+
+ let(:file) { fixture_file_upload("spec/fixtures/pages.zip") }
+ let(:empty_file) { fixture_file_upload("spec/fixtures/pages_empty.zip") }
+ let(:empty_metadata_filename) { "spec/fixtures/pages_empty.zip.meta" }
+ let(:metadata_filename) { "spec/fixtures/pages.zip.meta" }
+ let(:metadata) { fixture_file_upload(metadata_filename) if File.exist?(metadata_filename) }
+
+ subject(:pages_deployment_update) { described_class.new(project, build) }
+
+ context 'for new artifacts' do
+ context 'for a valid job' do
+ let!(:artifacts_archive) { create(:ci_job_artifact, :correct_checksum, file: file, job: build) }
+
+ before do
+ create(:ci_job_artifact, file_type: :metadata, file_format: :gzip, file: metadata, job: build)
+
+ build.reload
+ end
+
+ it 'is valid' do
+ expect(pages_deployment_update).to be_valid
+ end
+
+ context 'when missing artifacts metadata' do
+ before do
+ expect(build).to receive(:artifacts_metadata?).and_return(false)
+ end
+
+ it 'is invalid' do
+ expect(pages_deployment_update).not_to be_valid
+ expect(pages_deployment_update.errors.full_messages).to include('missing artifacts metadata')
+ end
+ end
+ end
+
+ it 'is invalid for invalid archive' do
+ create(:ci_job_artifact, :archive, file: invalid_file, job: build)
+
+ expect(pages_deployment_update).not_to be_valid
+ expect(pages_deployment_update.errors.full_messages).to include('missing artifacts metadata')
+ end
+ end
+
+ describe 'maximum pages artifacts size' do
+ let(:metadata) { spy('metadata') } # rubocop: disable RSpec/VerifiedDoubles
+
+ before do
+ file = fixture_file_upload('spec/fixtures/pages.zip')
+ metafile = fixture_file_upload('spec/fixtures/pages.zip.meta')
+
+ create(:ci_job_artifact, :archive, :correct_checksum, file: file, job: build)
+ create(:ci_job_artifact, :metadata, file: metafile, job: build)
+
+ allow(build).to receive(:artifacts_metadata_entry)
+ .and_return(metadata)
+ end
+
+ context 'when maximum pages size is set to zero' do
+ before do
+ stub_application_setting(max_pages_size: 0)
+ end
+
+ context "when size is above the limit" do
+ before do
+ allow(metadata).to receive(:total_size).and_return(1.megabyte)
+ allow(metadata).to receive(:entries).and_return([])
+ end
+
+ it 'is valid' do
+ expect(pages_deployment_update).to be_valid
+ end
+ end
+ end
+
+ context 'when size is limited on the instance level' do
+ before do
+ stub_application_setting(max_pages_size: 100)
+ end
+
+ context "when size is below the limit" do
+ before do
+ allow(metadata).to receive(:total_size).and_return(1.megabyte)
+ allow(metadata).to receive(:entries).and_return([])
+ end
+
+ it 'is valid' do
+ expect(pages_deployment_update).to be_valid
+ end
+ end
+
+ context "when size is above the limit" do
+ before do
+ allow(metadata).to receive(:total_size).and_return(101.megabyte)
+ allow(metadata).to receive(:entries).and_return([])
+ end
+
+ it 'is invalid' do
+ expect(pages_deployment_update).not_to be_valid
+ expect(pages_deployment_update.errors.full_messages)
+ .to include('artifacts for pages are too large: 105906176')
+ end
+ end
+ end
+ end
+
+ context 'when retrying the job' do
+ let!(:older_deploy_job) do
+ create(
+ :generic_commit_status,
+ :failed,
+ pipeline: pipeline,
+ ref: build.ref,
+ stage: 'deploy',
+ name: 'pages:deploy'
+ )
+ end
+
+ before do
+ create(:ci_job_artifact, :correct_checksum, file: file, job: build)
+ create(:ci_job_artifact, file_type: :metadata, file_format: :gzip, file: metadata, job: build)
+ build.reload
+ end
+
+ it 'marks older pages:deploy jobs as retried' do
+ expect(pages_deployment_update).to be_valid
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
index ac2695977c4..879c874b134 100644
--- a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
+++ b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
@@ -15,6 +15,22 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
end
end
+ describe '.enforced_for_type?' do
+ subject { described_class.enforced_for_type?(relation) }
+
+ context 'when relation is Group' do
+ let(:relation) { Group.all }
+
+ it { is_expected.to be true }
+ end
+
+ context 'when relation is AuditEvent' do
+ let(:relation) { AuditEvent.all }
+
+ it { is_expected.to be false }
+ end
+ end
+
describe '.available?' do
let(:request_context) { double('request_context', params: { order_by: order_by, sort: sort }) }
let(:cursor_based_request_context) { Gitlab::Pagination::Keyset::CursorBasedRequestContext.new(request_context) }
diff --git a/spec/lib/gitlab/pagination/keyset/order_spec.rb b/spec/lib/gitlab/pagination/keyset/order_spec.rb
index abbb3a21cd4..c1fc73603d6 100644
--- a/spec/lib/gitlab/pagination/keyset/order_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/order_spec.rb
@@ -680,4 +680,28 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
end
end
end
+
+ describe '#attribute_names' do
+ let(:expected_attribute_names) { %w(id name) }
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ order_expression: Project.arel_table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'name',
+ order_expression: Project.arel_table['name'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ subject { order.attribute_names }
+
+ it { is_expected.to match_array(expected_attribute_names) }
+ end
end
diff --git a/spec/lib/gitlab/quick_actions/users_extractor_spec.rb b/spec/lib/gitlab/quick_actions/users_extractor_spec.rb
new file mode 100644
index 00000000000..d00f52bb056
--- /dev/null
+++ b/spec/lib/gitlab/quick_actions/users_extractor_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::QuickActions::UsersExtractor do
+ subject(:extractor) { described_class.new(current_user, project: project, group: group, target: target, text: text) }
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:target) { create(:issue, project: project) }
+
+ let_it_be(:pancakes) { create(:user, username: 'pancakes') }
+ let_it_be(:waffles) { create(:user, username: 'waffles') }
+ let_it_be(:syrup) { create(:user, username: 'syrup') }
+
+ before do
+ allow(target).to receive(:allows_multiple_assignees?).and_return(false)
+ end
+
+ context 'when the text is nil' do
+ let(:text) { nil }
+
+ it 'returns an empty array' do
+ expect(extractor.execute).to be_empty
+ end
+ end
+
+ context 'when the text is blank' do
+ let(:text) { ' ' }
+
+ it 'returns an empty array' do
+ expect(extractor.execute).to be_empty
+ end
+ end
+
+ context 'when there are users to be found' do
+ context 'when using usernames' do
+ let(:text) { 'me, pancakes waffles and syrup' }
+
+ it 'finds the users' do
+ expect(extractor.execute).to contain_exactly(current_user, pancakes, waffles, syrup)
+ end
+ end
+
+ context 'when there are too many users' do
+ let(:text) { 'me, pancakes waffles and syrup' }
+
+ before do
+ stub_const("#{described_class}::MAX_QUICK_ACTION_USERS", 2)
+ end
+
+ it 'complains' do
+ expect { extractor.execute }.to raise_error(described_class::TooManyError)
+ end
+ end
+
+ context 'when using references' do
+ let(:text) { 'me, @pancakes @waffles and @syrup' }
+
+ it 'finds the users' do
+ expect(extractor.execute).to contain_exactly(current_user, pancakes, waffles, syrup)
+ end
+ end
+
+ context 'when using a mixture of usernames and references' do
+ let(:text) { 'me, @pancakes waffles and @syrup' }
+
+ it 'finds the users' do
+ expect(extractor.execute).to contain_exactly(current_user, pancakes, waffles, syrup)
+ end
+ end
+
+ context 'when one or more users cannot be found' do
+ let(:text) { 'me, @bacon @pancakes, chicken waffles and @syrup' }
+
+ it 'reports an error' do
+ expect { extractor.execute }.to raise_error(described_class::MissingError, include('bacon', 'chicken'))
+ end
+ end
+
+ context 'when trying to find group members' do
+ let(:group) { create(:group, path: 'breakfast-foods') }
+ let(:text) { group.to_reference }
+
+ it 'reports an error' do
+ [pancakes, waffles].each { group.add_developer(_1) }
+
+ expect { extractor.execute }.to raise_error(described_class::MissingError, include('breakfast-foods'))
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb
index e127c89c303..50ebf43a05e 100644
--- a/spec/lib/gitlab/redis/multi_store_spec.rb
+++ b/spec/lib/gitlab/redis/multi_store_spec.rb
@@ -507,7 +507,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
secondary_store.flushdb
end
- describe "command execution in a transaction" do
+ describe "command execution in a pipelined command" do
let(:counter) { Gitlab::Metrics::NullMetric.instance }
before do
@@ -557,7 +557,15 @@ RSpec.describe Gitlab::Redis::MultiStore do
include_examples 'verify that store contains values', :secondary_store
end
- describe 'return values from a transaction' do
+ describe 'return values from a pipelined command' do
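+ # Custom matcher for the examples below: passes when the logged error is a PipelinedDiffError with a non-nil backtrace and the given message.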
+ RSpec::Matchers.define :pipeline_diff_error_with_stacktrace do |message|
+ match do |object|
+ expect(object).to be_a(Gitlab::Redis::MultiStore::PipelinedDiffError)
+ expect(object.backtrace).not_to be_nil
+ expect(object.message).to eq(message)
+ end
+ end
+
subject do
multi_store.send(name) do |redis|
redis.get(key1)
@@ -585,7 +593,10 @@ RSpec.describe Gitlab::Redis::MultiStore do
it 'returns the value from the secondary store, logging an error' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- an_instance_of(Gitlab::Redis::MultiStore::PipelinedDiffError),
+ pipeline_diff_error_with_stacktrace(
+ 'Pipelined command executed on both stores successfully but results differ between them. ' \
+ "Result from the primary: [#{value1.inspect}]. Result from the secondary: [#{value2.inspect}]."
+ ),
hash_including(command_name: name, instance_name: instance_name)
).and_call_original
expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
@@ -601,7 +612,10 @@ RSpec.describe Gitlab::Redis::MultiStore do
it 'returns the value from the secondary store, logging an error' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- an_instance_of(Gitlab::Redis::MultiStore::PipelinedDiffError),
+ pipeline_diff_error_with_stacktrace(
+ 'Pipelined command executed on both stores successfully but results differ between them. ' \
+ "Result from the primary: [nil]. Result from the secondary: [#{value2.inspect}]."
+ ),
hash_including(command_name: name, instance_name: instance_name)
)
expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index d48e8183650..a3afbed18e2 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -968,4 +968,18 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('abc!abc') }
it { is_expected.not_to match((['abc'] * 100).join('.') + '!') }
end
+
+ describe '.x509_subject_key_identifier_regex' do
+ subject { described_class.x509_subject_key_identifier_regex }
+
+ it { is_expected.to match('AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB') }
+ it { is_expected.to match('CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD') }
+ it { is_expected.to match('79:FB:C1:E5:6B:53:8B:0A') }
+ it { is_expected.to match('79:fb:c1:e5:6b:53:8b:0a') }
+
+ it { is_expected.not_to match('') }
+ it { is_expected.not_to match('CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:GG') }
+ it { is_expected.not_to match('random string') }
+ it { is_expected.not_to match('12321342545356434523412341245452345623453542345234523453245') }
+ end
end
diff --git a/spec/lib/gitlab/security/scan_configuration_spec.rb b/spec/lib/gitlab/security/scan_configuration_spec.rb
index 1760796c5a0..774a362617a 100644
--- a/spec/lib/gitlab/security/scan_configuration_spec.rb
+++ b/spec/lib/gitlab/security/scan_configuration_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe ::Gitlab::Security::ScanConfiguration do
let(:configured) { true }
context 'with a core scanner' do
- where(type: %i(sast sast_iac secret_detection))
+ where(type: %i(sast sast_iac secret_detection container_scanning))
with_them do
it { is_expected.to be_truthy }
diff --git a/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb b/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
index 4a952a2040a..01b7270d761 100644
--- a/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
+++ b/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
@@ -129,7 +129,7 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
allow(Sidekiq).to receive(:options).and_return(timeout: 9)
end
- it 'return true when everything is within limit' do
+ it 'returns true when everything is within limit', :aggregate_failures do
expect(memory_killer).to receive(:get_rss).and_return(100)
expect(memory_killer).to receive(:get_soft_limit_rss).and_return(200)
expect(memory_killer).to receive(:get_hard_limit_rss).and_return(300)
@@ -144,7 +144,7 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
expect(subject).to be true
end
- it 'return false when rss exceeds hard_limit_rss' do
+ it 'returns false when rss exceeds hard_limit_rss', :aggregate_failures do
expect(memory_killer).to receive(:get_rss).at_least(:once).and_return(400)
expect(memory_killer).to receive(:get_soft_limit_rss).at_least(:once).and_return(200)
expect(memory_killer).to receive(:get_hard_limit_rss).at_least(:once).and_return(300)
@@ -159,12 +159,12 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
expect(Gitlab::Metrics::System).to receive(:monotonic_time).and_call_original
- expect(memory_killer).to receive(:log_rss_out_of_range).with(400, 300, 200)
+ expect(memory_killer).to receive(:out_of_range_description).with(400, 300, 200, true)
expect(subject).to be false
end
- it 'return false when rss exceed hard_limit_rss after a while' do
+ it 'returns false when rss exceeds hard_limit_rss after a while', :aggregate_failures do
expect(memory_killer).to receive(:get_rss).and_return(250, 400, 400)
expect(memory_killer).to receive(:get_soft_limit_rss).at_least(:once).and_return(200)
expect(memory_killer).to receive(:get_hard_limit_rss).at_least(:once).and_return(300)
@@ -180,12 +180,13 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
expect(Gitlab::Metrics::System).to receive(:monotonic_time).twice.and_call_original
expect(memory_killer).to receive(:sleep).with(check_interval_seconds)
- expect(memory_killer).to receive(:log_rss_out_of_range).with(400, 300, 200)
+ expect(memory_killer).to receive(:out_of_range_description).with(400, 300, 200, false)
+ expect(memory_killer).to receive(:out_of_range_description).with(400, 300, 200, true)
expect(subject).to be false
end
- it 'return true when rss below soft_limit_rss after a while within GRACE_BALLOON_SECONDS' do
+ it 'returns true when rss is below soft_limit_rss after a while within GRACE_BALLOON_SECONDS', :aggregate_failures do
expect(memory_killer).to receive(:get_rss).and_return(250, 100)
expect(memory_killer).to receive(:get_soft_limit_rss).and_return(200, 200)
expect(memory_killer).to receive(:get_hard_limit_rss).and_return(300, 300)
@@ -201,15 +202,15 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
expect(Gitlab::Metrics::System).to receive(:monotonic_time).twice.and_call_original
expect(memory_killer).to receive(:sleep).with(check_interval_seconds)
- expect(memory_killer).not_to receive(:log_rss_out_of_range)
+ expect(memory_killer).to receive(:out_of_range_description).with(100, 300, 200, false)
expect(subject).to be true
end
- context 'when exceeding GRACE_BALLOON_SECONDS' do
+ context 'when GRACE_BALLOON_SECONDS is exceeded' do
let(:grace_balloon_seconds) { 0 }
- it 'return false when rss exceed soft_limit_rss' do
+ it 'returns false when rss exceeds soft_limit_rss', :aggregate_failures do
allow(memory_killer).to receive(:get_rss).and_return(250)
allow(memory_killer).to receive(:get_soft_limit_rss).and_return(200)
allow(memory_killer).to receive(:get_hard_limit_rss).and_return(300)
@@ -222,8 +223,7 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
.with(:above_soft_limit)
.and_call_original
- expect(memory_killer).to receive(:log_rss_out_of_range)
- .with(250, 300, 200)
+ expect(memory_killer).to receive(:out_of_range_description).with(250, 300, 200, true)
expect(subject).to be false
end
@@ -318,7 +318,7 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
subject { memory_killer.send(:signal_pgroup, signal, explanation) }
- it 'send signal to this proces if it is not group leader' do
+ it 'sends a signal to this process if it is not the group leader' do
expect(Process).to receive(:getpgrp).and_return(pid + 1)
expect(Sidekiq.logger).to receive(:warn).once
@@ -351,12 +351,34 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
let(:current_rss) { 100 }
let(:soft_limit_rss) { 200 }
let(:hard_limit_rss) { 300 }
+ let(:jid) { 1 }
let(:reason) { 'rss out of range reason description' }
+ let(:queue) { 'default' }
+ let(:running_jobs) { [{ jid: jid, worker_class: 'DummyWorker' }] }
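+ # Minimal ApplicationWorker implementation, stubbed under the DummyWorker constant so the running job is reported with a real worker class name.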
+ let(:worker) do
+ Class.new do
+ def self.name
+ 'DummyWorker'
+ end
+
+ include ApplicationWorker
+ end
+ end
+
+ before do
+ stub_const("DummyWorker", worker)
+
+ allow(memory_killer).to receive(:get_rss).and_return(*current_rss)
+ allow(memory_killer).to receive(:get_soft_limit_rss).and_return(soft_limit_rss)
+ allow(memory_killer).to receive(:get_hard_limit_rss).and_return(hard_limit_rss)
+
+ memory_killer.send(:refresh_state, :running)
+ end
- subject { memory_killer.send(:log_rss_out_of_range, current_rss, hard_limit_rss, soft_limit_rss) }
+ subject { memory_killer.send(:log_rss_out_of_range) }
it 'invoke sidekiq logger warn' do
- expect(memory_killer).to receive(:out_of_range_description).with(current_rss, hard_limit_rss, soft_limit_rss).and_return(reason)
+ expect(memory_killer).to receive(:out_of_range_description).with(current_rss, hard_limit_rss, soft_limit_rss, true).and_return(reason)
expect(Sidekiq.logger).to receive(:warn)
.with(
class: described_class.to_s,
@@ -365,9 +387,12 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
current_rss: current_rss,
hard_limit_rss: hard_limit_rss,
soft_limit_rss: soft_limit_rss,
- reason: reason)
+ reason: reason,
+ running_jobs: running_jobs)
- subject
+ Gitlab::SidekiqDaemon::Monitor.instance.within_job(DummyWorker, jid, queue) do
+ subject
+ end
end
end
@@ -375,8 +400,9 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
let(:hard_limit) { 300 }
let(:soft_limit) { 200 }
let(:grace_balloon_seconds) { 12 }
+ let(:deadline_exceeded) { true }
- subject { memory_killer.send(:out_of_range_description, rss, hard_limit, soft_limit) }
+ subject { memory_killer.send(:out_of_range_description, rss, hard_limit, soft_limit, deadline_exceeded) }
context 'when rss > hard_limit' do
let(:rss) { 400 }
@@ -389,9 +415,20 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
context 'when rss <= hard_limit' do
let(:rss) { 300 }
- it 'tells reason' do
- stub_const("#{described_class}::GRACE_BALLOON_SECONDS", grace_balloon_seconds)
- expect(subject).to eq("current_rss(#{rss}) > soft_limit_rss(#{soft_limit}) longer than GRACE_BALLOON_SECONDS(#{grace_balloon_seconds})")
+ context 'when deadline is exceeded' do
+ let(:deadline_exceeded) { true }
+
+ it 'returns the reason' do
+ stub_const("#{described_class}::GRACE_BALLOON_SECONDS", grace_balloon_seconds)
+ expect(subject).to eq("current_rss(#{rss}) > soft_limit_rss(#{soft_limit}) longer than GRACE_BALLOON_SECONDS(#{grace_balloon_seconds})")
+ end
+ end
+ context 'when deadline is not exceeded' do
+ let(:deadline_exceeded) { false }
+
+ it 'returns the reason' do
+ expect(subject).to eq("current_rss(#{rss}) > soft_limit_rss(#{soft_limit})")
+ end
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 7d31979a393..117b37ffda3 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -169,6 +169,16 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
subject.call(worker, job, :test) { nil }
end
end
+
+ context 'when job is interrupted' do
+ let(:job) { { 'interrupted_count' => 1 } }
+
+ it 'sets sidekiq_jobs_interrupted_total metric' do
+ expect(interrupted_total_metric).to receive(:increment)
+
+ subject.call(worker, job, :test) { nil }
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
index 2b94eaa2db9..2554a15d97e 100644
--- a/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
+++ b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
@@ -5,46 +5,83 @@ require 'spec_helper'
RSpec.describe Gitlab::Tracking::Destinations::SnowplowMicro do
include StubENV
+ let(:snowplow_micro_settings) do
+ {
+ enabled: true,
+ address: address
+ }
+ end
+
+ let(:address) { "gdk.test:9091" }
+
before do
- stub_application_setting(snowplow_enabled: true)
- stub_env('SNOWPLOW_MICRO_ENABLE', '1')
allow(Rails.env).to receive(:development?).and_return(true)
end
describe '#hostname' do
- context 'when SNOWPLOW_MICRO_URI is set' do
+ context 'when snowplow_micro config is set' do
+ let(:address) { '127.0.0.1:9091' }
+
before do
- stub_env('SNOWPLOW_MICRO_URI', 'http://gdk.test:9091')
+ stub_config(snowplow_micro: snowplow_micro_settings)
end
- it 'returns hostname URI part' do
- expect(subject.hostname).to eq('gdk.test:9091')
+ it 'returns the proper URI' do
+ expect(subject.hostname).to eq('127.0.0.1:9091')
+ expect(subject.uri.scheme).to eq('http')
+ end
+
+ context 'when gitlab config has https scheme' do
+ before do
+ stub_config_setting(https: true)
+ end
+
+ it 'returns the proper URI' do
+ expect(subject.hostname).to eq('127.0.0.1:9091')
+ expect(subject.uri.scheme).to eq('https')
+ end
end
end
- context 'when SNOWPLOW_MICRO_URI is without protocol' do
+ context 'when snowplow_micro config is not set' do
before do
- stub_env('SNOWPLOW_MICRO_URI', 'gdk.test:9091')
+ allow(Gitlab.config).to receive(:snowplow_micro).and_raise(Settingslogic::MissingSetting)
end
- it 'returns hostname URI part' do
- expect(subject.hostname).to eq('gdk.test:9091')
+ context 'when SNOWPLOW_MICRO_URI has scheme and port' do
+ before do
+ stub_env('SNOWPLOW_MICRO_URI', 'http://gdk.test:9091')
+ end
+
+ it 'returns hostname URI part' do
+ expect(subject.hostname).to eq('gdk.test:9091')
+ end
end
- end
- context 'when SNOWPLOW_MICRO_URI is hostname only' do
- before do
- stub_env('SNOWPLOW_MICRO_URI', 'uriwithoutport')
+ context 'when SNOWPLOW_MICRO_URI is without protocol' do
+ before do
+ stub_env('SNOWPLOW_MICRO_URI', 'gdk.test:9091')
+ end
+
+ it 'returns hostname URI part' do
+ expect(subject.hostname).to eq('gdk.test:9091')
+ end
end
- it 'returns hostname URI with default HTTP port' do
- expect(subject.hostname).to eq('uriwithoutport:80')
+ context 'when SNOWPLOW_MICRO_URI is hostname only' do
+ before do
+ stub_env('SNOWPLOW_MICRO_URI', 'uriwithoutport')
+ end
+
+ it 'returns hostname URI with default HTTP port' do
+ expect(subject.hostname).to eq('uriwithoutport:80')
+ end
end
- end
- context 'when SNOWPLOW_MICRO_URI is not set' do
- it 'returns localhost hostname' do
- expect(subject.hostname).to eq('localhost:9090')
+ context 'when SNOWPLOW_MICRO_URI is not set' do
+ it 'returns localhost hostname' do
+ expect(subject.hostname).to eq('localhost:9090')
+ end
end
end
end
@@ -53,7 +90,7 @@ RSpec.describe Gitlab::Tracking::Destinations::SnowplowMicro do
let_it_be(:group) { create :group }
before do
- stub_env('SNOWPLOW_MICRO_URI', 'http://gdk.test:9091')
+ stub_config(snowplow_micro: snowplow_micro_settings)
end
it 'includes protocol with the correct value' do
diff --git a/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb b/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
index 06cc2d3800c..1d4725cf405 100644
--- a/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
+++ b/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Tracking::Destinations::Snowplow do
+RSpec.describe Gitlab::Tracking::Destinations::Snowplow, :do_not_stub_snowplow_by_default do
let(:emitter) { SnowplowTracker::Emitter.new('localhost', buffer_size: 1) }
let(:tracker) { SnowplowTracker::Tracker.new(emitter, SnowplowTracker::Subject.new, 'namespace', 'app_id') }
diff --git a/spec/lib/gitlab/tracking/incident_management_spec.rb b/spec/lib/gitlab/tracking/incident_management_spec.rb
index fbcb9bf3e4c..ef7816aa0db 100644
--- a/spec/lib/gitlab/tracking/incident_management_spec.rb
+++ b/spec/lib/gitlab/tracking/incident_management_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Gitlab::Tracking::IncidentManagement do
described_class.track_from_params(params)
end
- context 'known params' do
+ context 'known params', :do_not_stub_snowplow_by_default do
known_params = described_class.tracking_keys
known_params.each do |key, values|
diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb
index 508b33949a8..cfb83bc0528 100644
--- a/spec/lib/gitlab/tracking/standard_context_spec.rb
+++ b/spec/lib/gitlab/tracking/standard_context_spec.rb
@@ -93,30 +93,11 @@ RSpec.describe Gitlab::Tracking::StandardContext do
end
context 'with incorrect argument type' do
- context 'when standard_context_type_check FF is disabled' do
- before do
- stub_feature_flags(standard_context_type_check: false)
- end
-
- subject { described_class.new(project: create(:group)) }
-
- it 'does not call `track_and_raise_for_dev_exception`' do
- expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
- snowplow_context
- end
- end
+ subject { described_class.new(project: create(:group)) }
- context 'when standard_context_type_check FF is enabled' do
- before do
- stub_feature_flags(standard_context_type_check: true)
- end
-
- subject { described_class.new(project: create(:group)) }
-
- it 'does call `track_and_raise_for_dev_exception`' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- snowplow_context
- end
+ it 'calls `track_and_raise_for_dev_exception`' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ snowplow_context
end
end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index cc973be8be9..dd62c832f6f 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -10,11 +10,11 @@ RSpec.describe Gitlab::Tracking do
stub_application_setting(snowplow_cookie_domain: '.gitfoo.com')
stub_application_setting(snowplow_app_id: '_abc123_')
- described_class.instance_variable_set("@snowplow", nil)
+ described_class.instance_variable_set("@tracker", nil)
end
after do
- described_class.instance_variable_set("@snowplow", nil)
+ described_class.instance_variable_set("@tracker", nil)
end
describe '.options' do
@@ -34,6 +34,26 @@ RSpec.describe Gitlab::Tracking do
end
end
+ shared_examples 'delegates to SnowplowMicro destination with proper options' do
+ it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::SnowplowMicro
+
+ it 'returns useful client options' do
+ expected_fields = {
+ namespace: 'gl',
+ hostname: 'localhost:9090',
+ cookieDomain: '.gitlab.com',
+ appId: '_abc123_',
+ protocol: 'http',
+ port: 9090,
+ forceSecureTracker: false,
+ formTracking: true,
+ linkClickTracking: true
+ }
+
+ expect(subject.options(nil)).to match(expected_fields)
+ end
+ end
+
context 'when destination is Snowplow' do
it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::Snowplow
@@ -53,26 +73,31 @@ RSpec.describe Gitlab::Tracking do
context 'when destination is SnowplowMicro' do
before do
- stub_env('SNOWPLOW_MICRO_ENABLE', '1')
allow(Rails.env).to receive(:development?).and_return(true)
end
- it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::SnowplowMicro
+ context "enabled with yml config" do
+ let(:snowplow_micro_settings) do
+ {
+ enabled: true,
+ address: "localhost:9090"
+ }
+ end
- it 'returns useful client options' do
- expected_fields = {
- namespace: 'gl',
- hostname: 'localhost:9090',
- cookieDomain: '.gitlab.com',
- appId: '_abc123_',
- protocol: 'http',
- port: 9090,
- forceSecureTracker: false,
- formTracking: true,
- linkClickTracking: true
- }
+ before do
+ stub_config(snowplow_micro: snowplow_micro_settings)
+ end
- expect(subject.options(nil)).to match(expected_fields)
+ it_behaves_like 'delegates to SnowplowMicro destination with proper options'
+ end
+
+ context "enabled with env variable" do
+ before do
+ allow(Gitlab.config).to receive(:snowplow_micro).and_raise(Settingslogic::MissingSetting)
+ stub_env('SNOWPLOW_MICRO_ENABLE', '1')
+ end
+
+ it_behaves_like 'delegates to SnowplowMicro destination with proper options'
end
end
diff --git a/spec/lib/gitlab/tree_summary_spec.rb b/spec/lib/gitlab/tree_summary_spec.rb
index 3021d92244e..f45005fcc9b 100644
--- a/spec/lib/gitlab/tree_summary_spec.rb
+++ b/spec/lib/gitlab/tree_summary_spec.rb
@@ -30,50 +30,31 @@ RSpec.describe Gitlab::TreeSummary do
describe '#summarize' do
let(:project) { create(:project, :custom_repo, files: { 'a.txt' => '' }) }
- subject(:summarized) { summary.summarize }
+ subject(:entries) { summary.summarize }
- it 'returns an array of entries, and an array of commits' do
- expect(summarized).to be_a(Array)
- expect(summarized.size).to eq(2)
+ it 'returns an array of entries' do
+ expect(entries).to be_a(Array)
+ expect(entries.size).to eq(1)
- entries, commits = *summarized
aggregate_failures do
expect(entries).to contain_exactly(
a_hash_including(file_name: 'a.txt', commit: have_attributes(id: commit.id))
)
- expect(commits).to match_array(entries.map { |entry| entry[:commit] })
- end
- end
-
- context 'when offset is over the limit' do
- let(:offset) { 100 }
-
- it 'returns an empty array' do
- expect(summarized).to eq([[], []])
+ expect(summary.resolved_commits.values).to match_array(entries.map { |entry| entry[:commit] })
end
end
context 'with caching', :use_clean_rails_memory_store_caching do
subject { Rails.cache.fetch(key) }
- context 'Repository tree cache' do
- let(:key) { ['projects', project.id, 'content', commit.id, path] }
-
- it 'creates a cache for repository content' do
- summarized
-
- is_expected.to eq([{ file_name: 'a.txt', type: :blob }])
- end
- end
-
context 'Commits list cache' do
let(:offset) { 0 }
let(:limit) { 25 }
- let(:key) { ['projects', project.id, 'last_commits', commit.id, path, offset, limit] }
+ let(:key) { ['projects', project.id, 'last_commits', commit.id, path, offset, limit + 1] }
it 'creates a cache for commits list' do
- summarized
+ entries
is_expected.to eq('a.txt' => commit.to_hash)
end
@@ -93,7 +74,7 @@ RSpec.describe Gitlab::TreeSummary do
let(:expected_message) { message[0...1021] + '...' }
it 'truncates commit message to 1 kilobyte' do
- summarized
+ entries
is_expected.to include('long.txt' => a_hash_including(message: expected_message))
end
@@ -102,7 +83,7 @@ RSpec.describe Gitlab::TreeSummary do
end
end
- describe '#summarize (entries)' do
+ describe '#fetch_logs' do
let(:limit) { 4 }
custom_files = {
@@ -116,33 +97,32 @@ RSpec.describe Gitlab::TreeSummary do
let!(:project) { create(:project, :custom_repo, files: custom_files) }
let(:commit) { repo.head_commit }
- subject(:entries) { summary.summarize.first }
+ subject(:entries) { summary.fetch_logs.first }
it 'summarizes the entries within the window' do
is_expected.to contain_exactly(
- a_hash_including(type: :tree, file_name: 'directory'),
- a_hash_including(type: :blob, file_name: 'a.txt'),
- a_hash_including(type: :blob, file_name: ':file'),
- a_hash_including(type: :tree, file_name: ':dir')
+ a_hash_including('file_name' => 'directory'),
+ a_hash_including('file_name' => 'a.txt'),
+ a_hash_including('file_name' => ':file'),
+ a_hash_including('file_name' => ':dir')
# b.txt is excluded by the limit
)
end
it 'references the commit and commit path in entries' do
# There are 2 trees and the summary is not ordered
- entry = entries.find { |entry| entry[:commit].id == commit.id }
+ entry = entries.find { |entry| entry['commit']['id'] == commit.id }
expected_commit_path = Gitlab::Routing.url_helpers.project_commit_path(project, commit)
- expect(entry[:commit]).to be_a(::Commit)
- expect(entry[:commit_path]).to eq(expected_commit_path)
- expect(entry[:commit_title_html]).to eq(commit.message)
+ expect(entry['commit_path']).to eq(expected_commit_path)
+ expect(entry['commit_title_html']).to eq(commit.message)
end
context 'in a good subdirectory' do
let(:path) { 'directory' }
it 'summarizes the entries in the subdirectory' do
- is_expected.to contain_exactly(a_hash_including(type: :blob, file_name: 'c.txt'))
+ is_expected.to contain_exactly(a_hash_including('file_name' => 'c.txt'))
end
end
@@ -150,7 +130,7 @@ RSpec.describe Gitlab::TreeSummary do
let(:path) { ':dir' }
it 'summarizes the entries in the subdirectory' do
- is_expected.to contain_exactly(a_hash_including(type: :blob, file_name: 'test.txt'))
+ is_expected.to contain_exactly(a_hash_including('file_name' => 'test.txt'))
end
end
@@ -164,7 +144,25 @@ RSpec.describe Gitlab::TreeSummary do
let(:offset) { 4 }
it 'returns entries from the offset' do
- is_expected.to contain_exactly(a_hash_including(type: :blob, file_name: 'b.txt'))
+ is_expected.to contain_exactly(a_hash_including('file_name' => 'b.txt'))
+ end
+ end
+
+ context 'next offset' do
+ subject { summary.fetch_logs.last }
+
+ context 'when there are more entries to fetch' do
+ it 'returns the next offset' do
+ is_expected.to eq(4)
+ end
+ end
+
+ context 'when there are no more entries to fetch' do
+ let(:limit) { 5 }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
end
end
end
@@ -178,10 +176,11 @@ RSpec.describe Gitlab::TreeSummary do
let(:project) { create(:project, :repository) }
let(:commit) { repo.commit(test_commit_sha) }
let(:limit) { nil }
- let(:entries) { summary.summarize.first }
+ let(:entries) { summary.summarize }
subject(:commits) do
- summary.summarize.last
+ summary.summarize
+ summary.resolved_commits.values
end
it 'returns an Array of ::Commit objects' do
@@ -227,7 +226,7 @@ RSpec.describe Gitlab::TreeSummary do
let_it_be(:project) { create(:project, :empty_repo) }
let_it_be(:issue) { create(:issue, project: project) }
- let(:entries) { summary.summarize.first }
+ let(:entries) { summary.summarize }
let(:entry) { entries.find { |entry| entry[:file_name] == 'issue.txt' } }
before_all do
@@ -264,67 +263,6 @@ RSpec.describe Gitlab::TreeSummary do
end
end
- describe '#more?' do
- let(:path) { 'tmp/more' }
-
- where(:num_entries, :offset, :limit, :expected_result) do
- 0 | 0 | 0 | false
- 0 | 0 | 1 | false
-
- 1 | 0 | 0 | true
- 1 | 0 | 1 | false
- 1 | 1 | 0 | false
- 1 | 1 | 1 | false
-
- 2 | 0 | 0 | true
- 2 | 0 | 1 | true
- 2 | 0 | 2 | false
- 2 | 0 | 3 | false
- 2 | 1 | 0 | true
- 2 | 1 | 1 | false
- 2 | 2 | 0 | false
- 2 | 2 | 1 | false
- end
-
- with_them do
- before do
- create_file('dummy', path: 'other') if num_entries == 0
- 1.upto(num_entries) { |n| create_file(n, path: path) }
- end
-
- subject { summary.more? }
-
- it { is_expected.to eq(expected_result) }
- end
- end
-
- describe '#next_offset' do
- let(:path) { 'tmp/next_offset' }
-
- where(:num_entries, :offset, :limit, :expected_result) do
- 0 | 0 | 0 | 0
- 0 | 0 | 1 | 1
- 0 | 1 | 0 | 1
- 0 | 1 | 1 | 1
-
- 1 | 0 | 0 | 0
- 1 | 0 | 1 | 1
- 1 | 1 | 0 | 1
- 1 | 1 | 1 | 2
- end
-
- with_them do
- before do
- create_file('dummy', path: 'other') if num_entries == 0
- 1.upto(num_entries) { |n| create_file(n, path: path) }
- end
-
- subject { summary.next_offset }
-
- it { is_expected.to eq(expected_result) }
- end
- end
-
def create_file(unique, path:)
repo.create_file(
project.creator,
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index 070586319a5..a1bddcb3a47 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
milestone: '14.1',
default_generation: 'generation_1',
key_path: 'uuid',
- product_group: 'group::product analytics',
+ product_group: 'product_analytics',
time_frame: 'none',
data_source: 'database',
distribution: %w(ee ce),
@@ -270,7 +270,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
milestone: '14.1',
default_generation: 'generation_1',
key_path: 'counter.category.event',
- product_group: 'group::product analytics',
+ product_group: 'product_analytics',
time_frame: 'none',
data_source: 'database',
distribution: %w(ee ce),
diff --git a/spec/lib/gitlab/usage/metric_spec.rb b/spec/lib/gitlab/usage/metric_spec.rb
index 10ae94e746b..8e0fce37e46 100644
--- a/spec/lib/gitlab/usage/metric_spec.rb
+++ b/spec/lib/gitlab/usage/metric_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::Usage::Metric do
description: "Count of Issues created",
product_section: "dev",
product_stage: "plan",
- product_group: "group::plan",
+ product_group: "plan",
product_category: "issue_tracking",
value_type: "number",
status: "active",
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/unique_active_users_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/unique_active_users_metric_spec.rb
deleted file mode 100644
index 8a0ce61de74..00000000000
--- a/spec/lib/gitlab/usage/metrics/instrumentations/unique_active_users_metric_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Metrics::Instrumentations::UniqueActiveUsersMetric do
- let_it_be(:user1) { create(:user, last_activity_on: 1.day.ago) }
- let_it_be(:user2) { create(:user, last_activity_on: 5.days.ago) }
- let_it_be(:user3) { create(:user, last_activity_on: 50.days.ago) }
- let_it_be(:user4) { create(:user) }
- let_it_be(:user5) { create(:user, user_type: 1, last_activity_on: 5.days.ago ) } # support bot
- let_it_be(:user6) { create(:user, state: 'blocked') }
-
- context '28d' do
- let(:start) { 30.days.ago.to_date.to_s }
- let(:finish) { 2.days.ago.to_date.to_s }
- let(:expected_value) { 1 }
- let(:expected_query) do
- "SELECT COUNT(\"users\".\"id\") FROM \"users\" WHERE (\"users\".\"state\" IN ('active')) AND " \
- "(\"users\".\"user_type\" IS NULL OR \"users\".\"user_type\" IN (6, 4)) AND \"users\".\"last_activity_on\" " \
- "BETWEEN '#{start}' AND '#{finish}'"
- end
-
- it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' }
- end
-
- context 'all' do
- let(:expected_value) { 4 }
-
- it_behaves_like 'a correct instrumented metric value', { time_frame: 'all' }
- end
-end
diff --git a/spec/lib/gitlab/usage/service_ping/instrumented_payload_spec.rb b/spec/lib/gitlab/usage/service_ping/instrumented_payload_spec.rb
index 76548483cfa..9d2711c49c6 100644
--- a/spec/lib/gitlab/usage/service_ping/instrumented_payload_spec.rb
+++ b/spec/lib/gitlab/usage/service_ping/instrumented_payload_spec.rb
@@ -46,4 +46,54 @@ RSpec.describe Gitlab::Usage::ServicePing::InstrumentedPayload do
expect(described_class.new(['counts.ci_builds'], :with_value).build).to eq({})
end
end
+
+ context 'with broken metric definition file' do
+ let(:key_path) { 'counts.broken_metric_definition_test' }
+ let(:definitions) { [Gitlab::Usage::MetricDefinition.new(key_path, key_path: key_path)] }
+
+ subject(:build_metric) { described_class.new([key_path], :with_value).build }
+
+ before do
+ allow(Gitlab::Usage::MetricDefinition).to receive(:with_instrumentation_class).and_return(definitions)
+ allow_next_instance_of(Gitlab::Usage::Metric) do |instance|
+ allow(instance).to receive(:with_value).and_raise(error)
+ end
+ end
+
+ context 'when instrumentation class name is incorrect' do
+ let(:error) { NameError.new("uninitialized constant Gitlab::Usage::Metrics::Instrumentations::IDontExists") }
+
+ it 'tracks the error and returns the fallback value', :aggregate_failures do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(error)
+ expect(build_metric).to eql(counts: { broken_metric_definition_test: -1 })
+ end
+ end
+
+ context 'when instrumentation class raises TypeError' do
+ let(:error) { TypeError.new("nil can't be coerced into BigDecimal") }
+
+ it 'tracks the error and returns the fallback value', :aggregate_failures do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(error)
+ expect(build_metric).to eql(counts: { broken_metric_definition_test: -1 })
+ end
+ end
+
+ context 'when instrumentation class raises ArgumentError' do
+ let(:error) { ArgumentError.new("wrong number of arguments (given 2, expected 0)") }
+
+ it 'tracks the error and returns the fallback value', :aggregate_failures do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(error)
+ expect(build_metric).to eql(counts: { broken_metric_definition_test: -1 })
+ end
+ end
+
+ context 'when instrumentation class raises StandardError' do
+ let(:error) { StandardError.new("something went very wrong") }
+
+ it 'tracks the error and returns the fallback value', :aggregate_failures do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(error)
+ expect(build_metric).to eql(counts: { broken_metric_definition_test: -1 })
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
index dbc34681660..bfbabd858f0 100644
--- a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
let(:user1) { build(:user, id: 1) }
let(:user2) { build(:user, id: 2) }
let(:user3) { build(:user, id: 3) }
+ let(:project) { build(:project) }
let(:time) { Time.zone.now }
shared_examples 'tracks and counts action' do
@@ -15,10 +16,9 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
specify do
aggregate_failures do
- expect(track_action(author: user1)).to be_truthy
- expect(track_action(author: user1)).to be_truthy
- expect(track_action(author: user2)).to be_truthy
- expect(track_action(author: user3, time: time - 3.days)).to be_truthy
+ expect(track_action(author: user1, project: project)).to be_truthy
+ expect(track_action(author: user2, project: project)).to be_truthy
+ expect(track_action(author: user3, time: time - 3.days, project: project)).to be_truthy
expect(count_unique(date_from: time, date_to: Date.today)).to eq(2)
expect(count_unique(date_from: time - 5.days, date_to: Date.tomorrow)).to eq(3)
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
end
it 'does not track edit actions if author is not present' do
- expect(track_action(author: nil)).to be_nil
+ expect(track_action(author: nil, project: project)).to be_nil
end
end
@@ -67,16 +67,16 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
end
it 'can return the count of actions per user deduplicated' do
- described_class.track_web_ide_edit_action(author: user1)
- described_class.track_live_preview_edit_action(author: user1)
- described_class.track_snippet_editor_edit_action(author: user1)
- described_class.track_sfe_edit_action(author: user1)
- described_class.track_web_ide_edit_action(author: user2, time: time - 2.days)
- described_class.track_web_ide_edit_action(author: user3, time: time - 3.days)
- described_class.track_live_preview_edit_action(author: user2, time: time - 2.days)
- described_class.track_live_preview_edit_action(author: user3, time: time - 3.days)
- described_class.track_snippet_editor_edit_action(author: user3, time: time - 3.days)
- described_class.track_sfe_edit_action(author: user3, time: time - 3.days)
+ described_class.track_web_ide_edit_action(author: user1, project: project)
+ described_class.track_live_preview_edit_action(author: user1, project: project)
+ described_class.track_snippet_editor_edit_action(author: user1, project: project)
+ described_class.track_sfe_edit_action(author: user1, project: project)
+ described_class.track_web_ide_edit_action(author: user2, time: time - 2.days, project: project)
+ described_class.track_web_ide_edit_action(author: user3, time: time - 3.days, project: project)
+ described_class.track_live_preview_edit_action(author: user2, time: time - 2.days, project: project)
+ described_class.track_live_preview_edit_action(author: user3, time: time - 3.days, project: project)
+ described_class.track_snippet_editor_edit_action(author: user3, time: time - 3.days, project: project)
+ described_class.track_sfe_edit_action(author: user3, time: time - 3.days, project: project)
expect(described_class.count_edit_using_editor(date_from: time, date_to: Date.today)).to eq(1)
expect(described_class.count_edit_using_editor(date_from: time - 5.days, date_to: Date.tomorrow)).to eq(3)
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index 77cf94daa3f..54d49b432f4 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -19,6 +19,82 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
# Monday 1st of June
reference_time = Time.utc(2020, 6, 1)
travel_to(reference_time) { example.run }
+ described_class.clear_memoization(:known_events)
+ end
+
+ context 'when migrating data collection to instrumentation classes' do
+ let_it_be(:instrumented_events) do
+ ::Gitlab::Usage::MetricDefinition.all.map do |definition|
+ next unless definition.attributes[:instrumentation_class] == 'RedisHLLMetric' && definition.available?
+
+ definition.attributes.dig(:options, :events)&.sort
+ end.compact.to_set
+ end
+
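+ # Events in this category that are not yet covered by a RedisHLLMetric metric definition.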
+ def not_instrumented_events(category)
+ described_class
+ .events_for_category(category)
+ .sort
+ .reject do |event|
+ instrumented_events.include?([event])
+ end
+ end
+
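+ # Returns the category's totals event list when it is eligible for totals but not yet instrumented.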
+ def not_instrumented_aggregate(category)
+ events = described_class.events_for_category(category).sort
+
+ return unless described_class::CATEGORIES_FOR_TOTALS.include?(category)
+ return unless described_class.send(:eligible_for_totals?, events)
+ return if instrumented_events.include?(events)
+
+ events
+ end
+
+ describe 'Gitlab::UsageDataCounters::HLLRedisCounter::CATEGORIES_COLLECTED_FROM_METRICS_DEFINITIONS' do
+ it 'includes only fully migrated categories' do
+ wrong_skipped_events = described_class::CATEGORIES_COLLECTED_FROM_METRICS_DEFINITIONS.map do |category|
+ next if not_instrumented_events(category).empty? && not_instrumented_aggregate(category).nil?
+
+ [category, [not_instrumented_events(category), not_instrumented_aggregate(category)].compact]
+ end.compact.to_h
+
+ expect(wrong_skipped_events).to be_empty
+ end
+
+ context 'with not instrumented category' do
+ let(:instrumented_events) { [] }
+
+ it 'can detect a category that has not been migrated' do
+ wrong_skipped_events = described_class::CATEGORIES_COLLECTED_FROM_METRICS_DEFINITIONS.map do |category|
+ next if not_instrumented_events(category).empty? && not_instrumented_aggregate(category).nil?
+
+ [category, [not_instrumented_events(category), not_instrumented_aggregate(category)].compact]
+ end.compact.to_h
+
+ expect(wrong_skipped_events).not_to be_empty
+ end
+ end
+ end
+
+ describe '.unique_events_data' do
+ context 'with use_redis_hll_instrumentation_classes feature enabled' do
+ it 'does not include instrumented categories' do
+ stub_feature_flags(use_redis_hll_instrumentation_classes: true)
+
+ expect(described_class.unique_events_data.keys)
+ .not_to include(*described_class::CATEGORIES_COLLECTED_FROM_METRICS_DEFINITIONS)
+ end
+ end
+
+ context 'with use_redis_hll_instrumentation_classes feature disabled' do
+ it 'includes instrumented categories' do
+ stub_feature_flags(use_redis_hll_instrumentation_classes: false)
+
+ expect(described_class.unique_events_data.keys)
+ .to include(*described_class::CATEGORIES_COLLECTED_FROM_METRICS_DEFINITIONS)
+ end
+ end
+ end
end
describe '.categories' do
@@ -53,11 +129,40 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'growth',
'work_items',
'ci_users',
- 'error_tracking'
+ 'error_tracking',
+ 'manage'
)
end
end
+ describe '.known_events' do
+ let(:ce_temp_dir) { Dir.mktmpdir }
+ let(:ce_temp_file) { Tempfile.new(%w[common .yml], ce_temp_dir) }
+ let(:ce_event) do
+ {
+ "name" => "ce_event",
+ "redis_slot" => "analytics",
+ "category" => "analytics",
+ "expiry" => 84,
+ "aggregation" => "weekly"
+ }
+ end
+
+ before do
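+ # Point KNOWN_EVENTS_PATH at a temp directory containing only the fixture event defined above.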
+ stub_const("#{described_class}::KNOWN_EVENTS_PATH", File.expand_path('*.yml', ce_temp_dir))
+ File.open(ce_temp_file.path, "w+b") { |f| f.write [ce_event].to_yaml }
+ end
+
+ it 'returns ce events' do
+ expect(described_class.known_events).to include(ce_event)
+ end
+
+ after do
+ ce_temp_file.unlink
+ FileUtils.remove_entry(ce_temp_dir) if Dir.exist?(ce_temp_dir)
+ end
+ end
+
describe 'known_events' do
let(:feature) { 'test_hll_redis_counter_ff_check' }
diff --git a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
index cd3388701fe..3f44cfdcf27 100644
--- a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
@@ -82,11 +82,43 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
end
describe '.track_approve_mr_action' do
- subject { described_class.track_approve_mr_action(user: user) }
+ include ProjectForksHelper
+
+ let(:merge_request) { create(:merge_request, target_project: target_project, source_project: source_project) }
+ let(:source_project) { fork_project(target_project) }
+ let(:target_project) { create(:project) }
+
+ subject { described_class.track_approve_mr_action(user: user, merge_request: merge_request) }
it_behaves_like 'a tracked merge request unique event' do
let(:action) { described_class::MR_APPROVE_ACTION }
end
+
+ it 'records correct payload with Snowplow event', :snowplow do
+ stub_feature_flags(route_hll_to_snowplow_phase2: true)
+
+ subject
+
+ expect_snowplow_event(
+ category: 'merge_requests',
+ action: 'i_code_review_user_approve_mr',
+ namespace: target_project.namespace,
+ user: user,
+ project: target_project
+ )
+ end
+
+ context 'when FF is disabled' do
+ before do
+ stub_feature_flags(route_hll_to_snowplow_phase2: false)
+ end
+
+ it 'does not emit snowplow events', :snowplow do
+ subject
+
+ expect_no_snowplow_event
+ end
+ end
end
describe '.track_unapprove_mr_action' do
diff --git a/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
index 4561d898479..0264236f087 100644
--- a/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
@@ -5,46 +5,6 @@ require 'spec_helper'
RSpec.describe Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter, :clean_gitlab_redis_shared_state do
let(:user) { build(:user, id: 1) }
- shared_examples 'counter that does not track the event' do
- it 'does not track the event' do
- expect { 3.times { track_event } }.to not_change {
- Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
- event_names: event_name,
- start_date: 2.weeks.ago,
- end_date: 2.weeks.from_now
- )
- }
- end
- end
-
- shared_examples 'work item unique counter' do
- context 'when track_work_items_activity FF is enabled' do
- it 'tracks a unique event only once' do
- expect { 3.times { track_event } }.to change {
- Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
- event_names: event_name,
- start_date: 2.weeks.ago,
- end_date: 2.weeks.from_now
- )
- }.by(1)
- end
-
- context 'when author is nil' do
- let(:user) { nil }
-
- it_behaves_like 'counter that does not track the event'
- end
- end
-
- context 'when track_work_items_activity FF is disabled' do
- before do
- stub_feature_flags(track_work_items_activity: false)
- end
-
- it_behaves_like 'counter that does not track the event'
- end
- end
-
describe '.track_work_item_created_action' do
subject(:track_event) { described_class.track_work_item_created_action(author: user) }
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 790f5b638b9..6eb00053b17 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -249,7 +249,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
)
end
- it 'includes imports usage data' do
+ it 'includes imports usage data', :clean_gitlab_redis_cache do
for_defined_days_back do
user = create(:user)
@@ -347,7 +347,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
cluster = create(:cluster, user: user)
project = create(:project, creator: user)
create(:clusters_integrations_prometheus, cluster: cluster)
- create(:project_tracing_setting)
create(:project_error_tracking_setting)
create(:incident)
create(:incident, alert_management_alert: create(:alert_management_alert))
@@ -358,7 +357,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
clusters: 2,
clusters_integrations_prometheus: 2,
operations_dashboard_default_dashboard: 2,
- projects_with_tracing_enabled: 2,
projects_with_error_tracking_enabled: 2,
projects_with_incidents: 4,
projects_with_alert_incidents: 2,
@@ -370,7 +368,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
clusters: 1,
clusters_integrations_prometheus: 1,
operations_dashboard_default_dashboard: 1,
- projects_with_tracing_enabled: 1,
projects_with_error_tracking_enabled: 1,
projects_with_incidents: 2,
projects_with_alert_incidents: 1
@@ -535,7 +532,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:groups_inheriting_slack_active]).to eq(1)
expect(count_data[:projects_with_repositories_enabled]).to eq(3)
expect(count_data[:projects_with_error_tracking_enabled]).to eq(1)
- expect(count_data[:projects_with_tracing_enabled]).to eq(1)
expect(count_data[:projects_with_enabled_alert_integrations]).to eq(1)
expect(count_data[:projects_with_terraform_reports]).to eq(2)
expect(count_data[:projects_with_terraform_states]).to eq(2)
@@ -564,7 +560,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:clusters_platforms_eks]).to eq(1)
expect(count_data[:clusters_platforms_gke]).to eq(1)
expect(count_data[:clusters_platforms_user]).to eq(1)
- expect(count_data[:clusters_integrations_elastic_stack]).to eq(1)
expect(count_data[:clusters_integrations_prometheus]).to eq(1)
expect(count_data[:grafana_integrated_projects]).to eq(2)
expect(count_data[:clusters_management_project]).to eq(1)
@@ -1157,35 +1152,36 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
let(:user2) { build(:user, id: 2) }
let(:user3) { build(:user, id: 3) }
let(:user4) { build(:user, id: 4) }
+ let(:project) { build(:project) }
before do
counter = Gitlab::UsageDataCounters::TrackUniqueEvents
- project = Event::TARGET_TYPES[:project]
+ project_type = Event::TARGET_TYPES[:project]
wiki = Event::TARGET_TYPES[:wiki]
design = Event::TARGET_TYPES[:design]
- counter.track_event(event_action: :pushed, event_target: project, author_id: 1)
- counter.track_event(event_action: :pushed, event_target: project, author_id: 1)
- counter.track_event(event_action: :pushed, event_target: project, author_id: 2)
- counter.track_event(event_action: :pushed, event_target: project, author_id: 3)
- counter.track_event(event_action: :pushed, event_target: project, author_id: 4, time: time - 3.days)
+ counter.track_event(event_action: :pushed, event_target: project_type, author_id: 1)
+ counter.track_event(event_action: :pushed, event_target: project_type, author_id: 1)
+ counter.track_event(event_action: :pushed, event_target: project_type, author_id: 2)
+ counter.track_event(event_action: :pushed, event_target: project_type, author_id: 3)
+ counter.track_event(event_action: :pushed, event_target: project_type, author_id: 4, time: time - 3.days)
counter.track_event(event_action: :created, event_target: wiki, author_id: 3)
counter.track_event(event_action: :created, event_target: design, author_id: 3)
counter.track_event(event_action: :created, event_target: design, author_id: 4)
counter = Gitlab::UsageDataCounters::EditorUniqueCounter
- counter.track_web_ide_edit_action(author: user1)
- counter.track_web_ide_edit_action(author: user1)
- counter.track_sfe_edit_action(author: user1)
- counter.track_snippet_editor_edit_action(author: user1)
- counter.track_snippet_editor_edit_action(author: user1, time: time - 3.days)
+ counter.track_web_ide_edit_action(author: user1, project: project)
+ counter.track_web_ide_edit_action(author: user1, project: project)
+ counter.track_sfe_edit_action(author: user1, project: project)
+ counter.track_snippet_editor_edit_action(author: user1, project: project)
+ counter.track_snippet_editor_edit_action(author: user1, time: time - 3.days, project: project)
- counter.track_web_ide_edit_action(author: user2)
- counter.track_sfe_edit_action(author: user2)
+ counter.track_web_ide_edit_action(author: user2, project: project)
+ counter.track_sfe_edit_action(author: user2, project: project)
- counter.track_web_ide_edit_action(author: user3, time: time - 3.days)
- counter.track_snippet_editor_edit_action(author: user3)
+ counter.track_web_ide_edit_action(author: user3, time: time - 3.days, project: project)
+ counter.track_snippet_editor_edit_action(author: user3, project: project)
end
it 'returns the distinct count of user actions within the specified time period' do
@@ -1212,6 +1208,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
let(:ignored_metrics) { ["i_package_composer_deploy_token_weekly"] }
it 'has all known_events' do
+ stub_feature_flags(use_redis_hll_instrumentation_classes: false)
expect(subject).to have_key(:redis_hll_counters)
expect(subject[:redis_hll_counters].keys).to match_array(categories)
@@ -1312,8 +1309,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
"in_product_marketing_email_team_1_sent" => -1,
"in_product_marketing_email_team_1_cta_clicked" => -1,
"in_product_marketing_email_team_2_sent" => -1,
- "in_product_marketing_email_team_2_cta_clicked" => -1,
- "in_product_marketing_email_experience_0_sent" => -1
+ "in_product_marketing_email_team_2_cta_clicked" => -1
}
expect(subject).to eq(expected_data)
@@ -1358,8 +1354,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
"in_product_marketing_email_team_1_sent" => 0,
"in_product_marketing_email_team_1_cta_clicked" => 0,
"in_product_marketing_email_team_2_sent" => 0,
- "in_product_marketing_email_team_2_cta_clicked" => 0,
- "in_product_marketing_email_experience_0_sent" => 0
+ "in_product_marketing_email_team_2_cta_clicked" => 0
}
expect(subject).to eq(expected_data)
@@ -1368,29 +1363,11 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
describe ".with_duration" do
- context 'with feature flag measure_service_ping_metric_collection turned off' do
- before do
- stub_feature_flags(measure_service_ping_metric_collection: false)
- end
-
- it 'does NOT record duration and return block response' do
- expect(::Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator).not_to receive(:new)
-
- expect(described_class.with_duration { 1 + 1 }).to be 2
- end
- end
+ it 'records duration' do
+ expect(::Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator)
+ .to receive(:new).with(2, kind_of(Float))
- context 'with feature flag measure_service_ping_metric_collection turned off' do
- before do
- stub_feature_flags(measure_service_ping_metric_collection: true)
- end
-
- it 'records duration' do
- expect(::Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator)
- .to receive(:new).with(2, kind_of(Float))
-
- described_class.with_duration { 1 + 1 }
- end
+ described_class.with_duration { 1 + 1 }
end
end
diff --git a/spec/lib/gitlab/user_access_spec.rb b/spec/lib/gitlab/user_access_spec.rb
index b1de3e21b77..1ae45d41f2d 100644
--- a/spec/lib/gitlab/user_access_spec.rb
+++ b/spec/lib/gitlab/user_access_spec.rb
@@ -219,19 +219,19 @@ RSpec.describe Gitlab::UserAccess do
describe '#can_create_tag?' do
describe 'push to none protected tag' do
it 'returns true if user is a maintainer' do
- project.add_user(user, :maintainer)
+ project.add_member(user, :maintainer)
expect(access.can_create_tag?('random_tag')).to be_truthy
end
it 'returns true if user is a developer' do
- project.add_user(user, :developer)
+ project.add_member(user, :developer)
expect(access.can_create_tag?('random_tag')).to be_truthy
end
it 'returns false if user is a reporter' do
- project.add_user(user, :reporter)
+ project.add_member(user, :reporter)
expect(access.can_create_tag?('random_tag')).to be_falsey
end
@@ -242,19 +242,19 @@ RSpec.describe Gitlab::UserAccess do
let(:not_existing_tag) { create :protected_tag, project: project }
it 'returns true if user is a maintainer' do
- project.add_user(user, :maintainer)
+ project.add_member(user, :maintainer)
expect(access.can_create_tag?(tag.name)).to be_truthy
end
it 'returns false if user is a developer' do
- project.add_user(user, :developer)
+ project.add_member(user, :developer)
expect(access.can_create_tag?(tag.name)).to be_falsey
end
it 'returns false if user is a reporter' do
- project.add_user(user, :reporter)
+ project.add_member(user, :reporter)
expect(access.can_create_tag?(tag.name)).to be_falsey
end
@@ -266,19 +266,19 @@ RSpec.describe Gitlab::UserAccess do
end
it 'returns true if user is a maintainer' do
- project.add_user(user, :maintainer)
+ project.add_member(user, :maintainer)
expect(access.can_create_tag?(@tag.name)).to be_truthy
end
it 'returns true if user is a developer' do
- project.add_user(user, :developer)
+ project.add_member(user, :developer)
expect(access.can_create_tag?(@tag.name)).to be_truthy
end
it 'returns false if user is a reporter' do
- project.add_user(user, :reporter)
+ project.add_member(user, :reporter)
expect(access.can_create_tag?(@tag.name)).to be_falsey
end
@@ -288,19 +288,19 @@ RSpec.describe Gitlab::UserAccess do
describe '#can_delete_branch?' do
describe 'delete unprotected branch' do
it 'returns true if user is a maintainer' do
- project.add_user(user, :maintainer)
+ project.add_member(user, :maintainer)
expect(access.can_delete_branch?('random_branch')).to be_truthy
end
it 'returns true if user is a developer' do
- project.add_user(user, :developer)
+ project.add_member(user, :developer)
expect(access.can_delete_branch?('random_branch')).to be_truthy
end
it 'returns false if user is a reporter' do
- project.add_user(user, :reporter)
+ project.add_member(user, :reporter)
expect(access.can_delete_branch?('random_branch')).to be_falsey
end
@@ -310,19 +310,19 @@ RSpec.describe Gitlab::UserAccess do
let(:branch) { create(:protected_branch, project: project, name: "test") }
it 'returns true if user is a maintainer' do
- project.add_user(user, :maintainer)
+ project.add_member(user, :maintainer)
expect(access.can_delete_branch?(branch.name)).to be_truthy
end
it 'returns false if user is a developer' do
- project.add_user(user, :developer)
+ project.add_member(user, :developer)
expect(access.can_delete_branch?(branch.name)).to be_falsey
end
it 'returns false if user is a reporter' do
- project.add_user(user, :reporter)
+ project.add_member(user, :reporter)
expect(access.can_delete_branch?(branch.name)).to be_falsey
end
@@ -334,7 +334,7 @@ RSpec.describe Gitlab::UserAccess do
context 'when user cannot push_code to a project repository (eg. as a guest)' do
it 'is false' do
- project.add_user(user, :guest)
+ project.add_member(user, :guest)
expect(access.can_push_for_ref?(ref)).to be_falsey
end
@@ -342,7 +342,7 @@ RSpec.describe Gitlab::UserAccess do
context 'when user can push_code to a project repository (eg. as a developer)' do
it 'is true' do
- project.add_user(user, :developer)
+ project.add_member(user, :developer)
expect(access.can_push_for_ref?(ref)).to be_truthy
end
diff --git a/spec/lib/gitlab/version_info_spec.rb b/spec/lib/gitlab/version_info_spec.rb
index f81e3aa070a..6ed094f11c8 100644
--- a/spec/lib/gitlab/version_info_spec.rb
+++ b/spec/lib/gitlab/version_info_spec.rb
@@ -1,73 +1,170 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
-RSpec.describe 'Gitlab::VersionInfo' do
+RSpec.describe Gitlab::VersionInfo do
before do
- @unknown = Gitlab::VersionInfo.new
- @v0_0_1 = Gitlab::VersionInfo.new(0, 0, 1)
- @v0_1_0 = Gitlab::VersionInfo.new(0, 1, 0)
- @v1_0_0 = Gitlab::VersionInfo.new(1, 0, 0)
- @v1_0_1 = Gitlab::VersionInfo.new(1, 0, 1)
- @v1_1_0 = Gitlab::VersionInfo.new(1, 1, 0)
- @v2_0_0 = Gitlab::VersionInfo.new(2, 0, 0)
+ @unknown = described_class.new
+ @v0_0_1 = described_class.new(0, 0, 1)
+ @v0_1_0 = described_class.new(0, 1, 0)
+ @v1_0_0 = described_class.new(1, 0, 0)
+ @v1_0_1 = described_class.new(1, 0, 1)
+ @v1_0_1_b1 = described_class.new(1, 0, 1, '-b1')
+ @v1_0_1_rc1 = described_class.new(1, 0, 1, '-rc1')
+ @v1_0_1_rc2 = described_class.new(1, 0, 1, '-rc2')
+ @v1_1_0 = described_class.new(1, 1, 0)
+ @v1_1_0_beta1 = described_class.new(1, 1, 0, '-beta1')
+ @v2_0_0 = described_class.new(2, 0, 0)
+ @v13_10_1_1574_89 = described_class.parse("v13.10.1~beta.1574.gf6ea9389", parse_suffix: true)
+ @v13_10_1_1575_89 = described_class.parse("v13.10.1~beta.1575.gf6ea9389", parse_suffix: true)
+ @v13_10_1_1575_90 = described_class.parse("v13.10.1~beta.1575.gf6ea9390", parse_suffix: true)
end
- context '>' do
+ describe '>' do
it { expect(@v2_0_0).to be > @v1_1_0 }
it { expect(@v1_1_0).to be > @v1_0_1 }
+ it { expect(@v1_0_1_b1).to be > @v1_0_0 }
+ it { expect(@v1_0_1_rc1).to be > @v1_0_0 }
+ it { expect(@v1_0_1_rc1).to be > @v1_0_1_b1 }
+ it { expect(@v1_0_1_rc2).to be > @v1_0_1_rc1 }
+ it { expect(@v1_0_1).to be > @v1_0_1_rc1 }
+ it { expect(@v1_0_1).to be > @v1_0_1_rc2 }
it { expect(@v1_0_1).to be > @v1_0_0 }
it { expect(@v1_0_0).to be > @v0_1_0 }
+ it { expect(@v1_1_0_beta1).to be > @v1_0_1_rc2 }
+ it { expect(@v1_1_0).to be > @v1_1_0_beta1 }
it { expect(@v0_1_0).to be > @v0_0_1 }
end
- context '>=' do
- it { expect(@v2_0_0).to be >= Gitlab::VersionInfo.new(2, 0, 0) }
+ describe '>=' do
+ it { expect(@v2_0_0).to be >= described_class.new(2, 0, 0) }
it { expect(@v2_0_0).to be >= @v1_1_0 }
+ it { expect(@v1_0_1_rc2).to be >= @v1_0_1_rc1 }
end
- context '<' do
+ describe '<' do
it { expect(@v0_0_1).to be < @v0_1_0 }
it { expect(@v0_1_0).to be < @v1_0_0 }
it { expect(@v1_0_0).to be < @v1_0_1 }
it { expect(@v1_0_1).to be < @v1_1_0 }
+ it { expect(@v1_0_0).to be < @v1_0_1_rc2 }
+ it { expect(@v1_0_1_rc1).to be < @v1_0_1 }
+ it { expect(@v1_0_1_rc1).to be < @v1_0_1_rc2 }
+ it { expect(@v1_0_1_rc2).to be < @v1_0_1 }
it { expect(@v1_1_0).to be < @v2_0_0 }
+ it { expect(@v13_10_1_1574_89).to be < @v13_10_1_1575_89 }
+ it { expect(@v13_10_1_1575_89).to be < @v13_10_1_1575_90 }
end
- context '<=' do
- it { expect(@v0_0_1).to be <= Gitlab::VersionInfo.new(0, 0, 1) }
+ describe '<=' do
+ it { expect(@v0_0_1).to be <= described_class.new(0, 0, 1) }
it { expect(@v0_0_1).to be <= @v0_1_0 }
+ it { expect(@v1_0_1_b1).to be <= @v1_0_1_rc1 }
+ it { expect(@v1_0_1_rc1).to be <= @v1_0_1_rc2 }
+ it { expect(@v1_1_0_beta1).to be <= @v1_1_0 }
end
- context '==' do
- it { expect(@v0_0_1).to eq(Gitlab::VersionInfo.new(0, 0, 1)) }
- it { expect(@v0_1_0).to eq(Gitlab::VersionInfo.new(0, 1, 0)) }
- it { expect(@v1_0_0).to eq(Gitlab::VersionInfo.new(1, 0, 0)) }
+ describe '==' do
+ it { expect(@v0_0_1).to eq(described_class.new(0, 0, 1)) }
+ it { expect(@v0_1_0).to eq(described_class.new(0, 1, 0)) }
+ it { expect(@v1_0_0).to eq(described_class.new(1, 0, 0)) }
+ it { expect(@v1_0_1_rc1).to eq(described_class.new(1, 0, 1, '-rc1')) }
end
- context '!=' do
+ describe '!=' do
it { expect(@v0_0_1).not_to eq(@v0_1_0) }
+ it { expect(@v1_0_1_rc1).not_to eq(@v1_0_1_rc2) }
end
- context 'unknown' do
+ describe '.unknown' do
it { expect(@unknown).not_to be @v0_0_1 }
- it { expect(@unknown).not_to be Gitlab::VersionInfo.new }
+ it { expect(@unknown).not_to be described_class.new }
it { expect {@unknown > @v0_0_1}.to raise_error(ArgumentError) }
it { expect {@unknown < @v0_0_1}.to raise_error(ArgumentError) }
end
- context 'parse' do
- it { expect(Gitlab::VersionInfo.parse("1.0.0")).to eq(@v1_0_0) }
- it { expect(Gitlab::VersionInfo.parse("1.0.0.1")).to eq(@v1_0_0) }
- it { expect(Gitlab::VersionInfo.parse("1.0.0-ee")).to eq(@v1_0_0) }
- it { expect(Gitlab::VersionInfo.parse("1.0.0-rc1")).to eq(@v1_0_0) }
- it { expect(Gitlab::VersionInfo.parse("1.0.0-rc1-ee")).to eq(@v1_0_0) }
- it { expect(Gitlab::VersionInfo.parse("git 1.0.0b1")).to eq(@v1_0_0) }
- it { expect(Gitlab::VersionInfo.parse("git 1.0b1")).not_to be_valid }
+ describe '.parse' do
+ it { expect(described_class.parse("1.0.0")).to eq(@v1_0_0) }
+ it { expect(described_class.parse("1.0.0.1")).to eq(@v1_0_0) }
+ it { expect(described_class.parse("1.0.0-ee")).to eq(@v1_0_0) }
+ it { expect(described_class.parse("1.0.0-rc1")).to eq(@v1_0_0) }
+ it { expect(described_class.parse("1.0.0-rc1-ee")).to eq(@v1_0_0) }
+ it { expect(described_class.parse("git 1.0.0b1")).to eq(@v1_0_0) }
+ it { expect(described_class.parse("git 1.0b1")).not_to be_valid }
+
+ context 'with parse_suffix: true' do
+ let(:versions) do
+ <<-VERSIONS.lines
+ 0.0.1
+ 0.1.0
+ 1.0.0
+ 1.0.1-b1
+ 1.0.1-rc1
+ 1.0.1-rc2
+ 1.0.1
+ 1.1.0-beta1
+ 1.1.0
+ 2.0.0
+ v13.10.0-pre
+ v13.10.0-rc1
+ v13.10.0-rc2
+ v13.10.0
+ v13.10.1~beta.1574.gf6ea9389
+ v13.10.1~beta.1575.gf6ea9389
+ v13.10.1-rc1
+ v13.10.1-rc2
+ v13.10.1
+ VERSIONS
+ end
+
+ let(:parsed_versions) do
+ versions.map(&:strip).map { |version| described_class.parse(version, parse_suffix: true) }
+ end
+
+ it 'sorts the versions into the correct order' do
+ expect(parsed_versions.shuffle.sort).to eq(parsed_versions)
+ end
+ end
end
- context 'to_s' do
+ describe '#to_s' do
it { expect(@v1_0_0.to_s).to eq("1.0.0") }
+ it { expect(@v1_0_1_rc1.to_s).to eq("1.0.1-rc1") }
it { expect(@unknown.to_s).to eq("Unknown") }
end
+
+ describe '#hash' do
+ it { expect(described_class.parse("1.0.0").hash).to eq(@v1_0_0.hash) }
+ it { expect(described_class.parse("1.0.0.1").hash).to eq(@v1_0_0.hash) }
+ it { expect(described_class.parse("1.0.1b1").hash).to eq(@v1_0_1.hash) }
+ it { expect(described_class.parse("1.0.1-rc1", parse_suffix: true).hash).to eq(@v1_0_1_rc1.hash) }
+ end
+
+ describe '#eql?' do
+ it { expect(described_class.parse("1.0.0").eql?(@v1_0_0)).to be_truthy }
+ it { expect(described_class.parse("1.0.0.1").eql?(@v1_0_0)).to be_truthy }
+ it { expect(@v1_0_1_rc1.eql?(@v1_0_1_rc1)).to be_truthy }
+ it { expect(@v1_0_1_rc1.eql?(@v1_0_1_rc2)).to be_falsey }
+ it { expect(@v1_0_1_rc1.eql?(@v1_0_1)).to be_falsey }
+ it { expect(@v1_0_1.eql?(@v1_0_0)).to be_falsey }
+ it { expect(@v1_1_0.eql?(@v1_0_0)).to be_falsey }
+ it { expect(@v1_0_0.eql?(@v1_0_0)).to be_truthy }
+ it { expect([@v1_0_0, @v1_1_0, @v1_0_0, @v1_0_1_rc1, @v1_0_1_rc1].uniq).to eq [@v1_0_0, @v1_1_0, @v1_0_1_rc1] }
+ end
+
+ describe '#same_minor_version?' do
+ it { expect(@v0_1_0.same_minor_version?(@v0_0_1)).to be_falsey }
+ it { expect(@v1_0_1.same_minor_version?(@v1_0_0)).to be_truthy }
+ it { expect(@v1_0_1_rc1.same_minor_version?(@v1_0_0)).to be_truthy }
+ it { expect(@v1_0_0.same_minor_version?(@v1_0_1)).to be_truthy }
+ it { expect(@v1_1_0.same_minor_version?(@v1_0_0)).to be_falsey }
+ it { expect(@v2_0_0.same_minor_version?(@v1_0_0)).to be_falsey }
+ end
+
+ describe '#without_patch' do
+ it { expect(@v0_1_0.without_patch).to eq(@v0_1_0) }
+ it { expect(@v1_0_0.without_patch).to eq(@v1_0_0) }
+ it { expect(@v1_0_1.without_patch).to eq(@v1_0_0) }
+ it { expect(@v1_0_1_rc1.without_patch).to eq(@v1_0_0) }
+ end
end
diff --git a/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb b/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb
index c0629c8d795..3152dc2ad2f 100644
--- a/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb
+++ b/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb
@@ -3,10 +3,11 @@
require 'spec_helper'
RSpec.describe Gitlab::WikiPages::FrontMatterParser do
- subject(:parser) { described_class.new(raw_content) }
+ subject(:parser) { described_class.new(raw_content, gate) }
let(:content) { 'This is the content' }
let(:end_divider) { '---' }
+ let(:gate) { stub_feature_flag_gate('Gate') }
let(:with_front_matter) do
<<~MD
@@ -61,6 +62,32 @@ RSpec.describe Gitlab::WikiPages::FrontMatterParser do
it { is_expected.to have_attributes(reason: :no_match) }
end
+ context 'the feature flag is disabled' do
+ let(:raw_content) { with_front_matter }
+
+ before do
+ stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => false)
+ end
+
+ it { is_expected.to have_attributes(front_matter: be_empty, content: raw_content) }
+ end
+
+ context 'the feature flag is enabled for the gated object' do
+ let(:raw_content) { with_front_matter }
+
+ before do
+ stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => gate)
+ end
+
+ it do
+ is_expected.to have_attributes(
+ front_matter: have_correct_front_matter,
+ content: content + "\n",
+ reason: be_nil
+ )
+ end
+ end
+
context 'the end divider is ...' do
let(:end_divider) { '...' }
let(:raw_content) { with_front_matter }
diff --git a/spec/lib/gitlab/x509/certificate_spec.rb b/spec/lib/gitlab/x509/certificate_spec.rb
index 2dc30cc871d..d919b99de2a 100644
--- a/spec/lib/gitlab/x509/certificate_spec.rb
+++ b/spec/lib/gitlab/x509/certificate_spec.rb
@@ -116,9 +116,69 @@ RSpec.describe Gitlab::X509::Certificate do
end
end
+ describe '.default_cert_dir' do
+ before do
+ described_class.reset_default_cert_paths
+ end
+
+ after(:context) do
+ described_class.reset_default_cert_paths
+ end
+
+ context 'when SSL_CERT_DIR env variable is not set' do
+ before do
+ stub_env('SSL_CERT_DIR', nil)
+ end
+
+ it 'returns default directory from OpenSSL' do
+ expect(described_class.default_cert_dir).to eq(OpenSSL::X509::DEFAULT_CERT_DIR)
+ end
+ end
+
+ context 'when SSL_CERT_DIR env variable is set' do
+ before do
+ stub_env('SSL_CERT_DIR', '/tmp/foo/certs')
+ end
+
+ it 'returns specified directory' do
+ expect(described_class.default_cert_dir).to eq('/tmp/foo/certs')
+ end
+ end
+ end
+
+ describe '.default_cert_file' do
+ before do
+ described_class.reset_default_cert_paths
+ end
+
+ after(:context) do
+ described_class.reset_default_cert_paths
+ end
+
+ context 'when SSL_CERT_FILE env variable is not set' do
+ before do
+ stub_env('SSL_CERT_FILE', nil)
+ end
+
+ it 'returns default file from OpenSSL' do
+ expect(described_class.default_cert_file).to eq(OpenSSL::X509::DEFAULT_CERT_FILE)
+ end
+ end
+
+ context 'when SSL_CERT_FILE env variable is set' do
+ before do
+ stub_env('SSL_CERT_FILE', '/tmp/foo/cert.pem')
+ end
+
+ it 'returns specified file' do
+ expect(described_class.default_cert_file).to eq('/tmp/foo/cert.pem')
+ end
+ end
+ end
+
describe '.ca_certs_paths' do
it 'returns all files specified by OpenSSL defaults' do
- cert_paths = Dir["#{OpenSSL::X509::DEFAULT_CERT_DIR}/*"]
+ cert_paths = Dir["#{described_class.default_cert_dir}/*"]
expect(described_class.ca_certs_paths).to match_array(cert_paths + [sample_cert])
end
diff --git a/spec/lib/gitlab/x509/commit_spec.rb b/spec/lib/gitlab/x509/commit_spec.rb
index a81955b995e..c7d56e49fab 100644
--- a/spec/lib/gitlab/x509/commit_spec.rb
+++ b/spec/lib/gitlab/x509/commit_spec.rb
@@ -2,14 +2,21 @@
require 'spec_helper'
RSpec.describe Gitlab::X509::Commit do
- describe '#signature' do
- let(:signature) { described_class.new(commit).signature }
+ let(:commit_sha) { '189a6c924013fc3fe40d6f1ec1dc20214183bc97' }
+ let(:user) { create(:user, email: X509Helpers::User1.certificate_email) }
+ let(:project) { create(:project, :repository, path: X509Helpers::User1.path, creator: user) }
+ let(:commit) { project.commit_by(oid: commit_sha) }
+ let(:signature) { Gitlab::X509::Commit.new(commit).signature }
+ let(:store) { OpenSSL::X509::Store.new }
+ let(:certificate) { OpenSSL::X509::Certificate.new(X509Helpers::User1.trust_cert) }
- context 'returns the cached signature' do
- let(:commit_sha) { '189a6c924013fc3fe40d6f1ec1dc20214183bc97' }
- let(:project) { create(:project, :public, :repository) }
- let(:commit) { create(:commit, project: project, sha: commit_sha) }
+ before do
+ store.add_cert(certificate) if certificate
+ allow(OpenSSL::X509::Store).to receive(:new).and_return(store)
+ end
+ describe '#signature' do
+ context 'returns the cached signature' do
it 'on second call' do
allow_any_instance_of(described_class).to receive(:new).and_call_original
expect_any_instance_of(described_class).to receive(:create_cached_signature!).and_call_original
@@ -23,13 +30,29 @@ RSpec.describe Gitlab::X509::Commit do
end
context 'unsigned commit' do
- let!(:project) { create :project, :repository, path: X509Helpers::User1.path }
- let!(:commit_sha) { X509Helpers::User1.commit }
- let!(:commit) { create :commit, project: project, sha: commit_sha }
+ let(:project) { create :project, :repository, path: X509Helpers::User1.path }
+ let(:commit_sha) { X509Helpers::User1.commit }
+ let(:commit) { create :commit, project: project, sha: commit_sha }
it 'returns nil' do
expect(signature).to be_nil
end
end
end
+
+ describe '#update_signature!' do
+ let(:certificate) { nil }
+
+ it 'updates verification status' do
+ signature
+
+ cert = OpenSSL::X509::Certificate.new(X509Helpers::User1.trust_cert)
+ store.add_cert(cert)
+
+ stored_signature = CommitSignatures::X509CommitSignature.find_by_commit_sha(commit_sha)
+ expect { described_class.new(commit).update_signature!(stored_signature) }.to(
+ change { signature.reload.verification_status }.from('unverified').to('verified')
+ )
+ end
+ end
end
diff --git a/spec/lib/gitlab/x509/signature_spec.rb b/spec/lib/gitlab/x509/signature_spec.rb
index 0e34d5393d6..5626e49bfe1 100644
--- a/spec/lib/gitlab/x509/signature_spec.rb
+++ b/spec/lib/gitlab/x509/signature_spec.rb
@@ -107,7 +107,7 @@ RSpec.describe Gitlab::X509::Signature do
f.print certificate.to_pem
end
- stub_const("OpenSSL::X509::DEFAULT_CERT_FILE", file_path)
+ allow(Gitlab::X509::Certificate).to receive(:default_cert_file).and_return(file_path)
allow(OpenSSL::X509::Store).to receive(:new).and_return(store)
end
diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb
index a81ed38382b..ba49c00245e 100644
--- a/spec/lib/google_api/cloud_platform/client_spec.rb
+++ b/spec/lib/google_api/cloud_platform/client_spec.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
+require 'google/apis/sqladmin_v1beta4'
RSpec.describe GoogleApi::CloudPlatform::Client do
let(:token) { 'token' }
@@ -8,6 +9,7 @@ RSpec.describe GoogleApi::CloudPlatform::Client do
let(:user_agent_options) { client.instance_eval { user_agent_header } }
let(:gcp_project_id) { String('gcp_proj_id') }
let(:operation) { true }
+ let(:database_instance) { Google::Apis::SqladminV1beta4::DatabaseInstance.new(state: 'RUNNABLE') }
describe '.session_key_for_redirect_uri' do
let(:state) { 'random_string' }
@@ -350,4 +352,40 @@ RSpec.describe GoogleApi::CloudPlatform::Client do
.to_return(status: 200, body: "", headers: {})
end
end
+
+ describe '#create_cloudsql_database' do
+ subject { client.create_cloudsql_database(:gcp_project_id, :instance_name, :database_name) }
+
+ it 'calls Google Api SQLAdminService#insert_database' do
+ expect_any_instance_of(Google::Apis::SqladminV1beta4::SQLAdminService)
+ .to receive(:insert_database)
+ .with(any_args)
+ .and_return(operation)
+ is_expected.to eq(operation)
+ end
+ end
+
+ describe '#create_cloudsql_user' do
+ subject { client.create_cloudsql_user(:gcp_project_id, :instance_name, :database_name, :user_name) }
+
+ it 'calls Google Api SQLAdminService#insert_user' do
+ expect_any_instance_of(Google::Apis::SqladminV1beta4::SQLAdminService)
+ .to receive(:insert_user)
+ .with(any_args)
+ .and_return(operation)
+ is_expected.to eq(operation)
+ end
+ end
+
+ describe '#get_cloudsql_instance' do
+ subject { client.get_cloudsql_instance(:gcp_project_id, :instance_name) }
+
+ it 'calls Google Api SQLAdminService#get_instance' do
+ expect_any_instance_of(Google::Apis::SqladminV1beta4::SQLAdminService)
+ .to receive(:get_instance)
+ .with(any_args)
+ .and_return(database_instance)
+ is_expected.to eq(database_instance)
+ end
+ end
end
diff --git a/spec/lib/initializer_connections_spec.rb b/spec/lib/initializer_connections_spec.rb
new file mode 100644
index 00000000000..4ca283c4f22
--- /dev/null
+++ b/spec/lib/initializer_connections_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe InitializerConnections do
+ describe '.with_disabled_database_connections', :reestablished_active_record_base do
+ def block_with_database_call
+ described_class.with_disabled_database_connections do
+ Project.first
+ end
+ end
+
+ def block_with_error
+ described_class.with_disabled_database_connections do
+ raise "oops, an error"
+ end
+ end
+
+ it 'prevents any database connection within the block' do
+ expect { block_with_database_call }.to raise_error(/Database connection should not be called during initializer/)
+ end
+
+ it 'does not prevent database connection if SKIP_RAISE_ON_INITIALIZE_CONNECTIONS is set' do
+ stub_env('SKIP_RAISE_ON_INITIALIZE_CONNECTIONS', '1')
+
+ expect { block_with_database_call }.not_to raise_error
+ end
+
+ it 'prevents any database connection if SKIP_RAISE_ON_INITIALIZE_CONNECTIONS is false' do
+ stub_env('SKIP_RAISE_ON_INITIALIZE_CONNECTIONS', 'false')
+
+ expect { block_with_database_call }.to raise_error(/Database connection should not be called during initializer/)
+ end
+
+ it 'restores original connection handler' do
+ # rubocop:disable Database/MultipleDatabases
+ original_handler = ActiveRecord::Base.connection_handler
+
+ expect { block_with_database_call }.to raise_error(/Database connection should not be called during initializer/)
+
+ expect(ActiveRecord::Base.connection_handler).to eq(original_handler)
+ # rubocop:enable Database/MultipleDatabases
+ end
+
+ it 'restores original connection handler even there is an error' do
+ # rubocop:disable Database/MultipleDatabases
+ original_handler = ActiveRecord::Base.connection_handler
+
+ expect { block_with_error }.to raise_error(/an error/)
+
+ expect(ActiveRecord::Base.connection_handler).to eq(original_handler)
+ # rubocop:enable Database/MultipleDatabases
+ end
+
+ it 'raises if any new connection_pools are established in the block' do
+ expect do
+ described_class.with_disabled_database_connections do
+ ApplicationRecord.connects_to database: { writing: :main, reading: :main }
+ end
+ end.to raise_error(/Unxpected connection_pools/)
+ end
+ end
+end
diff --git a/spec/lib/json_web_token/rsa_token_spec.rb b/spec/lib/json_web_token/rsa_token_spec.rb
index 8f0d62d8f0c..6d2026752d6 100644
--- a/spec/lib/json_web_token/rsa_token_spec.rb
+++ b/spec/lib/json_web_token/rsa_token_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe JSONWebToken::RSAToken do
end
context 'for invalid key to raise an exception' do
- let(:new_key) { OpenSSL::PKey::RSA.generate(512) }
+ let(:new_key) { OpenSSL::PKey::RSA.generate(3072) }
subject { JWT.decode(rsa_encoded, new_key, true, { algorithm: 'RS256' }) }
diff --git a/spec/lib/learn_gitlab/onboarding_spec.rb b/spec/lib/learn_gitlab/onboarding_spec.rb
index 8c7284ed7f5..3e22ce59091 100644
--- a/spec/lib/learn_gitlab/onboarding_spec.rb
+++ b/spec/lib/learn_gitlab/onboarding_spec.rb
@@ -6,11 +6,14 @@ RSpec.describe LearnGitlab::Onboarding do
describe '#completed_percentage' do
let(:completed_actions) { {} }
let(:onboarding_progress) { build(:onboarding_progress, namespace: namespace, **completed_actions) }
- let(:namespace) { build(:namespace) }
+ let(:namespace) { create(:namespace) }
let_it_be(:tracked_action_columns) do
- tracked_actions = described_class::ACTION_ISSUE_IDS.keys + described_class::ACTION_PATHS
- tracked_actions.map { |key| OnboardingProgress.column_name(key) }
+ [
+ *described_class::ACTION_ISSUE_IDS.keys,
+ *described_class::ACTION_PATHS,
+ :security_scan_enabled
+ ].map { |key| OnboardingProgress.column_name(key) }
end
before do
@@ -29,12 +32,6 @@ RSpec.describe LearnGitlab::Onboarding do
it { is_expected.to eq(0) }
end
- context 'when one action has been completed' do
- let(:completed_actions) { Hash[tracked_action_columns.first, Time.current] }
-
- it { is_expected.to eq(11) }
- end
-
context 'when all tracked actions have been completed' do
let(:completed_actions) do
tracked_action_columns.to_h { |action| [action, Time.current] }
@@ -42,5 +39,25 @@ RSpec.describe LearnGitlab::Onboarding do
it { is_expected.to eq(100) }
end
+
+ describe 'security_actions_continuous_onboarding experiment' do
+ let(:completed_actions) { Hash[tracked_action_columns.first, Time.current] }
+
+ context 'when control' do
+ before do
+ stub_experiments(security_actions_continuous_onboarding: :control)
+ end
+
+ it { is_expected.to eq(11) }
+ end
+
+ context 'when candidate' do
+ before do
+ stub_experiments(security_actions_continuous_onboarding: :candidate)
+ end
+
+ it { is_expected.to eq(9) }
+ end
+ end
end
end
diff --git a/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
index b11c9db4e46..ba5137e2b92 100644
--- a/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
@@ -68,34 +68,6 @@ RSpec.describe Sidebars::Projects::Menus::MonitorMenu do
it_behaves_like 'access rights checks'
end
- describe 'Logs' do
- let(:item_id) { :logs }
-
- it_behaves_like 'access rights checks'
-
- context 'when feature disabled' do
- before do
- stub_feature_flags(monitor_logging: false)
- end
-
- specify { is_expected.to be_nil }
- end
- end
-
- describe 'Tracing' do
- let(:item_id) { :tracing }
-
- it_behaves_like 'access rights checks'
-
- context 'when feature disabled' do
- before do
- stub_feature_flags(monitor_tracing: false)
- end
-
- specify { is_expected.to be_nil }
- end
- end
-
describe 'Error Tracking' do
let(:item_id) { :error_tracking }
diff --git a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
index d6136dddf40..f41f7a01d88 100644
--- a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
@@ -135,18 +135,20 @@ RSpec.describe Sidebars::Projects::Menus::SettingsMenu do
describe 'Packages & Registries' do
let(:item_id) { :packages_and_registries }
+ let(:packages_enabled) { false }
before do
stub_container_registry_config(enabled: container_enabled)
+ stub_config(packages: { enabled: packages_enabled })
end
- describe 'when config registry setting is disabled' do
+ describe 'when container registry setting is disabled' do
let(:container_enabled) { false }
specify { is_expected.to be_nil }
end
- describe 'when config registry setting is enabled' do
+ describe 'when container registry setting is enabled' do
let(:container_enabled) { true }
specify { is_expected.not_to be_nil }
@@ -157,6 +159,19 @@ RSpec.describe Sidebars::Projects::Menus::SettingsMenu do
specify { is_expected.to be_nil }
end
end
+
+ describe 'when package registry setting is enabled' do
+ let(:container_enabled) { false }
+ let(:packages_enabled) { true }
+
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
end
describe 'Usage Quotas' do
diff --git a/spec/lib/unnested_in_filters/dsl_spec.rb b/spec/lib/unnested_in_filters/dsl_spec.rb
new file mode 100644
index 00000000000..bce4c88f94c
--- /dev/null
+++ b/spec/lib/unnested_in_filters/dsl_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe UnnestedInFilters::Dsl do
+ let(:test_model) do
+ Class.new(ApplicationRecord) do
+ include UnnestedInFilters::Dsl
+
+ self.table_name = 'users'
+ end
+ end
+
+ describe '#exists?' do
+ let(:states) { %w(active banned) }
+
+ subject { test_model.where(state: states).use_unnested_filters.exists? }
+
+ context 'when there is no record in the database with given filters' do
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when there is a record in the database with given filters' do
+ before do
+ create(:user, state: :active)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+end
diff --git a/spec/lib/unnested_in_filters/rewriter_spec.rb b/spec/lib/unnested_in_filters/rewriter_spec.rb
new file mode 100644
index 00000000000..e2ccbd92504
--- /dev/null
+++ b/spec/lib/unnested_in_filters/rewriter_spec.rb
@@ -0,0 +1,157 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe UnnestedInFilters::Rewriter do
+ let(:rewriter) { described_class.new(relation) }
+
+ before(:all) do
+ User.include(UnnestedInFilters::Dsl)
+ end
+
+ describe '#rewrite?' do
+ subject(:rewrite?) { rewriter.rewrite? }
+
+ context 'when the given relation does not have an `IN` predicate' do
+ let(:relation) { User.where(username: 'user') }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when the given relation has an `IN` predicate' do
+ context 'when there is no index coverage for the used columns' do
+ let(:relation) { User.where(username: %w(user_1 user_2), state: :active) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when there is an index coverage for the used columns' do
+ let(:relation) { User.where(state: :active, user_type: [:support_bot, :alert_bot]) }
+
+ it { is_expected.to be_truthy }
+
+ context 'when there is an ordering' do
+ let(:relation) { User.where(state: %w(active blocked banned)).order(order).limit(2) }
+
+ context 'when the order is an Arel node' do
+ let(:order) { { user_type: :desc } }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when the order is a Keyset order' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'user_type',
+ order_expression: User.arel_table['user_type'].desc,
+ nullable: :not_nullable,
+ distinct: false
+ )
+ ])
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+ end
+ end
+
+ describe '#rewrite' do
+ let(:recorded_queries) { ActiveRecord::QueryRecorder.new { rewriter.rewrite.load } }
+ let(:relation) { User.where(state: :active, user_type: %i(support_bot alert_bot)).limit(2) }
+
+ let(:expected_query) do
+ <<~SQL
+ SELECT
+ "users".*
+ FROM
+ unnest('{1,2}'::smallint[]) AS "user_types"("user_type"),
+ LATERAL (
+ SELECT
+ "users".*
+ FROM
+ "users"
+ WHERE
+ "users"."state" = 'active' AND
+ (users."user_type" = "user_types"."user_type")
+ LIMIT 2
+ ) AS users
+ LIMIT 2
+ SQL
+ end
+
+ subject(:issued_query) { recorded_queries.occurrences.each_key.first }
+
+ it 'changes the query' do
+ expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+ end
+
+ context 'when there is an order' do
+ let(:relation) { User.where(state: %w(active blocked banned)).order(order).limit(2) }
+ let(:expected_query) do
+ <<~SQL
+ SELECT
+ "users".*
+ FROM
+ unnest('{active,blocked,banned}'::character varying[]) AS "states"("state"),
+ LATERAL (
+ SELECT
+ "users".*
+ FROM
+ "users"
+ WHERE
+ (users."state" = "states"."state")
+ ORDER BY
+ "users"."user_type" DESC
+ LIMIT 2
+ ) AS users
+ ORDER BY
+ "users"."user_type" DESC
+ LIMIT 2
+ SQL
+ end
+
+ context 'when the order is an Arel node' do
+ let(:order) { { user_type: :desc } }
+
+ it 'changes the query' do
+ expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+ end
+ end
+
+ context 'when the order is a Keyset order' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'user_type',
+ order_expression: User.arel_table['user_type'].desc,
+ nullable: :not_nullable,
+ distinct: false
+ )
+ ])
+ end
+
+ it 'changes the query' do
+ expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+ end
+ end
+ end
+
+ describe 'logging' do
+ subject(:load_reload) { rewriter.rewrite }
+
+ before do
+ allow(::Gitlab::AppLogger).to receive(:info)
+ end
+
+ it 'logs the call' do
+ load_reload
+
+ expect(::Gitlab::AppLogger)
+ .to have_received(:info).with(message: 'Query is being rewritten by `UnnestedInFilters`', model: 'User')
+ end
+ end
+ end
+end
diff --git a/spec/mailers/devise_mailer_spec.rb b/spec/mailers/devise_mailer_spec.rb
index 2634d7c722b..360eb827927 100644
--- a/spec/mailers/devise_mailer_spec.rb
+++ b/spec/mailers/devise_mailer_spec.rb
@@ -126,4 +126,34 @@ RSpec.describe DeviseMailer do
is_expected.to have_link("Reset password", href: "#{Gitlab.config.gitlab.url}/users/password/edit?reset_password_token=faketoken")
end
end
+
+ describe '#email_changed' do
+ subject { described_class.email_changed(user, {}) }
+
+ let_it_be(:user) { create(:user) }
+
+ it_behaves_like 'an email sent from GitLab'
+
+ it 'is sent to the user' do
+ is_expected.to deliver_to user.email
+ end
+
+ it 'has the correct subject' do
+ is_expected.to have_subject 'Email Changed'
+ end
+
+ it 'greets the user' do
+ is_expected.to have_body_text /Hello, #{user.name}!/
+ end
+
+ context "email contains updated id" do
+ before do
+ user.update!(email: "new_email@test.com")
+ end
+
+ it 'includes the new email address' do
+ is_expected.to have_body_text /email is being changed to new_email@test.com./
+ end
+ end
+ end
end
diff --git a/spec/mailers/emails/admin_notification_spec.rb b/spec/mailers/emails/admin_notification_spec.rb
index a233be86a83..1b770d6d4a2 100644
--- a/spec/mailers/emails/admin_notification_spec.rb
+++ b/spec/mailers/emails/admin_notification_spec.rb
@@ -18,12 +18,14 @@ RSpec.describe Emails::AdminNotification do
let(:max_project_downloads) { 5 }
let(:time_period) { 600 }
+ let(:group) { nil }
subject do
Notify.user_auto_banned_email(
admin.id, user.id,
max_project_downloads: max_project_downloads,
- within_seconds: time_period
+ within_seconds: time_period,
+ group: group
)
end
@@ -45,6 +47,10 @@ RSpec.describe Emails::AdminNotification do
is_expected.to have_body_text user.name
end
+ it 'includes the scope of the ban' do
+ is_expected.to have_body_text "banned from your GitLab instance"
+ end
+
it 'includes the reason' do
is_expected.to have_body_text "due to them downloading more than 5 project repositories within 10 minutes"
end
@@ -58,7 +64,15 @@ RSpec.describe Emails::AdminNotification do
end
it 'includes the email reason' do
- is_expected.to have_body_text "You're receiving this email because of your account on localhost"
+ is_expected.to have_body_text %r{You're receiving this email because of your account on <a .*>localhost<\/a>}
+ end
+
+ context 'when scoped to a group' do
+ let(:group) { create(:group) }
+
+ it 'includes the scope of the ban' do
+ is_expected.to have_body_text "banned from your group (#{group.name})"
+ end
end
end
end
diff --git a/spec/mailers/emails/in_product_marketing_spec.rb b/spec/mailers/emails/in_product_marketing_spec.rb
index 7f3896a3d51..7c21e161ffe 100644
--- a/spec/mailers/emails/in_product_marketing_spec.rb
+++ b/spec/mailers/emails/in_product_marketing_spec.rb
@@ -65,7 +65,6 @@ RSpec.describe Emails::InProductMarketing do
:team | 0
:team | 1
:team | 2
- :experience | 0
:team_short | 0
:trial_short | 0
:admin_verify | 0
@@ -83,12 +82,7 @@ RSpec.describe Emails::InProductMarketing do
is_expected.to have_subject(message.subject_line)
is_expected.to have_body_text(message.title)
is_expected.to have_body_text(message.subtitle)
-
- if track == :experience
- is_expected.to have_body_text(CGI.unescapeHTML(message.feedback_link(1)))
- else
- is_expected.to have_body_text(CGI.unescapeHTML(message.cta_link))
- end
+ is_expected.to have_body_text(CGI.unescapeHTML(message.cta_link))
if track =~ /(create|verify)/
is_expected.to have_body_text(message.invite_text)
diff --git a/spec/mailers/emails/profile_spec.rb b/spec/mailers/emails/profile_spec.rb
index f4483f7e8f5..09ed27eb90f 100644
--- a/spec/mailers/emails/profile_spec.rb
+++ b/spec/mailers/emails/profile_spec.rb
@@ -151,7 +151,7 @@ RSpec.describe Emails::Profile do
end
it 'includes the email reason' do
- is_expected.to have_body_text /You're receiving this email because of your account on localhost/
+ is_expected.to have_body_text %r{You're receiving this email because of your account on <a .*>localhost<\/a>}
end
end
end
@@ -187,7 +187,7 @@ RSpec.describe Emails::Profile do
end
it 'includes the email reason' do
- is_expected.to have_body_text /You're receiving this email because of your account on localhost/
+ is_expected.to have_body_text %r{You're receiving this email because of your account on <a .*>localhost<\/a>}
end
context 'with User does not exist' do
@@ -222,7 +222,7 @@ RSpec.describe Emails::Profile do
end
it 'includes the email reason' do
- is_expected.to have_body_text /You're receiving this email because of your account on localhost/
+ is_expected.to have_body_text %r{You're receiving this email because of your account on <a .*>localhost<\/a>}
end
end
@@ -266,7 +266,7 @@ RSpec.describe Emails::Profile do
end
shared_examples 'includes the email reason' do
- it { is_expected.to have_body_text /You're receiving this email because of your account on localhost/ }
+ it { is_expected.to have_body_text %r{You're receiving this email because of your account on <a .*>localhost<\/a>} }
end
shared_examples 'valid use case' do
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index a9796c28870..8beb54bca4d 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Notify do
include EmailSpec::Helpers
include EmailSpec::Matchers
include EmailHelpers
+ include EmailsHelper
include RepoHelpers
include MembersHelper
@@ -396,7 +397,7 @@ RSpec.describe Notify do
end
end
- context 'when sent with a reason' do
+ context 'when sent with a reason', type: :helper do
subject { described_class.reassigned_merge_request_email(recipient.id, merge_request.id, [previous_assignee.id], current_user.id, NotificationReason::ASSIGNED) }
it_behaves_like 'appearance header and footer enabled'
@@ -407,15 +408,15 @@ RSpec.describe Notify do
end
it 'includes the reason in the footer' do
- text = EmailsHelper.instance_method(:notification_reason_text).bind(self).call(NotificationReason::ASSIGNED)
+ text = EmailsHelper.instance_method(:notification_reason_text).bind(self).call(reason: NotificationReason::ASSIGNED, format: :html)
is_expected.to have_body_text(text)
new_subject = described_class.reassigned_merge_request_email(recipient.id, merge_request.id, [previous_assignee.id], current_user.id, NotificationReason::MENTIONED)
- text = EmailsHelper.instance_method(:notification_reason_text).bind(self).call(NotificationReason::MENTIONED)
+ text = EmailsHelper.instance_method(:notification_reason_text).bind(self).call(reason: NotificationReason::MENTIONED, format: :html)
expect(new_subject).to have_body_text(text)
new_subject = described_class.reassigned_merge_request_email(recipient.id, merge_request.id, [previous_assignee.id], current_user.id, nil)
- text = EmailsHelper.instance_method(:notification_reason_text).bind(self).call(nil)
+ text = EmailsHelper.instance_method(:notification_reason_text).bind(self).call(format: :html)
expect(new_subject).to have_body_text(text)
end
end
@@ -1550,7 +1551,7 @@ RSpec.describe Notify do
end
describe 'invitations' do
- let(:owner) { create(:user).tap { |u| group.add_user(u, Gitlab::Access::OWNER) } }
+ let(:owner) { create(:user).tap { |u| group.add_member(u, Gitlab::Access::OWNER) } }
let(:group_member) { invite_to_group(group, inviter: inviter) }
let(:inviter) { owner }
@@ -1605,7 +1606,7 @@ RSpec.describe Notify do
end
describe 'group invitation reminders' do
- let_it_be(:inviter) { create(:user).tap { |u| group.add_user(u, Gitlab::Access::OWNER) } }
+ let_it_be(:inviter) { create(:user).tap { |u| group.add_member(u, Gitlab::Access::OWNER) } }
let(:group_member) { invite_to_group(group, inviter: inviter) }
@@ -1688,7 +1689,7 @@ RSpec.describe Notify do
describe 'group invitation accepted' do
let(:invited_user) { create(:user, name: 'invited user') }
- let(:owner) { create(:user).tap { |u| group.add_user(u, Gitlab::Access::OWNER) } }
+ let(:owner) { create(:user).tap { |u| group.add_member(u, Gitlab::Access::OWNER) } }
let(:group_member) do
invitee = invite_to_group(group, inviter: owner)
invitee.accept_invite!(invited_user)
@@ -1714,7 +1715,7 @@ RSpec.describe Notify do
end
describe 'group invitation declined' do
- let(:owner) { create(:user).tap { |u| group.add_user(u, Gitlab::Access::OWNER) } }
+ let(:owner) { create(:user).tap { |u| group.add_member(u, Gitlab::Access::OWNER) } }
let(:group_member) do
invitee = invite_to_group(group, inviter: owner)
invitee.decline_invite!
diff --git a/spec/metrics_server/metrics_server_spec.rb b/spec/metrics_server/metrics_server_spec.rb
index 4c188a6ba29..c7716184d48 100644
--- a/spec/metrics_server/metrics_server_spec.rb
+++ b/spec/metrics_server/metrics_server_spec.rb
@@ -171,6 +171,29 @@ RSpec.describe MetricsServer do # rubocop:disable RSpec/FilePath
described_class.spawn(target, metrics_dir: metrics_dir)
end
end
+
+ context 'when TLS settings are present' do
+ before do
+ %w(web_exporter sidekiq_exporter).each do |key|
+ settings[key]['tls_enabled'] = true
+ settings[key]['tls_cert_path'] = '/path/to/cert.pem'
+ settings[key]['tls_key_path'] = '/path/to/key.pem'
+ end
+ end
+
+ it 'sets the correct environment variables' do
+ expect(Process).to receive(:spawn).with(
+ expected_env.merge(
+ 'GME_CERT_FILE' => '/path/to/cert.pem',
+ 'GME_CERT_KEY' => '/path/to/key.pem'
+ ),
+ '/path/to/gme/gitlab-metrics-exporter',
+ hash_including(pgroup: true)
+ ).and_return(99)
+
+ described_class.spawn(target, metrics_dir: metrics_dir, path: '/path/to/gme/')
+ end
+ end
end
end
end
diff --git a/spec/migrations/20220601152916_add_user_id_and_ip_address_success_index_to_authentication_events_spec.rb b/spec/migrations/20220601152916_add_user_id_and_ip_address_success_index_to_authentication_events_spec.rb
new file mode 100644
index 00000000000..8cb6ab23fef
--- /dev/null
+++ b/spec/migrations/20220601152916_add_user_id_and_ip_address_success_index_to_authentication_events_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddUserIdAndIpAddressSuccessIndexToAuthenticationEvents do
+ let(:db) { described_class.new }
+ let(:old_index) { described_class::OLD_INDEX_NAME }
+ let(:new_index) { described_class::NEW_INDEX_NAME }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(db.connection.indexes(:authentication_events).map(&:name)).to include(old_index)
+ expect(db.connection.indexes(:authentication_events).map(&:name)).not_to include(new_index)
+ }
+
+ migration.after -> {
+ expect(db.connection.indexes(:authentication_events).map(&:name)).to include(new_index)
+ expect(db.connection.indexes(:authentication_events).map(&:name)).not_to include(old_index)
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20220606082910_add_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb b/spec/migrations/20220606082910_add_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb
new file mode 100644
index 00000000000..1450811b3b9
--- /dev/null
+++ b/spec/migrations/20220606082910_add_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+require_migration!
+
+RSpec.describe AddTmpIndexForPotentiallyMisassociatedVulnerabilityOccurrences do
+ let(:async_index) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex }
+ let(:index_name) { described_class::INDEX_NAME }
+
+ it "schedules the index" do
+ reversible_migration do |migration|
+ migration.before -> do
+ expect(async_index.where(name: index_name).count).to be(0)
+ end
+
+ migration.after -> do
+ expect(async_index.where(name: index_name).count).to be(1)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20220620132300_update_last_run_date_for_iterations_cadences_spec.rb b/spec/migrations/20220620132300_update_last_run_date_for_iterations_cadences_spec.rb
new file mode 100644
index 00000000000..d23ca8741a2
--- /dev/null
+++ b/spec/migrations/20220620132300_update_last_run_date_for_iterations_cadences_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe UpdateLastRunDateForIterationsCadences, :migration do
+ let(:current_date) { Date.parse(ApplicationRecord.connection.execute("SELECT CURRENT_DATE").first["current_date"]) }
+ let(:namespaces) { table(:namespaces) }
+ let(:iterations_cadences) { table(:iterations_cadences) }
+
+ let!(:group) { namespaces.create!(name: 'foo', path: 'foo') }
+ let!(:cadence_1) do
+ iterations_cadences.create!(group_id: group.id, title: "cadence 1", last_run_date: Date.today - 5.days)
+ end
+
+ let!(:cadence_2) { iterations_cadences.create!(group_id: group.id, title: "cadence 2", last_run_date: nil) }
+ let!(:cadence_3) do
+ iterations_cadences.create!(group_id: group.id, title: "cadence 2", last_run_date: nil, automatic: false)
+ end
+
+ it 'sets last_run_date to CURRENT_DATE for iterations cadences with automatic=true', :aggregate_failures do
+ migrate!
+
+ expect(cadence_1.reload.last_run_date).to eq(current_date)
+ expect(cadence_2.reload.last_run_date).to eq(current_date)
+ expect(cadence_3.reload.last_run_date).to eq(nil)
+ end
+end
diff --git a/spec/migrations/20220622080547_backfill_project_statistics_with_container_registry_size_spec.rb b/spec/migrations/20220622080547_backfill_project_statistics_with_container_registry_size_spec.rb
new file mode 100644
index 00000000000..52b75f0b8a9
--- /dev/null
+++ b/spec/migrations/20220622080547_backfill_project_statistics_with_container_registry_size_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillProjectStatisticsWithContainerRegistrySize do
+ let_it_be(:batched_migration) { described_class::MIGRATION_CLASS }
+
+ it 'does not schedule background jobs when Gitlab.com is false' do
+ allow(Gitlab).to receive(:com?).and_return(false)
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+ end
+ end
+
+ it 'schedules background jobs for each batch of container_repository' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :container_repositories,
+ column_name: :project_id,
+ interval: described_class::DELAY_INTERVAL
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20220627090231_schedule_disable_legacy_open_source_license_for_inactive_public_projects_spec.rb b/spec/migrations/20220627090231_schedule_disable_legacy_open_source_license_for_inactive_public_projects_spec.rb
new file mode 100644
index 00000000000..3e7f2a3457b
--- /dev/null
+++ b/spec/migrations/20220627090231_schedule_disable_legacy_open_source_license_for_inactive_public_projects_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleDisableLegacyOpenSourceLicenseForInactivePublicProjects do
+ context 'on gitlab.com' do
+ let(:migration) { described_class::MIGRATION }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of projects' do
+ migrate!
+
+ expect(migration).to(
+ have_scheduled_batched_migration(
+ table_name: :projects,
+ column_name: :id,
+ interval: described_class::INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+ end
+
+ context 'on self-managed instances' do
+ let(:migration) { described_class.new }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(false)
+ end
+
+ describe '#up' do
+ it 'does not schedule background job' do
+ expect(migration).not_to receive(:queue_batched_background_migration)
+
+ migration.up
+ end
+ end
+
+ describe '#down' do
+ it 'does not delete background job' do
+ expect(migration).not_to receive(:delete_batched_background_migration)
+
+ migration.down
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20220627152642_queue_update_delayed_project_removal_to_null_for_user_namespace_spec.rb b/spec/migrations/20220627152642_queue_update_delayed_project_removal_to_null_for_user_namespace_spec.rb
new file mode 100644
index 00000000000..190f1c830ae
--- /dev/null
+++ b/spec/migrations/20220627152642_queue_update_delayed_project_removal_to_null_for_user_namespace_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueUpdateDelayedProjectRemovalToNullForUserNamespace do
+ let(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of namespace settings' do
+ migrate!
+
+ expect(migration).to(
+ have_scheduled_batched_migration(
+ table_name: :namespace_settings,
+ column_name: :namespace_id,
+ interval: described_class::INTERVAL,
+ batch_size: described_class::BATCH_SIZE
+ )
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/20220628012902_finalise_project_namespace_members_spec.rb b/spec/migrations/20220628012902_finalise_project_namespace_members_spec.rb
new file mode 100644
index 00000000000..1f116cf6a7e
--- /dev/null
+++ b/spec/migrations/20220628012902_finalise_project_namespace_members_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe FinaliseProjectNamespaceMembers, :migration do
+ let(:batched_migrations) { table(:batched_background_migrations) }
+
+ let_it_be(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ shared_examples 'finalizes the migration' do
+ it 'finalizes the migration' do
+ allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
+ expect(runner).to receive(:finalize).with('BackfillProjectMemberNamespaceId', :members, :id, [])
+ end
+ end
+ end
+
+ context 'when migration is missing' do
+ it 'warns that the migration was not found' do
+ expect(Gitlab::AppLogger)
+ .to receive(:warn).with(/Could not find batched background migration for the given configuration:/)
+
+ migrate!
+ end
+ end
+
+ context 'with migration present' do
+ let!(:project_member_namespace_id_backfill) do
+ batched_migrations.create!(
+ job_class_name: 'BackfillProjectMemberNamespaceId',
+ table_name: :members,
+ column_name: :id,
+ job_arguments: [],
+ interval: 2.minutes,
+ min_value: 1,
+ max_value: 2,
+ batch_size: 1000,
+ sub_batch_size: 200,
+ gitlab_schema: :gitlab_main,
+ status: 3 # finished
+ )
+ end
+
+ context 'when migration finished successfully' do
+ it 'does not raise exception' do
+ expect { migrate! }.not_to raise_error
+ end
+ end
+
+ context 'with different migration statuses' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:status, :description) do
+ 0 | 'paused'
+ 1 | 'active'
+ 4 | 'failed'
+ 5 | 'finalizing'
+ end
+
+ with_them do
+ before do
+ project_member_namespace_id_backfill.update!(status: status)
+ end
+
+ it_behaves_like 'finalizes the migration'
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20220629184402_unset_escalation_policies_for_alert_incidents_spec.rb b/spec/migrations/20220629184402_unset_escalation_policies_for_alert_incidents_spec.rb
new file mode 100644
index 00000000000..bd821714605
--- /dev/null
+++ b/spec/migrations/20220629184402_unset_escalation_policies_for_alert_incidents_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe UnsetEscalationPoliciesForAlertIncidents do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:issues) { table(:issues) }
+ let(:alerts) { table(:alert_management_alerts) }
+ let(:escalation_policies) { table(:incident_management_escalation_policies) }
+ let(:escalation_statuses) { table(:incident_management_issuable_escalation_statuses) }
+ let(:current_time) { Time.current.change(usec: 0) }
+
+ let!(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') }
+ let!(:project_namespace) { namespaces.create!(name: 'project', path: 'project', type: 'project') }
+ let!(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: project_namespace.id) }
+ let!(:policy) { escalation_policies.create!(project_id: project.id, name: 'escalation policy') }
+
+ # Escalation status with policy from alert; Policy & escalation start time should be nullified
+ let!(:issue_1) { create_issue }
+ let!(:escalation_status_1) { create_status(issue_1, policy, current_time) }
+ let!(:alert_1) { create_alert(1, issue_1) }
+
+ # Escalation status without policy, but with alert; Should be ignored
+ let!(:issue_2) { create_issue }
+ let!(:escalation_status_2) { create_status(issue_2, nil, current_time) }
+ let!(:alert_2) { create_alert(2, issue_2) }
+
+ # Escalation status without alert, but with policy; Should be ignored
+ let!(:issue_3) { create_issue }
+ let!(:escalation_status_3) { create_status(issue_3, policy, current_time) }
+
+ # Alert without issue; Should be ignored
+ let!(:alert_3) { create_alert(3) }
+
+ it 'removes the escalation policy if the incident corresponds to an alert' do
+ expect { migrate! }
+ .to change { escalation_status_1.reload.policy_id }.from(policy.id).to(nil)
+ .and change { escalation_status_1.escalations_started_at }.from(current_time).to(nil)
+ .and not_change { policy_attrs(escalation_status_2) }
+ .and not_change { policy_attrs(escalation_status_3) }
+ end
+
+ private
+
+ def create_issue
+ issues.create!(project_id: project.id)
+ end
+
+ def create_status(issue, policy = nil, escalations_started_at = nil)
+ escalation_statuses.create!(
+ issue_id: issue.id,
+ policy_id: policy&.id,
+ escalations_started_at: escalations_started_at
+ )
+ end
+
+ def create_alert(iid, issue = nil)
+ alerts.create!(
+ project_id: project.id,
+ started_at: current_time,
+ title: "alert #{iid}",
+ iid: iid.to_s,
+ issue_id: issue&.id
+ )
+ end
+
+ def policy_attrs(escalation_status)
+ escalation_status.reload.slice(:policy_id, :escalations_started_at)
+ end
+end
diff --git a/spec/migrations/20220715163254_update_notes_in_past_spec.rb b/spec/migrations/20220715163254_update_notes_in_past_spec.rb
new file mode 100644
index 00000000000..58e6cabc129
--- /dev/null
+++ b/spec/migrations/20220715163254_update_notes_in_past_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe UpdateNotesInPast, :migration do
+ let(:notes) { table(:notes) }
+
+ it 'updates created_at when it is too far in the past' do
+ notes.create!(id: 10, note: 'note', created_at: '2009-06-01')
+ notes.create!(id: 11, note: 'note', created_at: '1970-01-01')
+ notes.create!(id: 12, note: 'note', created_at: '1600-06-01')
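+ # created_at values older than 1970-01-01 are expected to be clamped to 1970-01-01 (see the id: 12 expectation below)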
+
+ migrate!
+
+ expect(notes.all).to contain_exactly(
+ an_object_having_attributes(id: 10, created_at: DateTime.parse('2009-06-01')),
+ an_object_having_attributes(id: 11, created_at: DateTime.parse('1970-01-01')),
+ an_object_having_attributes(id: 12, created_at: DateTime.parse('1970-01-01'))
+ )
+ end
+end
diff --git a/spec/migrations/change_public_projects_cost_factor_spec.rb b/spec/migrations/change_public_projects_cost_factor_spec.rb
new file mode 100644
index 00000000000..78030736093
--- /dev/null
+++ b/spec/migrations/change_public_projects_cost_factor_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ChangePublicProjectsCostFactor, :migration do
+ # This is a workaround to force the migration to run against the
+ # `gitlab_ci` schema. Otherwise it only runs against `gitlab_main`.
+ around do |example| # rubocop: disable Style/MultilineIfModifier
+ with_reestablished_active_record_base do
+ reconfigure_db_connection(name: :ci)
+ example.run
+ end
+ end if Gitlab::Database.has_config?(:ci)
+
+ let(:runners) { table(:ci_runners) }
+
+ let!(:shared_1) { runners.create!(runner_type: 1, public_projects_minutes_cost_factor: 0) }
+ let!(:shared_2) { runners.create!(runner_type: 1, public_projects_minutes_cost_factor: 0) }
+ let!(:shared_3) { runners.create!(runner_type: 1, public_projects_minutes_cost_factor: 1) }
+ let!(:group_1) { runners.create!(runner_type: 2, public_projects_minutes_cost_factor: 0) }
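+ # runner_type 1 corresponds to instance (shared) runners and 2 to group runners, matching the shared_*/group_* names above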
+
+ describe '#up' do
+ context 'when on SaaS' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ it 'updates the cost factor from 0 only for shared runners', :aggregate_failures do
+ migrate!
+
+ expect(shared_1.reload.public_projects_minutes_cost_factor).to eq(0.008)
+ expect(shared_2.reload.public_projects_minutes_cost_factor).to eq(0.008)
+ expect(shared_3.reload.public_projects_minutes_cost_factor).to eq(1)
+ expect(group_1.reload.public_projects_minutes_cost_factor).to eq(0)
+ end
+ end
+
+ context 'when on self-managed', :aggregate_failures do
+ it 'skips the migration' do
+ migrate!
+
+ expect(shared_1.reload.public_projects_minutes_cost_factor).to eq(0)
+ expect(shared_2.reload.public_projects_minutes_cost_factor).to eq(0)
+ expect(shared_3.reload.public_projects_minutes_cost_factor).to eq(1)
+ expect(group_1.reload.public_projects_minutes_cost_factor).to eq(0)
+ end
+ end
+ end
+
+ describe '#down' do
+ context 'when on SaaS' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ it 'resets the cost factor to 0 only for shared runners that were updated', :aggregate_failures do
+ migrate!
+ schema_migrate_down!
+
+ expect(shared_1.reload.public_projects_minutes_cost_factor).to eq(0)
+ expect(shared_2.reload.public_projects_minutes_cost_factor).to eq(0)
+ expect(shared_3.reload.public_projects_minutes_cost_factor).to eq(1)
+ expect(group_1.reload.public_projects_minutes_cost_factor).to eq(0)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/finalize_orphaned_routes_cleanup_spec.rb b/spec/migrations/finalize_orphaned_routes_cleanup_spec.rb
new file mode 100644
index 00000000000..dfc95ed9e63
--- /dev/null
+++ b/spec/migrations/finalize_orphaned_routes_cleanup_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe FinalizeOrphanedRoutesCleanup, :migration do
+ let(:batched_migrations) { table(:batched_background_migrations) }
+
+ let_it_be(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ shared_examples 'finalizes the migration' do
+ it 'finalizes the migration' do
+ allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
+ expect(runner).to receive(:finalize).with('CleanupOrphanedRoutes', :projects, :id, [])
+ end
+
+ migrate!
+ end
+ end
+
+ context 'when migration is missing' do
+ it 'warns migration not found' do
+ expect(Gitlab::AppLogger)
+ .to receive(:warn).with(/Could not find batched background migration for the given configuration:/)
+
+ migrate!
+ end
+ end
+
+ context 'with migration present' do
+ let!(:project_namespace_backfill) do
+ batched_migrations.create!(
+ job_class_name: 'CleanupOrphanedRoutes',
+ table_name: :routes,
+ column_name: :id,
+ job_arguments: [],
+ interval: 2.minutes,
+ min_value: 1,
+ max_value: 2,
+ batch_size: 1000,
+ sub_batch_size: 200,
+ gitlab_schema: :gitlab_main,
+ status: 3 # finished
+ )
+ end
+
+ context 'when migration finished successfully' do
+ it 'does not raise exception' do
+ expect { migrate! }.not_to raise_error
+ end
+ end
+
+ context 'with different migration statuses' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:status, :description) do
+ 0 | 'paused'
+ 1 | 'active'
+ 4 | 'failed'
+ 5 | 'finalizing'
+ end
+
+ with_them do
+ before do
+ project_namespace_backfill.update!(status: status)
+ end
+
+ it_behaves_like 'finalizes the migration'
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/populate_operation_visibility_permissions_spec.rb b/spec/migrations/populate_operation_visibility_permissions_spec.rb
new file mode 100644
index 00000000000..6737a6f84c3
--- /dev/null
+++ b/spec/migrations/populate_operation_visibility_permissions_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe PopulateOperationVisibilityPermissions, :migration do
+ let(:migration) { described_class::MIGRATION }
+
+ before do
+ stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
+ end
+
+ it 'schedules background migrations', :aggregate_failures do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ table_name: :project_features,
+ column_name: :id,
+ interval: described_class::INTERVAL
+ )
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/reschedule_backfill_imported_issue_search_data_spec.rb b/spec/migrations/reschedule_backfill_imported_issue_search_data_spec.rb
new file mode 100644
index 00000000000..7d1377bbeba
--- /dev/null
+++ b/spec/migrations/reschedule_backfill_imported_issue_search_data_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RescheduleBackfillImportedIssueSearchData do
+ let_it_be(:reschedule_migration) { described_class::MIGRATION }
+
+ context 'when BackfillIssueSearchData.max_value is nil' do
+ it 'schedules a new batched migration with a default value' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(reschedule_migration).not_to have_scheduled_batched_migration
+ }
+ migration.after -> {
+ expect(reschedule_migration).to have_scheduled_batched_migration(
+ table_name: :issues,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_min_value: described_class::BATCH_MIN_VALUE
+ )
+ }
+ end
+ end
+ end
+
+ context 'when BackfillIssueSearchData.max_value exists' do
+ before do
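+ # an existing BackfillIssueSearchData row whose max_value (200) is expected to become the rescheduled migration's batch_min_value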
+ Gitlab::Database::BackgroundMigration::BatchedMigration
+ .create!(
+ max_value: 200,
+ batch_size: 200,
+ sub_batch_size: 20,
+ interval: 120,
+ job_class_name: 'BackfillIssueSearchData',
+ table_name: 'issues',
+ column_name: 'id',
+ gitlab_schema: 'glschema'
+ )
+ end
+
+ it 'schedules a new batched migration with a custom max_value' do
+ reversible_migration do |migration|
+ migration.after -> {
+ expect(reschedule_migration).to have_scheduled_batched_migration(
+ table_name: :issues,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_min_value: 200
+ )
+ }
+ end
+ end
+ end
+end
diff --git a/spec/migrations/schedule_set_correct_vulnerability_state_spec.rb b/spec/migrations/schedule_set_correct_vulnerability_state_spec.rb
new file mode 100644
index 00000000000..08dccf1f37a
--- /dev/null
+++ b/spec/migrations/schedule_set_correct_vulnerability_state_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe ScheduleSetCorrectVulnerabilityState do
+ let_it_be(:migration) { described_class::MIGRATION_NAME }
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of vulnerabilities' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ table_name: :vulnerabilities,
+ column_name: :id,
+ interval: described_class::JOB_INTERVAL,
+ batch_size: described_class::MAX_BATCH_SIZE,
+ batch_class_name: described_class::BATCH_CLASS_NAME,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/models/ability_spec.rb b/spec/models/ability_spec.rb
index 5bd69ad9fad..422dd9a463b 100644
--- a/spec/models/ability_spec.rb
+++ b/spec/models/ability_spec.rb
@@ -151,6 +151,38 @@ RSpec.describe Ability do
end
end
+ describe '.users_that_can_read_internal_notes' do
+ shared_examples 'filtering users that can read internal note' do
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+
+ let(:users) { [reporter, guest] }
+
+ before do
+ parent.add_guest(guest)
+ parent.add_reporter(reporter)
+ end
+
+ it 'returns users that can read internal notes' do
+ result = described_class.users_that_can_read_internal_notes(users, parent)
+
+ expect(result).to match_array([reporter])
+ end
+ end
+
+ context 'for groups' do
+ it_behaves_like 'filtering users that can read internal note' do
+ let(:parent) { create(:group) }
+ end
+ end
+
+ context 'for projects' do
+ it_behaves_like 'filtering users that can read internal note' do
+ let(:parent) { create(:project) }
+ end
+ end
+ end
+
describe '.merge_requests_readable_by_user' do
context 'with an admin when admin mode is enabled', :enable_admin_mode do
it 'returns all merge requests' do
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 61f008416ea..0b3521cdd0c 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -104,6 +104,9 @@ RSpec.describe ApplicationSetting do
it { is_expected.to validate_numericality_of(:packages_cleanup_package_file_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(nil).for(:packages_cleanup_package_file_worker_capacity) }
+ it { is_expected.to validate_numericality_of(:package_registry_cleanup_policies_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
+ it { is_expected.not_to allow_value(nil).for(:package_registry_cleanup_policies_worker_capacity) }
+
it { is_expected.to validate_numericality_of(:snippet_size_limit).only_integer.is_greater_than(0) }
it { is_expected.to validate_numericality_of(:wiki_page_max_content_bytes).only_integer.is_greater_than_or_equal_to(1024) }
it { is_expected.to validate_presence_of(:max_artifacts_size) }
@@ -305,18 +308,20 @@ RSpec.describe ApplicationSetting do
end
end
- context 'when snowplow is enabled' do
- before do
- setting.snowplow_enabled = true
- end
+ describe 'snowplow settings', :do_not_stub_snowplow_by_default do
+ context 'when snowplow is enabled' do
+ before do
+ setting.snowplow_enabled = true
+ end
- it { is_expected.not_to allow_value(nil).for(:snowplow_collector_hostname) }
- it { is_expected.to allow_value("snowplow.gitlab.com").for(:snowplow_collector_hostname) }
- it { is_expected.not_to allow_value('/example').for(:snowplow_collector_hostname) }
- end
+ it { is_expected.not_to allow_value(nil).for(:snowplow_collector_hostname) }
+ it { is_expected.to allow_value("snowplow.gitlab.com").for(:snowplow_collector_hostname) }
+ it { is_expected.not_to allow_value('/example').for(:snowplow_collector_hostname) }
+ end
- context 'when snowplow is not enabled' do
- it { is_expected.to allow_value(nil).for(:snowplow_collector_hostname) }
+ context 'when snowplow is not enabled' do
+ it { is_expected.to allow_value(nil).for(:snowplow_collector_hostname) }
+ end
end
context 'when mailgun_events_enabled is enabled' do
@@ -1065,6 +1070,35 @@ RSpec.describe ApplicationSetting do
it { is_expected.to validate_numericality_of(:sidekiq_job_limiter_compression_threshold_bytes).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:sidekiq_job_limiter_limit_bytes).only_integer.is_greater_than_or_equal_to(0) }
end
+
+ context 'prometheus settings' do
+ it 'validates metrics_method_call_threshold' do
+ allow(subject).to receive(:prometheus_metrics_enabled).and_return(true)
+
+ is_expected.to validate_numericality_of(:metrics_method_call_threshold).is_greater_than_or_equal_to(0)
+ end
+ end
+
+ context 'error tracking settings' do
+ context 'with error tracking disabled' do
+ before do
+ setting.error_tracking_enabled = false
+ end
+
+ it { is_expected.to allow_value(nil).for(:error_tracking_api_url) }
+ end
+
+ context 'with error tracking enabled' do
+ before do
+ setting.error_tracking_enabled = true
+ end
+
+ it { is_expected.to allow_value(http).for(:error_tracking_api_url) }
+ it { is_expected.to allow_value(https).for(:error_tracking_api_url) }
+ it { is_expected.not_to allow_value(ftp).for(:error_tracking_api_url) }
+ it { is_expected.to validate_presence_of(:error_tracking_api_url) }
+ end
+ end
end
context 'restrict creating duplicates' do
diff --git a/spec/models/authentication_event_spec.rb b/spec/models/authentication_event_spec.rb
index 83598fa6765..23e253c2a28 100644
--- a/spec/models/authentication_event_spec.rb
+++ b/spec/models/authentication_event_spec.rb
@@ -44,4 +44,31 @@ RSpec.describe AuthenticationEvent do
expect(described_class.providers).to match_array %w(ldapmain google_oauth2 standard two-factor two-factor-via-u2f-device two-factor-via-webauthn-device)
end
end
+
+ describe '.initial_login_or_known_ip_address?' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:ip_address) { '127.0.0.1' }
+
+ subject { described_class.initial_login_or_known_ip_address?(user, ip_address) }
+
+ context 'on first login, when no record exists yet' do
+ it { is_expected.to eq(true) }
+ end
+
+ context 'on second login from the same ip address' do
+ before do
+ create(:authentication_event, :successful, user: user, ip_address: ip_address)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'on second login from another ip address' do
+ before do
+ create(:authentication_event, :successful, user: user, ip_address: '1.2.3.4')
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
end
diff --git a/spec/models/awareness_session_spec.rb b/spec/models/awareness_session_spec.rb
new file mode 100644
index 00000000000..854ce5957f7
--- /dev/null
+++ b/spec/models/awareness_session_spec.rb
@@ -0,0 +1,165 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AwarenessSession, :clean_gitlab_redis_shared_state do
+ subject { AwarenessSession.for(session_id) }
+
+ let!(:user) { create(:user) }
+ let(:session_id) { 1 }
+
+ describe "when initiating a session" do
+ it "provides a string representation of the model instance" do
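+ # "6b86b273ff34fce" is SHA256("1") (the session_id above) truncated to 15 hex characters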
+ expected = "awareness_session=6b86b273ff34fce"
+
+ expect(subject.to_s).to eql(expected)
+ end
+
+ it "provides a parameterized version of the session identifier" do
+ expected = "6b86b273ff34fce"
+
+ expect(subject.to_param).to eql(expected)
+ end
+ end
+
+ describe "when a user joins a session" do
+ let(:user2) { create(:user) }
+
+ let(:presence_ttl) { 15.minutes }
+
+ it "changes number of session members" do
+ expect { subject.join(user) }.to change(subject, :size).by(1)
+ end
+
+ it "returns user as member of session with last_activity timestamp" do
+ freeze_time do
+ subject.join(user)
+
+ session_users = subject.users_with_last_activity
+ session_user, last_activity = session_users.first
+
+ expect(session_user.id).to be(user.id)
+ expect(last_activity).to be_eql(Time.now.utc)
+ end
+ end
+
+ it "maintains user ID and last_activity pairs" do
+ now = Time.zone.now
+
+ travel_to now - 1.minute do
+ subject.join(user2)
+ end
+
+ travel_to now do
+ subject.join(user)
+ end
+
+ session_users = subject.users_with_last_activity
+
+ expect(session_users[0].first.id).to eql(user.id)
+ expect(session_users[0].last.to_i).to eql(now.to_i)
+
+ expect(session_users[1].first.id).to eql(user2.id)
+ expect(session_users[1].last.to_i).to eql((now - 1.minute).to_i)
+ end
+
+ it "reports user as present" do
+ freeze_time do
+ subject.join(user)
+
+ expect(subject.present?(user, threshold: presence_ttl)).to be true
+ end
+ end
+
+ it "reports user as away after a certain time of inactivity" do
+ subject.join(user)
+
+ travel_to((presence_ttl + 1.minute).from_now) do
+ expect(subject.away?(user, threshold: presence_ttl)).to be true
+ end
+ end
+
+ it "reports user as still present when there was some activity" do
+ subject.join(user)
+
+ travel_to((presence_ttl - 1.minute).from_now) do
+ subject.touch!(user)
+ end
+
+ travel_to((presence_ttl + 1.minute).from_now) do
+ expect(subject.present?(user, threshold: presence_ttl)).to be true
+ end
+ end
+
+ it "creates user and session awareness keys in store" do
+ subject.join(user)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ keys = redis.scan_each(match: "gitlab:awareness:*").to_a
+
+ expect(keys.size).to be(2)
+ end
+ end
+
+ it "sets a timeout for user and session key" do
+ subject.join(user)
+ subject_id = Digest::SHA256.hexdigest(session_id.to_s)[0, 15]
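+ # recomputes the truncated digest used in the Redis key names checked below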
+
+ Gitlab::Redis::SharedState.with do |redis|
+ ttl_session = redis.ttl("gitlab:awareness:session:#{subject_id}:users")
+ ttl_user = redis.ttl("gitlab:awareness:user:#{user.id}:sessions")
+
+ expect(ttl_session).to be > 0
+ expect(ttl_user).to be > 0
+ end
+ end
+
+ it "fetches user(s) from database" do
+ subject.join(user)
+
+ expect(subject.users.first).to eql(user)
+ end
+
+ it "fetches and filters online user(s) from database" do
+ subject.join(user)
+
+ travel 2.hours do
+ subject.join(user2)
+
+ online_users = subject.online_users_with_last_activity
+ online_user, _ = online_users.first
+
+ expect(online_users.size).to be 1
+ expect(online_user).to eql(user2)
+ end
+ end
+ end
+
+ describe "when a user leaves a session" do
+ it "changes number of session members" do
+ subject.join(user)
+
+ expect { subject.leave(user) }.to change(subject, :size).by(-1)
+ end
+
+ it "destroys the session when it was the last user" do
+ subject.join(user)
+
+ expect { subject.leave(user) }.to change(subject, :id).to(nil)
+ end
+ end
+
+ describe "when last user leaves a session" do
+ it "removes session and user keys" do
+ subject.join(user)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect { subject.leave(user) }
+ .to change { redis.scan_each(match: "gitlab:awareness:*").to_a.size }
+ .to(0)
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/build_report_result_spec.rb b/spec/models/ci/build_report_result_spec.rb
index 3f53c6c1c0e..09ea19cf077 100644
--- a/spec/models/ci/build_report_result_spec.rb
+++ b/spec/models/ci/build_report_result_spec.rb
@@ -70,10 +70,4 @@ RSpec.describe Ci::BuildReportResult do
expect(build_report_result.tests_skipped).to eq(0)
end
end
-
- describe '#tests_total' do
- it 'returns the total count' do
- expect(build_report_result.tests_total).to eq(2)
- end
- end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 6ad6bb16eb5..e0166ba64a4 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -43,30 +43,10 @@ RSpec.describe Ci::Build do
it { is_expected.to delegate_method(:legacy_detached_merge_request_pipeline?).to(:pipeline) }
shared_examples 'calling proper BuildFinishedWorker' do
- context 'when ci_build_finished_worker_namespace_changed feature flag enabled' do
- before do
- stub_feature_flags(ci_build_finished_worker_namespace_changed: build.project)
- end
-
- it 'calls Ci::BuildFinishedWorker' do
- expect(Ci::BuildFinishedWorker).to receive(:perform_async)
- expect(::BuildFinishedWorker).not_to receive(:perform_async)
-
- subject
- end
- end
-
- context 'when ci_build_finished_worker_namespace_changed feature flag disabled' do
- before do
- stub_feature_flags(ci_build_finished_worker_namespace_changed: false)
- end
-
- it 'calls ::BuildFinishedWorker' do
- expect(::BuildFinishedWorker).to receive(:perform_async)
- expect(Ci::BuildFinishedWorker).not_to receive(:perform_async)
+ it 'calls Ci::BuildFinishedWorker' do
+ expect(Ci::BuildFinishedWorker).to receive(:perform_async)
- subject
- end
+ subject
end
end
@@ -1364,7 +1344,7 @@ RSpec.describe Ci::Build do
before do
allow(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
- allow(deployment).to receive(:execute_hooks)
+ allow(Deployments::HooksWorker).to receive(:perform_async)
end
it 'has deployments record with created status' do
@@ -1420,7 +1400,7 @@ RSpec.describe Ci::Build do
before do
allow(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
- allow(deployment).to receive(:execute_hooks)
+ allow(Deployments::HooksWorker).to receive(:perform_async)
end
it_behaves_like 'avoid deadlock'
@@ -1506,28 +1486,14 @@ RSpec.describe Ci::Build do
it 'transitions to running and calls webhook' do
freeze_time do
- expect(deployment).to receive(:execute_hooks).with(Time.current)
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status: 'running', status_changed_at: Time.current)
subject
end
expect(deployment).to be_running
end
-
- context 'when `deployment_hooks_skip_worker` flag is disabled' do
- before do
- stub_feature_flags(deployment_hooks_skip_worker: false)
- end
-
- it 'executes Deployments::HooksWorker asynchronously' do
- freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
-
- subject
- end
- end
- end
end
end
end
@@ -1567,48 +1533,47 @@ RSpec.describe Ci::Build do
end
end
- describe 'deployment' do
- describe '#outdated_deployment?' do
- subject { build.outdated_deployment? }
-
- context 'when build succeeded' do
- let(:build) { create(:ci_build, :success) }
- let!(:deployment) { create(:deployment, :success, deployable: build) }
+ describe 'environment' do
+ describe '#has_environment?' do
+ subject { build.has_environment? }
- context 'current deployment is latest' do
- it { is_expected.to be_falsey }
+ context 'when environment is defined' do
+ before do
+ build.update!(environment: 'review')
end
- context 'current deployment is not latest on environment' do
- let!(:deployment2) { create(:deployment, :success, environment: deployment.environment) }
-
- it { is_expected.to be_truthy }
- end
+ it { is_expected.to be_truthy }
end
- context 'when build failed' do
- let(:build) { create(:ci_build, :failed) }
+ context 'when environment is not defined' do
+ before do
+ build.update!(environment: nil)
+ end
it { is_expected.to be_falsey }
end
end
- end
- describe 'environment' do
- describe '#has_environment?' do
- subject { build.has_environment? }
+ describe '#count_user_verification?' do
+ subject { build.count_user_verification? }
- context 'when environment is defined' do
- before do
- build.update!(environment: 'review')
+ context 'when build is the verify action for the environment' do
+ let(:build) do
+ create(:ci_build,
+ ref: 'master',
+ environment: 'staging',
+ options: { environment: { action: 'verify' } })
end
it { is_expected.to be_truthy }
end
- context 'when environment is not defined' do
- before do
- build.update!(environment: nil)
+ context 'when build is not the verify action for the environment' do
+ let(:build) do
+ create(:ci_build,
+ ref: 'master',
+ environment: 'staging',
+ options: { environment: { action: 'start' } })
end
it { is_expected.to be_falsey }
@@ -1975,16 +1940,6 @@ RSpec.describe Ci::Build do
end
end
- describe '#first_pending' do
- let!(:first) { create(:ci_build, pipeline: pipeline, status: 'pending', created_at: Date.yesterday) }
- let!(:second) { create(:ci_build, pipeline: pipeline, status: 'pending') }
-
- subject { described_class.first_pending }
-
- it { is_expected.to be_a(described_class) }
- it('returns with the first pending build') { is_expected.to eq(first) }
- end
-
describe '#failed_but_allowed?' do
subject { build.failed_but_allowed? }
@@ -2134,6 +2089,34 @@ RSpec.describe Ci::Build do
end
end
+ describe '#save_tags' do
+ let(:build) { create(:ci_build, tag_list: ['tag']) }
+
+ it 'saves tags' do
+ build.save!
+
+ expect(build.tags.count).to eq(1)
+ expect(build.tags.first.name).to eq('tag')
+ end
+
+ it 'strips tags' do
+ build.tag_list = [' taga', 'tagb ', ' tagc ']
+
+ build.save!
+ expect(build.tags.map(&:name)).to match_array(%w[taga tagb tagc])
+ end
+
+ context 'with BulkInsertableTags.with_bulk_insert_tags' do
+ it 'does not save_tags' do
+ Ci::BulkInsertableTags.with_bulk_insert_tags do
+ build.save!
+ end
+
+ expect(build.tags).to be_empty
+ end
+ end
+ end
+
describe '#has_tags?' do
context 'when build has tags' do
subject { create(:ci_build, tag_list: ['tag']) }
@@ -2641,7 +2624,7 @@ RSpec.describe Ci::Build do
context 'when token is empty' do
before do
- build.update_columns(token: nil, token_encrypted: nil)
+ build.update_columns(token_encrypted: nil)
end
it { is_expected.to be_nil}
@@ -3294,10 +3277,6 @@ RSpec.describe Ci::Build do
let(:trigger) { create(:ci_trigger, project: project) }
let(:trigger_request) { create(:ci_trigger_request, pipeline: pipeline, trigger: trigger) }
- let(:user_trigger_variable) do
- { key: 'TRIGGER_KEY_1', value: 'TRIGGER_VALUE_1', public: false, masked: false }
- end
-
let(:predefined_trigger_variable) do
{ key: 'CI_PIPELINE_TRIGGERED', value: 'true', public: true, masked: false }
end
@@ -3306,26 +3285,7 @@ RSpec.describe Ci::Build do
build.trigger_request = trigger_request
end
- shared_examples 'returns variables for triggers' do
- it { is_expected.to include(user_trigger_variable) }
- it { is_expected.to include(predefined_trigger_variable) }
- end
-
- context 'when variables are stored in trigger_request' do
- before do
- trigger_request.update_attribute(:variables, { 'TRIGGER_KEY_1' => 'TRIGGER_VALUE_1' } )
- end
-
- it_behaves_like 'returns variables for triggers'
- end
-
- context 'when variables are stored in pipeline_variables' do
- before do
- create(:ci_pipeline_variable, pipeline: pipeline, key: 'TRIGGER_KEY_1', value: 'TRIGGER_VALUE_1')
- end
-
- it_behaves_like 'returns variables for triggers'
- end
+ it { is_expected.to include(predefined_trigger_variable) }
end
context 'when pipeline has a variable' do
@@ -3848,7 +3808,7 @@ RSpec.describe Ci::Build do
end
it 'queues BuildHooksWorker' do
- expect(BuildHooksWorker).to receive(:perform_async).with(build.id)
+ expect(BuildHooksWorker).to receive(:perform_async).with(build)
build.enqueue
end
@@ -4321,7 +4281,7 @@ RSpec.describe Ci::Build do
describe '#collect_test_reports!' do
subject { build.collect_test_reports!(test_reports) }
- let(:test_reports) { Gitlab::Ci::Reports::TestReports.new }
+ let(:test_reports) { Gitlab::Ci::Reports::TestReport.new }
it { expect(test_reports.get_suite(build.name).total_count).to eq(0) }
@@ -4372,7 +4332,7 @@ RSpec.describe Ci::Build do
context 'when build is part of parallel build' do
let(:build_1) { create(:ci_build, name: 'build 1/2') }
- let(:test_report) { Gitlab::Ci::Reports::TestReports.new }
+ let(:test_report) { Gitlab::Ci::Reports::TestReport.new }
before do
build_1.collect_test_reports!(test_report)
@@ -4396,7 +4356,7 @@ RSpec.describe Ci::Build do
end
context 'when build is part of matrix build' do
- let(:test_report) { Gitlab::Ci::Reports::TestReports.new }
+ let(:test_report) { Gitlab::Ci::Reports::TestReport.new }
let(:matrix_build_1) { create(:ci_build, :matrix) }
before do
diff --git a/spec/models/ci/group_spec.rb b/spec/models/ci/group_spec.rb
index 6c96e659a34..4900bc792af 100644
--- a/spec/models/ci/group_spec.rb
+++ b/spec/models/ci/group_spec.rb
@@ -70,7 +70,7 @@ RSpec.describe Ci::Group do
describe '.fabricate' do
let(:pipeline) { create(:ci_empty_pipeline) }
- let(:stage) { create(:ci_stage_entity, pipeline: pipeline) }
+ let(:stage) { create(:ci_stage, pipeline: pipeline) }
before do
create_build(:ci_build, name: 'rspec 0 2')
diff --git a/spec/models/ci/group_variable_spec.rb b/spec/models/ci/group_variable_spec.rb
index 3a4b836e453..fc5a9c879f6 100644
--- a/spec/models/ci/group_variable_spec.rb
+++ b/spec/models/ci/group_variable_spec.rb
@@ -56,4 +56,10 @@ RSpec.describe Ci::GroupVariable do
let!(:parent) { model.group }
end
+
+ describe '#audit_details' do
+ it "equals the group variable's key" do
+ expect(subject.audit_details).to eq(subject.key)
+ end
+ end
end
diff --git a/spec/models/ci/legacy_stage_spec.rb b/spec/models/ci/legacy_stage_spec.rb
deleted file mode 100644
index 2487ad85889..00000000000
--- a/spec/models/ci/legacy_stage_spec.rb
+++ /dev/null
@@ -1,268 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Ci::LegacyStage do
- let(:stage) { build(:ci_stage) }
- let(:pipeline) { stage.pipeline }
- let(:stage_name) { stage.name }
-
- describe '#expectations' do
- subject { stage }
-
- it { is_expected.to include_module(StaticModel) }
-
- it { is_expected.to respond_to(:pipeline) }
- it { is_expected.to respond_to(:name) }
-
- it { is_expected.to delegate_method(:project).to(:pipeline) }
- end
-
- describe '#statuses' do
- let!(:stage_build) { create_job(:ci_build) }
- let!(:commit_status) { create_job(:commit_status) }
- let!(:other_build) { create_job(:ci_build, stage: 'other stage') }
-
- subject { stage.statuses }
-
- it "returns only matching statuses" do
- is_expected.to contain_exactly(stage_build, commit_status)
- end
- end
-
- describe '#groups' do
- before do
- create_job(:ci_build, name: 'rspec 0 2')
- create_job(:ci_build, name: 'rspec 0 1')
- create_job(:ci_build, name: 'spinach 0 1')
- create_job(:commit_status, name: 'aaaaa')
- end
-
- it 'returns an array of three groups' do
- expect(stage.groups).to be_a Array
- expect(stage.groups).to all(be_a Ci::Group)
- expect(stage.groups.size).to eq 3
- end
-
- it 'returns groups with correctly ordered statuses' do
- expect(stage.groups.first.jobs.map(&:name))
- .to eq ['aaaaa']
- expect(stage.groups.second.jobs.map(&:name))
- .to eq ['rspec 0 1', 'rspec 0 2']
- expect(stage.groups.third.jobs.map(&:name))
- .to eq ['spinach 0 1']
- end
-
- it 'returns groups with correct names' do
- expect(stage.groups.map(&:name))
- .to eq %w[aaaaa rspec spinach]
- end
-
- context 'when a name is nil on legacy pipelines' do
- before do
- pipeline.builds.first.update_attribute(:name, nil)
- end
-
- it 'returns an array of three groups' do
- expect(stage.groups.map(&:name))
- .to eq ['', 'aaaaa', 'rspec', 'spinach']
- end
- end
- end
-
- describe '#statuses_count' do
- before do
- create_job(:ci_build)
- create_job(:ci_build, stage: 'other stage')
- end
-
- subject { stage.statuses_count }
-
- it "counts statuses only from current stage" do
- is_expected.to eq(1)
- end
- end
-
- describe '#builds' do
- let!(:stage_build) { create_job(:ci_build) }
- let!(:commit_status) { create_job(:commit_status) }
-
- subject { stage.builds }
-
- it "returns only builds" do
- is_expected.to contain_exactly(stage_build)
- end
- end
-
- describe '#status' do
- subject { stage.status }
-
- context 'if status is already defined' do
- let(:stage) { build(:ci_stage, status: 'success') }
-
- it "returns defined status" do
- is_expected.to eq('success')
- end
- end
-
- context 'if status has to be calculated' do
- let!(:stage_build) { create_job(:ci_build, status: :failed) }
-
- it "returns status of a build" do
- is_expected.to eq('failed')
- end
-
- context 'and builds are retried' do
- let!(:new_build) { create_job(:ci_build, status: :success) }
-
- before do
- stage_build.update!(retried: true)
- end
-
- it "returns status of latest build" do
- is_expected.to eq('success')
- end
- end
- end
- end
-
- describe '#detailed_status' do
- let(:user) { create(:user) }
-
- subject { stage.detailed_status(user) }
-
- context 'when build is created' do
- let!(:stage_build) { create_job(:ci_build, status: :created) }
-
- it 'returns detailed status for created stage' do
- expect(subject.text).to eq s_('CiStatusText|created')
- end
- end
-
- context 'when build is pending' do
- let!(:stage_build) { create_job(:ci_build, status: :pending) }
-
- it 'returns detailed status for pending stage' do
- expect(subject.text).to eq s_('CiStatusText|pending')
- end
- end
-
- context 'when build is running' do
- let!(:stage_build) { create_job(:ci_build, status: :running) }
-
- it 'returns detailed status for running stage' do
- expect(subject.text).to eq s_('CiStatus|running')
- end
- end
-
- context 'when build is successful' do
- let!(:stage_build) { create_job(:ci_build, status: :success) }
-
- it 'returns detailed status for successful stage' do
- expect(subject.text).to eq s_('CiStatusText|passed')
- end
- end
-
- context 'when build is failed' do
- let!(:stage_build) { create_job(:ci_build, status: :failed) }
-
- it 'returns detailed status for failed stage' do
- expect(subject.text).to eq s_('CiStatusText|failed')
- end
- end
-
- context 'when build is canceled' do
- let!(:stage_build) { create_job(:ci_build, status: :canceled) }
-
- it 'returns detailed status for canceled stage' do
- expect(subject.text).to eq s_('CiStatusText|canceled')
- end
- end
-
- context 'when build is skipped' do
- let!(:stage_build) { create_job(:ci_build, status: :skipped) }
-
- it 'returns detailed status for skipped stage' do
- expect(subject.text).to eq s_('CiStatusText|skipped')
- end
- end
- end
-
- describe '#success?' do
- context 'when stage is successful' do
- before do
- create_job(:ci_build, status: :success)
- create_job(:generic_commit_status, status: :success)
- end
-
- it 'is successful' do
- expect(stage).to be_success
- end
- end
-
- context 'when stage is not successful' do
- before do
- create_job(:ci_build, status: :failed)
- create_job(:generic_commit_status, status: :success)
- end
-
- it 'is not successful' do
- expect(stage).not_to be_success
- end
- end
- end
-
- describe '#has_warnings?' do
- context 'when stage has warnings' do
- context 'when using memoized warnings flag' do
- context 'when there are warnings' do
- let(:stage) { build(:ci_stage, warnings: true) }
-
- it 'returns true using memoized value' do
- expect(stage).not_to receive(:statuses)
- expect(stage).to have_warnings
- end
- end
-
- context 'when there are no warnings' do
- let(:stage) { build(:ci_stage, warnings: false) }
-
- it 'returns false using memoized value' do
- expect(stage).not_to receive(:statuses)
- expect(stage).not_to have_warnings
- end
- end
- end
-
- context 'when calculating warnings from statuses' do
- before do
- create(:ci_build, :failed, :allowed_to_fail,
- stage: stage_name, pipeline: pipeline)
- end
-
- it 'has warnings calculated from statuses' do
- expect(stage).to receive(:statuses).and_call_original
- expect(stage).to have_warnings
- end
- end
- end
-
- context 'when stage does not have warnings' do
- before do
- create(:ci_build, :success, stage: stage_name,
- pipeline: pipeline)
- end
-
- it 'does not have warnings calculated from statuses' do
- expect(stage).to receive(:statuses).and_call_original
- expect(stage).not_to have_warnings
- end
- end
- end
-
- def create_job(type, status: 'success', stage: stage_name, **opts)
- create(type, pipeline: pipeline, stage: stage, status: status, **opts)
- end
-
- it_behaves_like 'manual playable stage', :ci_stage
-end
diff --git a/spec/models/ci/pending_build_spec.rb b/spec/models/ci/pending_build_spec.rb
index 5692444339f..4bb43233dbd 100644
--- a/spec/models/ci/pending_build_spec.rb
+++ b/spec/models/ci/pending_build_spec.rb
@@ -118,41 +118,27 @@ RSpec.describe Ci::PendingBuild do
project.shared_runners_enabled = true
end
- context 'when ci_pending_builds_maintain_denormalized_data is enabled' do
- it 'sets instance_runners_enabled to true' do
- described_class.upsert_from_build!(build)
-
- expect(described_class.last.instance_runners_enabled).to be_truthy
- end
-
- context 'when project is about to be deleted' do
- before do
- build.project.update!(pending_delete: true)
- end
+ it 'sets instance_runners_enabled to true' do
+ described_class.upsert_from_build!(build)
- it 'sets instance_runners_enabled to false' do
- described_class.upsert_from_build!(build)
+ expect(described_class.last.instance_runners_enabled).to be_truthy
+ end
- expect(described_class.last.instance_runners_enabled).to be_falsey
- end
+ context 'when project is about to be deleted' do
+ before do
+ build.project.update!(pending_delete: true)
end
- context 'when builds are disabled' do
- before do
- build.project.project_feature.update!(builds_access_level: false)
- end
-
- it 'sets instance_runners_enabled to false' do
- described_class.upsert_from_build!(build)
+ it 'sets instance_runners_enabled to false' do
+ described_class.upsert_from_build!(build)
- expect(described_class.last.instance_runners_enabled).to be_falsey
- end
+ expect(described_class.last.instance_runners_enabled).to be_falsey
end
end
- context 'when ci_pending_builds_maintain_denormalized_data is disabled' do
+ context 'when builds are disabled' do
before do
- stub_feature_flags(ci_pending_builds_maintain_denormalized_data: false)
+ build.project.project_feature.update!(builds_access_level: false)
end
it 'sets instance_runners_enabled to false' do
@@ -168,24 +154,10 @@ RSpec.describe Ci::PendingBuild do
subject(:ci_pending_build) { described_class.last }
- context 'when ci_pending_builds_maintain_denormalized_data is enabled' do
- it 'sets tag_ids' do
- described_class.upsert_from_build!(build)
-
- expect(ci_pending_build.tag_ids).to eq(build.tags_ids)
- end
- end
-
- context 'when ci_pending_builds_maintain_denormalized_data is disabled' do
- before do
- stub_feature_flags(ci_pending_builds_maintain_denormalized_data: false)
- end
-
- it 'does not set tag_ids' do
- described_class.upsert_from_build!(build)
+ it 'sets tag_ids' do
+ described_class.upsert_from_build!(build)
- expect(ci_pending_build.tag_ids).to be_empty
- end
+ expect(ci_pending_build.tag_ids).to eq(build.tags_ids)
end
end
diff --git a/spec/models/ci/pipeline_artifact_spec.rb b/spec/models/ci/pipeline_artifact_spec.rb
index 801505f0231..b051f646bd4 100644
--- a/spec/models/ci/pipeline_artifact_spec.rb
+++ b/spec/models/ci/pipeline_artifact_spec.rb
@@ -196,6 +196,81 @@ RSpec.describe Ci::PipelineArtifact, type: :model do
end
end
+ describe '.create_or_replace_for_pipeline!' do
+ let_it_be(:pipeline) { create(:ci_empty_pipeline) }
+
+ let(:file_type) { :code_coverage }
+ let(:file) { CarrierWaveStringFile.new_file(file_content: 'content', filename: 'file.json', content_type: 'json') }
+ let(:size) { file['tempfile'].size }
+
+ subject do
+ Ci::PipelineArtifact.create_or_replace_for_pipeline!(
+ pipeline: pipeline,
+ file_type: file_type,
+ file: file,
+ size: size
+ )
+ end
+
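+ # time is frozen so expire_at can be compared exactly with EXPIRATION_DATE.from_now below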
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ context 'when there is no existing record' do
+ it 'creates a new pipeline artifact for the given parameters' do
+ expect { subject }.to change { Ci::PipelineArtifact.count }.from(0).to(1)
+
+ expect(subject.code_coverage?).to be(true)
+ expect(subject.pipeline).to eq(pipeline)
+ expect(subject.project_id).to eq(pipeline.project_id)
+ expect(subject.file.filename).to eq(file['filename'])
+ expect(subject.size).to eq(size)
+ expect(subject.file_format).to eq(Ci::PipelineArtifact::REPORT_TYPES[file_type].to_s)
+ expect(subject.expire_at).to eq(Ci::PipelineArtifact::EXPIRATION_DATE.from_now)
+ end
+ end
+
+ context 'when there are existing records with different types' do
+ let!(:existing_artifact) do
+ create(:ci_pipeline_artifact, pipeline: pipeline, file_type: file_type, expire_at: 1.day.from_now)
+ end
+
+ let!(:other_artifact) { create(:ci_pipeline_artifact, pipeline: pipeline, file_type: :code_quality_mr_diff) }
+
+ it 'replaces the existing pipeline artifact record with the given file type' do
+ expect { subject }.not_to change { Ci::PipelineArtifact.count }
+
+ expect(subject.id).not_to eq(existing_artifact.id)
+
+ expect(subject.code_coverage?).to be(true)
+ expect(subject.pipeline).to eq(pipeline)
+ expect(subject.project_id).to eq(pipeline.project_id)
+ expect(subject.file.filename).to eq(file['filename'])
+ expect(subject.size).to eq(size)
+ expect(subject.file_format).to eq(Ci::PipelineArtifact::REPORT_TYPES[file_type].to_s)
+ expect(subject.expire_at).to eq(Ci::PipelineArtifact::EXPIRATION_DATE.from_now)
+ end
+ end
+
+ context 'when ActiveRecordError is raised' do
+ let(:pipeline) { instance_double(Ci::Pipeline, id: 1) }
+ let(:file_type) { :code_coverage }
+ let(:error) { ActiveRecord::ActiveRecordError.new('something went wrong') }
+
+ before do
+ allow(pipeline).to receive(:pipeline_artifacts).and_raise(error)
+ end
+
+ it 'tracks and raises the exception' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_exception)
+ .with(error, { pipeline_id: pipeline.id, file_type: file_type }).and_call_original
+
+ expect { subject }.to raise_error(ActiveRecord::ActiveRecordError, 'something went wrong')
+ end
+ end
+ end
+
describe '#present' do
subject(:presenter) { report.present }
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 31752f300f4..081fa6cbbae 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -106,6 +106,50 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe 'state machine transitions' do
+ context 'from failed to success' do
+ let_it_be(:pipeline) { create(:ci_empty_pipeline, :failed) }
+
+ it 'schedules CoverageReportWorker' do
+ expect(Ci::PipelineArtifacts::CoverageReportWorker).to receive(:perform_async).with(pipeline.id)
+
+ pipeline.succeed!
+ end
+ end
+ end
+
+ describe 'pipeline age metric' do
+ let_it_be(:pipeline) { create(:ci_empty_pipeline, :created) }
+
+ let(:pipeline_age_histogram) do
+ ::Gitlab::Ci::Pipeline::Metrics.pipeline_age_histogram
+ end
+
+ context 'when pipeline age histogram is enabled' do
+ before do
+ stub_feature_flags(ci_pipeline_age_histogram: true)
+ end
+
+ it 'observes pipeline age' do
+ expect(pipeline_age_histogram).to receive(:observe)
+
+ described_class.find(pipeline.id)
+ end
+ end
+
+ context 'when pipeline age histogram is disabled' do
+ before do
+ stub_feature_flags(ci_pipeline_age_histogram: false)
+ end
+
+ it 'does not observe pipeline age' do
+ expect(pipeline_age_histogram).not_to receive(:observe)
+
+ described_class.find(pipeline.id)
+ end
+ end
+ end
+
describe '#set_status' do
let(:pipeline) { build(:ci_empty_pipeline, :created) }
@@ -167,6 +211,28 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '.created_after' do
+ let_it_be(:old_pipeline) { create(:ci_pipeline, created_at: 1.week.ago) }
+ let_it_be(:pipeline) { create(:ci_pipeline) }
+
+ subject { described_class.created_after(1.day.ago) }
+
+ it 'returns the pipeline' do
+ is_expected.to contain_exactly(pipeline)
+ end
+ end
+
+ describe '.created_before_id' do
+ let_it_be(:pipeline) { create(:ci_pipeline) }
+ let_it_be(:new_pipeline) { create(:ci_pipeline) }
+
+ subject { described_class.created_before_id(new_pipeline.id) }
+
+ it 'returns the pipeline' do
+ is_expected.to contain_exactly(pipeline)
+ end
+ end
+
describe '.for_sha' do
subject { described_class.for_sha(sha) }
@@ -997,6 +1063,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
'CI_MERGE_REQUEST_PROJECT_PATH' => merge_request.project.full_path,
'CI_MERGE_REQUEST_PROJECT_URL' => merge_request.project.web_url,
'CI_MERGE_REQUEST_TARGET_BRANCH_NAME' => merge_request.target_branch.to_s,
+ 'CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED' => ProtectedBranch.protected?(merge_request.target_project, merge_request.target_branch).to_s,
'CI_MERGE_REQUEST_TARGET_BRANCH_SHA' => '',
'CI_MERGE_REQUEST_SOURCE_PROJECT_ID' => merge_request.source_project.id.to_s,
'CI_MERGE_REQUEST_SOURCE_PROJECT_PATH' => merge_request.source_project.full_path,
@@ -1093,6 +1160,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
'CI_MERGE_REQUEST_PROJECT_PATH' => merge_request.project.full_path,
'CI_MERGE_REQUEST_PROJECT_URL' => merge_request.project.web_url,
'CI_MERGE_REQUEST_TARGET_BRANCH_NAME' => merge_request.target_branch.to_s,
+ 'CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED' => ProtectedBranch.protected?(merge_request.target_project, merge_request.target_branch).to_s,
'CI_MERGE_REQUEST_TARGET_BRANCH_SHA' => merge_request.target_branch_sha,
'CI_MERGE_REQUEST_SOURCE_PROJECT_ID' => merge_request.source_project.id.to_s,
'CI_MERGE_REQUEST_SOURCE_PROJECT_PATH' => merge_request.source_project.full_path,
@@ -1289,48 +1357,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
status: 'success')
end
- describe '#legacy_stages' do
- using RSpec::Parameterized::TableSyntax
-
- subject { pipeline.legacy_stages }
-
- context 'stages list' do
- it 'returns ordered list of stages' do
- expect(subject.map(&:name)).to eq(%w[build test deploy])
- end
- end
-
- context 'stages with statuses' do
- let(:statuses) do
- subject.map { |stage| [stage.name, stage.status] }
- end
-
- it 'returns list of stages with correct statuses' do
- expect(statuses).to eq([%w(build failed),
- %w(test success),
- %w(deploy running)])
- end
- end
-
- context 'when there is a stage with warnings' do
- before do
- create(:commit_status, pipeline: pipeline,
- stage: 'deploy',
- name: 'prod:2',
- stage_idx: 2,
- status: 'failed',
- allow_failure: true)
- end
-
- it 'populates stage with correct number of warnings' do
- deploy_stage = pipeline.legacy_stages.third
-
- expect(deploy_stage).not_to receive(:statuses)
- expect(deploy_stage).to have_warnings
- end
- end
- end
-
describe '#stages_count' do
it 'returns a valid number of stages' do
expect(pipeline.stages_count).to eq(3)
@@ -1344,37 +1370,11 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
- describe '#legacy_stage' do
- subject { pipeline.legacy_stage('test') }
-
- let(:pipeline) { build(:ci_empty_pipeline, :created) }
-
- context 'with status in stage' do
- before do
- create(:commit_status, pipeline: pipeline, stage: 'test')
- end
-
- it { expect(subject).to be_a Ci::LegacyStage }
- it { expect(subject.name).to eq 'test' }
- it { expect(subject.statuses).not_to be_empty }
- end
-
- context 'without status in stage' do
- before do
- create(:commit_status, pipeline: pipeline, stage: 'build')
- end
-
- it 'return stage object' do
- is_expected.to be_nil
- end
- end
- end
-
describe '#stages' do
let(:pipeline) { build(:ci_empty_pipeline, :created) }
before do
- create(:ci_stage_entity, project: project,
+ create(:ci_stage, project: project,
pipeline: pipeline,
position: 4,
name: 'deploy')
@@ -1391,12 +1391,12 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
stage_idx: 2,
name: 'build')
- create(:ci_stage_entity, project: project,
+ create(:ci_stage, project: project,
pipeline: pipeline,
position: 1,
name: 'sanity')
- create(:ci_stage_entity, project: project,
+ create(:ci_stage, project: project,
pipeline: pipeline,
position: 5,
name: 'cleanup')
@@ -1435,7 +1435,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
let(:build_c) { create_build('build3', queued_at: 0) }
%w[succeed! drop! cancel! skip! block! delay!].each do |action|
- context "when the pipeline recieved #{action} event" do
+ context "when the pipeline received #{action} event" do
it 'deletes a persistent ref' do
expect(pipeline.persistent_ref).to receive(:delete).once
@@ -1658,7 +1658,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
%w[succeed! drop! cancel! skip!].each do |action|
- context "when the pipeline recieved #{action} event" do
+ context "when the pipeline received #{action} event" do
it 'performs AutoMergeProcessWorker' do
expect(AutoMergeProcessWorker).to receive(:perform_async).with(merge_request.id)
@@ -3074,6 +3074,13 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
let(:pipeline_action) { action }
it 'schedules a new PipelineHooksWorker job' do
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ message: include("Enqueuing hooks for Pipeline #{pipeline.id}"),
+ class: described_class.name,
+ pipeline_id: pipeline.id,
+ project_id: pipeline.project_id,
+ pipeline_status: String
+ )
expect(PipelineHooksWorker).to receive(:perform_async).with(pipeline.id)
pipeline.public_send(pipeline_action)
@@ -3760,6 +3767,24 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#self_and_descendants_complete?' do
+ let_it_be(:pipeline) { create(:ci_pipeline, :success) }
+ let_it_be(:child_pipeline) { create(:ci_pipeline, :success, child_of: pipeline) }
+ let_it_be_with_reload(:grandchild_pipeline) { create(:ci_pipeline, :success, child_of: child_pipeline) }
+
+ context 'when all pipelines in the hierarchy are complete' do
+ it { expect(pipeline.self_and_descendants_complete?).to be(true) }
+ end
+
+ context 'when a pipeline in the hierarchy is not complete' do
+ before do
+ grandchild_pipeline.update!(status: :running)
+ end
+
+ it { expect(pipeline.self_and_descendants_complete?).to be(false) }
+ end
+ end
+
describe '#builds_in_self_and_descendants' do
subject(:builds) { pipeline.builds_in_self_and_descendants }
@@ -3928,7 +3953,21 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
context 'when pipeline status is running' do
let(:pipeline) { create(:ci_pipeline, :running) }
- it { is_expected.to be_falsey }
+ context 'with mr_show_reports_immediately flag enabled' do
+ before do
+ stub_feature_flags(mr_show_reports_immediately: project)
+ end
+
+ it { expect(subject).to be_truthy }
+ end
+
+ context 'with mr_show_reports_immediately flag disabled' do
+ before do
+ stub_feature_flags(mr_show_reports_immediately: false)
+ end
+
+ it { expect(subject).to be_falsey }
+ end
end
context 'when pipeline status is success' do
@@ -4002,7 +4041,21 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
context 'when pipeline status is running' do
let(:pipeline) { create(:ci_pipeline, :running) }
- it { expect(subject).to be_falsey }
+ context 'with mr_show_reports_immediately flag enabled' do
+ before do
+ stub_feature_flags(mr_show_reports_immediately: project)
+ end
+
+ it { expect(subject).to be_truthy }
+ end
+
+ context 'with mr_show_reports_immediately flag disabled' do
+ before do
+ stub_feature_flags(mr_show_reports_immediately: false)
+ end
+
+ it { expect(subject).to be_falsey }
+ end
end
context 'when pipeline status is success' do
@@ -4251,13 +4304,13 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
- describe '#find_stage_by_name' do
+ describe 'fetching a stage by name' do
let_it_be(:pipeline) { create(:ci_pipeline) }
let(:stage_name) { 'test' }
let(:stage) do
- create(:ci_stage_entity,
+ create(:ci_stage,
pipeline: pipeline,
project: pipeline.project,
name: 'test')
@@ -4267,19 +4320,37 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
create_list(:ci_build, 2, pipeline: pipeline, stage: stage.name)
end
- subject { pipeline.find_stage_by_name!(stage_name) }
+ describe '#stage' do
+ subject { pipeline.stage(stage_name) }
- context 'when stage exists' do
- it { is_expected.to eq(stage) }
+ context 'when stage exists' do
+ it { is_expected.to eq(stage) }
+ end
+
+ context 'when stage does not exist' do
+ let(:stage_name) { 'build' }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
end
- context 'when stage does not exist' do
- let(:stage_name) { 'build' }
+ describe '#find_stage_by_name' do
+ subject { pipeline.find_stage_by_name!(stage_name) }
- it 'raises an ActiveRecord exception' do
- expect do
- subject
- end.to raise_exception(ActiveRecord::RecordNotFound)
+ context 'when stage exists' do
+ it { is_expected.to eq(stage) }
+ end
+
+ context 'when stage does not exist' do
+ let(:stage_name) { 'build' }
+
+ it 'raises an ActiveRecord exception' do
+ expect do
+ subject
+ end.to raise_exception(ActiveRecord::RecordNotFound)
+ end
end
end
end
@@ -4832,13 +4903,13 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
- describe '#has_expired_test_reports?' do
- subject { pipeline.has_expired_test_reports? }
+ describe '#has_test_reports?' do
+ subject { pipeline.has_test_reports? }
let(:pipeline) { create(:ci_pipeline, :success, :with_test_reports) }
context 'when artifacts are not expired' do
- it { is_expected.to be_falsey }
+ it { is_expected.to be_truthy }
end
context 'when artifacts are expired' do
@@ -4849,6 +4920,14 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it { is_expected.to be_truthy }
end
+ context 'when artifacts are removed' do
+ before do
+ pipeline.job_artifacts.each(&:destroy)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
context 'when the pipeline is still running' do
let(:pipeline) { create(:ci_pipeline, :running) }
@@ -4942,4 +5021,35 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
end
+
+ describe '#age_in_minutes' do
+ let(:pipeline) { build(:ci_pipeline) }
+
+ context 'when pipeline has not been persisted' do
+ it 'returns zero' do
+ expect(pipeline.age_in_minutes).to eq 0
+ end
+ end
+
+ context 'when pipeline has been saved' do
+ it 'returns pipeline age in minutes' do
+ pipeline.save!
+
+ travel_to(pipeline.created_at + 2.hours) do
+ expect(pipeline.age_in_minutes).to eq 120
+ end
+ end
+ end
+
+ context 'when pipeline has been loaded without all attributes' do
+ it 'raises an exception' do
+ pipeline.save!
+
+ pipeline_id = Ci::Pipeline.where(id: pipeline.id).select(:id).first
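+ # selecting only :id leaves created_at unloaded, so age_in_minutes is expected to raise instead of returning a misleading value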
+
+ expect { pipeline_id.age_in_minutes }.to raise_error(ArgumentError)
+ end
+ end
+ end
end
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index cdd96d45561..789ae3a2ccc 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Ci::Processable do
new_proc
end
- let_it_be(:stage) { create(:ci_stage_entity, project: project, pipeline: pipeline, name: 'test') }
+ let_it_be(:stage) { create(:ci_stage, project: project, pipeline: pipeline, name: 'test') }
shared_context 'processable bridge' do
let_it_be(:downstream_project) { create(:project, :repository) }
@@ -57,7 +57,7 @@ RSpec.describe Ci::Processable do
let(:clone_accessors) { ::Ci::Build.clone_accessors.without(::Ci::Build.extra_accessors) }
let(:reject_accessors) do
- %i[id status user token token_encrypted coverage trace runner
+ %i[id status user token_encrypted coverage trace runner
artifacts_expire_at
created_at updated_at started_at finished_at queued_at erased_by
erased_at auto_canceled_by job_artifacts job_artifacts_archive
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 74d8b012b29..2fbfbbaf830 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Ci::Runner do
end
describe 'groups association' do
- # Due to other assoctions such as projects this whole spec is allowed to
+ # Due to other associations such as projects this whole spec is allowed to
# generate cross-database queries. So we have this temporary spec to
# validate that at least groups association does not generate cross-DB
# queries.
@@ -35,6 +35,46 @@ RSpec.describe Ci::Runner do
end
end
+ describe 'acts_as_taggable' do
+ let(:tag_name) { 'tag123' }
+
+ context 'on save' do
+ let_it_be_with_reload(:runner) { create(:ci_runner) }
+
+ before do
+ runner.tag_list = [tag_name]
+ end
+
+ context 'tag does not exist' do
+ it 'creates a tag' do
+ expect { runner.save! }.to change(ActsAsTaggableOn::Tag, :count).by(1)
+ end
+
+ it 'creates an association to the tag' do
+ runner.save!
+
+ expect(described_class.tagged_with(tag_name)).to include(runner)
+ end
+ end
+
+ context 'tag already exists' do
+ before do
+ ActsAsTaggableOn::Tag.create!(name: tag_name)
+ end
+
+ it 'does not create a tag' do
+ expect { runner.save! }.not_to change(ActsAsTaggableOn::Tag, :count)
+ end
+
+ it 'creates an association to the tag' do
+ runner.save!
+
+ expect(described_class.tagged_with(tag_name)).to include(runner)
+ end
+ end
+ end
+ end
+
describe 'validation' do
it { is_expected.to validate_presence_of(:access_level) }
it { is_expected.to validate_presence_of(:runner_type) }
@@ -1062,18 +1102,6 @@ RSpec.describe Ci::Runner do
end
end
end
-
- context 'with updated version' do
- before do
- runner.version = '1.2.3'
- end
-
- it 'updates version components with new version' do
- heartbeat
-
- expect(runner.reload.read_attribute(:semver)).to eq '15.0.1'
- end
- end
end
def expect_redis_update
@@ -1088,7 +1116,6 @@ RSpec.describe Ci::Runner do
.and change { runner.reload.read_attribute(:architecture) }
.and change { runner.reload.read_attribute(:config) }
.and change { runner.reload.read_attribute(:executor_type) }
- .and change { runner.reload.read_attribute(:semver) }
end
end
@@ -1193,6 +1220,47 @@ RSpec.describe Ci::Runner do
end
end
+ describe '#save_tags' do
+ let(:runner) { build(:ci_runner, tag_list: ['tag']) }
+
+ it 'saves tags' do
+ runner.save!
+
+ expect(runner.tags.count).to eq(1)
+ expect(runner.tags.first.name).to eq('tag')
+ end
+
+ it 'strips tags' do
+ runner.tag_list = [' taga', 'tagb ', ' tagc ']
+
+ runner.save!
+ expect(runner.tags.map(&:name)).to match_array(%w[taga tagb tagc])
+ end
+
+ context 'with BulkInsertableTags.with_bulk_insert_tags' do
+ it 'does not save_tags' do
+ Ci::BulkInsertableTags.with_bulk_insert_tags do
+ runner.save!
+ end
+
+ expect(runner.tags).to be_empty
+ end
+
+ context 'over TAG_LIST_MAX_LENGTH' do
+ let(:tag_list) { (1..described_class::TAG_LIST_MAX_LENGTH + 1).map { |i| "tag#{i}" } }
+ let(:runner) { build(:ci_runner, tag_list: tag_list) }
+
+ it 'fails validation if over tag limit' do
+ Ci::BulkInsertableTags.with_bulk_insert_tags do
+ expect { runner.save! }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+
+ expect(runner.tags).to be_empty
+ end
+ end
+ end
+ end
+
describe '#has_tags?' do
context 'when runner has tags' do
subject { create(:ci_runner, tag_list: ['tag']) }
@@ -1700,40 +1768,37 @@ RSpec.describe Ci::Runner do
end
end
- describe '.save' do
- context 'with initial value' do
- let(:runner) { create(:ci_runner, version: 'v1.2.3') }
-
- it 'updates semver column' do
- expect(runner.semver).to eq '1.2.3'
- end
- end
+ describe '#with_upgrade_status' do
+ subject { described_class.with_upgrade_status(upgrade_status) }
- context 'with no initial version value' do
- let(:runner) { build(:ci_runner) }
+ let_it_be(:runner_14_0_0) { create(:ci_runner, version: '14.0.0') }
+ let_it_be(:runner_14_1_0) { create(:ci_runner, version: '14.1.0') }
+ let_it_be(:runner_14_1_1) { create(:ci_runner, version: '14.1.1') }
+ let_it_be(:runner_version_14_0_0) { create(:ci_runner_version, version: '14.0.0', status: :available) }
+ let_it_be(:runner_version_14_1_0) { create(:ci_runner_version, version: '14.1.0', status: :recommended) }
+ let_it_be(:runner_version_14_1_1) { create(:ci_runner_version, version: '14.1.1', status: :not_available) }
- context 'with version change' do
- subject(:update_version) { runner.update!(version: new_version) }
+ context ':not_available' do
+ let(:upgrade_status) { :not_available }
- context 'to invalid version' do
- let(:new_version) { 'invalid version' }
-
- it 'updates semver column to nil' do
- update_version
+ it 'returns runners whose version is assigned :not_available' do
+ is_expected.to contain_exactly(runner_14_1_1)
+ end
+ end
- expect(runner.reload.semver).to be_nil
- end
- end
+ context ':available' do
+ let(:upgrade_status) { :available }
- context 'to v14.10.1' do
- let(:new_version) { 'v14.10.1' }
+ it 'returns runners whose version is assigned :available' do
+ is_expected.to contain_exactly(runner_14_0_0)
+ end
+ end
- it 'updates semver column' do
- update_version
+ context ':recommended' do
+ let(:upgrade_status) { :recommended }
- expect(runner.reload.semver).to eq '14.10.1'
- end
- end
+ it 'returns runners whose version is assigned :recommended' do
+ is_expected.to contain_exactly(runner_14_1_0)
end
end
end
diff --git a/spec/models/ci/runner_version_spec.rb b/spec/models/ci/runner_version_spec.rb
new file mode 100644
index 00000000000..d3395942a39
--- /dev/null
+++ b/spec/models/ci/runner_version_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::RunnerVersion do
+ it_behaves_like 'having unique enum values'
+
+ let_it_be(:runner_version_not_available) do
+ create(:ci_runner_version, version: 'abc123', status: :not_available)
+ end
+
+ let_it_be(:runner_version_recommended) do
+ create(:ci_runner_version, version: 'abc234', status: :recommended)
+ end
+
+ describe '.not_available' do
+ subject { described_class.not_available }
+
+ it { is_expected.to match_array([runner_version_not_available]) }
+ end
+
+ describe '.potentially_outdated' do
+ subject { described_class.potentially_outdated }
+
+ let_it_be(:runner_version_nil) { create(:ci_runner_version, version: 'abc345', status: nil) }
+ let_it_be(:runner_version_available) do
+ create(:ci_runner_version, version: 'abc456', status: :available)
+ end
+
+ let_it_be(:runner_version_unknown) do
+ create(:ci_runner_version, version: 'abc567', status: :unknown)
+ end
+
+ it 'contains any runner version that is not already recommended' do
+ is_expected.to match_array([
+ runner_version_nil,
+ runner_version_not_available,
+ runner_version_available,
+ runner_version_unknown
+ ])
+ end
+ end
+
+ describe 'validation' do
+ context 'when runner version is too long' do
+ let(:runner_version) { build(:ci_runner_version, version: 'a' * 2049) }
+
+ it 'is not valid' do
+ expect(runner_version).to be_invalid
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/stage_spec.rb b/spec/models/ci/stage_spec.rb
index b91348eb408..d55a8509a98 100644
--- a/spec/models/ci/stage_spec.rb
+++ b/spec/models/ci/stage_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Ci::Stage, :models do
let_it_be(:pipeline) { create(:ci_empty_pipeline) }
- let(:stage) { create(:ci_stage_entity, pipeline: pipeline, project: pipeline.project) }
+ let(:stage) { create(:ci_stage, pipeline: pipeline, project: pipeline.project) }
it_behaves_like 'having unique enum values'
@@ -30,9 +30,9 @@ RSpec.describe Ci::Stage, :models do
describe '.by_position' do
it 'finds stages by position' do
- a = create(:ci_stage_entity, position: 1)
- b = create(:ci_stage_entity, position: 2)
- c = create(:ci_stage_entity, position: 3)
+ a = create(:ci_stage, position: 1)
+ b = create(:ci_stage, position: 2)
+ c = create(:ci_stage, position: 3)
expect(described_class.by_position(1)).to contain_exactly(a)
expect(described_class.by_position(2)).to contain_exactly(b)
@@ -42,9 +42,9 @@ RSpec.describe Ci::Stage, :models do
describe '.by_name' do
it 'finds stages by name' do
- a = create(:ci_stage_entity, name: 'a')
- b = create(:ci_stage_entity, name: 'b')
- c = create(:ci_stage_entity, name: 'c')
+ a = create(:ci_stage, name: 'a')
+ b = create(:ci_stage, name: 'b')
+ c = create(:ci_stage, name: 'c')
expect(described_class.by_name('a')).to contain_exactly(a)
expect(described_class.by_name('b')).to contain_exactly(b)
@@ -54,7 +54,7 @@ RSpec.describe Ci::Stage, :models do
describe '#status' do
context 'when stage is pending' do
- let(:stage) { create(:ci_stage_entity, status: 'pending') }
+ let(:stage) { create(:ci_stage, status: 'pending') }
it 'has a correct status value' do
expect(stage.status).to eq 'pending'
@@ -62,7 +62,7 @@ RSpec.describe Ci::Stage, :models do
end
context 'when stage is success' do
- let(:stage) { create(:ci_stage_entity, status: 'success') }
+ let(:stage) { create(:ci_stage, status: 'success') }
it 'has a correct status value' do
expect(stage.status).to eq 'success'
@@ -119,7 +119,7 @@ RSpec.describe Ci::Stage, :models do
end
context 'when stage has only created builds' do
- let(:stage) { create(:ci_stage_entity, status: :created) }
+ let(:stage) { create(:ci_stage, status: :created) }
before do
create(:ci_build, :created, stage_id: stage.id)
@@ -206,7 +206,7 @@ RSpec.describe Ci::Stage, :models do
using RSpec::Parameterized::TableSyntax
let(:user) { create(:user) }
- let(:stage) { create(:ci_stage_entity, status: :created) }
+ let(:stage) { create(:ci_stage, status: :created) }
subject { stage.detailed_status(user) }
@@ -269,7 +269,7 @@ RSpec.describe Ci::Stage, :models do
describe '#delay' do
subject { stage.delay }
- let(:stage) { create(:ci_stage_entity, status: :created) }
+ let(:stage) { create(:ci_stage, status: :created) }
it 'updates stage status' do
subject
@@ -361,12 +361,12 @@ RSpec.describe Ci::Stage, :models do
end
end
- it_behaves_like 'manual playable stage', :ci_stage_entity
+ it_behaves_like 'manual playable stage', :ci_stage
context 'loose foreign key on ci_stages.project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
let!(:parent) { create(:project) }
- let!(:model) { create(:ci_stage_entity, project: parent) }
+ let!(:model) { create(:ci_stage, project: parent) }
end
end
end
diff --git a/spec/models/ci/variable_spec.rb b/spec/models/ci/variable_spec.rb
index 29ca088ee04..f0af229ff2c 100644
--- a/spec/models/ci/variable_spec.rb
+++ b/spec/models/ci/variable_spec.rb
@@ -51,4 +51,10 @@ RSpec.describe Ci::Variable do
let!(:model) { create(:ci_variable, project: parent) }
end
end
+
+ describe '#audit_details' do
+ it "equals to the variable's key" do
+ expect(subject.audit_details).to eq(subject.key)
+ end
+ end
end
diff --git a/spec/models/clusters/applications/elastic_stack_spec.rb b/spec/models/clusters/applications/elastic_stack_spec.rb
deleted file mode 100644
index af2802d5e47..00000000000
--- a/spec/models/clusters/applications/elastic_stack_spec.rb
+++ /dev/null
@@ -1,177 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Applications::ElasticStack do
- include KubernetesHelpers
-
- include_examples 'cluster application core specs', :clusters_applications_elastic_stack
- include_examples 'cluster application status specs', :clusters_applications_elastic_stack
- include_examples 'cluster application version specs', :clusters_applications_elastic_stack
- include_examples 'cluster application helm specs', :clusters_applications_elastic_stack
-
- describe 'cluster.integration_elastic_stack state synchronization' do
- let!(:application) { create(:clusters_applications_elastic_stack) }
- let(:cluster) { application.cluster }
- let(:integration) { cluster.integration_elastic_stack }
-
- describe 'after_destroy' do
- it 'disables the corresponding integration' do
- application.destroy!
-
- expect(integration).not_to be_enabled
- end
- end
-
- describe 'on install' do
- it 'enables the corresponding integration' do
- application.make_scheduled!
- application.make_installing!
- application.make_installed!
-
- expect(integration).to be_enabled
- end
- end
-
- describe 'on uninstall' do
- it 'disables the corresponding integration' do
- application.make_scheduled!
- application.make_installing!
- application.make_installed!
- application.make_externally_uninstalled!
-
- expect(integration).not_to be_enabled
- end
- end
- end
-
- describe '#install_command' do
- let!(:elastic_stack) { create(:clusters_applications_elastic_stack) }
-
- subject { elastic_stack.install_command }
-
- it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::V3::InstallCommand) }
-
- it 'is initialized with elastic stack arguments' do
- expect(subject.name).to eq('elastic-stack')
- expect(subject.chart).to eq('elastic-stack/elastic-stack')
- expect(subject.version).to eq('3.0.0')
- expect(subject.repository).to eq('https://charts.gitlab.io')
- expect(subject).to be_rbac
- expect(subject.files).to eq(elastic_stack.files)
- expect(subject.preinstall).to be_empty
- end
-
- context 'within values.yaml' do
- let(:values_yaml_content) {subject.files[:"values.yaml"]}
-
- it 'contains the disabled index lifecycle management' do
- expect(values_yaml_content).to include "setup.ilm.enabled: false"
- end
-
- it 'contains daily indices with respective template' do
- expect(values_yaml_content).to include "index: \"filebeat-%{[agent.version]}-%{+yyyy.MM.dd}\""
- expect(values_yaml_content).to include "setup.template.name: 'filebeat'"
- expect(values_yaml_content).to include "setup.template.pattern: 'filebeat-*'"
- end
- end
-
- context 'on a non rbac enabled cluster' do
- before do
- elastic_stack.cluster.platform_kubernetes.abac!
- end
-
- it { is_expected.not_to be_rbac }
- end
-
- context 'on versions older than 2' do
- before do
- elastic_stack.status = elastic_stack.status_states[:updating]
- elastic_stack.version = "1.9.0"
- end
-
- it 'includes a preinstall script' do
- expect(subject.preinstall).not_to be_empty
- expect(subject.preinstall.first).to include("helm uninstall")
- end
- end
-
- context 'on versions older than 3' do
- before do
- elastic_stack.status = elastic_stack.status_states[:updating]
- elastic_stack.version = "2.9.0"
- end
-
- it 'includes a preinstall script' do
- expect(subject.preinstall).not_to be_empty
- expect(subject.preinstall.first).to include("helm uninstall")
- end
- end
-
- context 'application failed to install previously' do
- let(:elastic_stack) { create(:clusters_applications_elastic_stack, :errored, version: '0.0.1') }
-
- it 'is initialized with the locked version' do
- expect(subject.version).to eq('3.0.0')
- end
- end
- end
-
- describe '#chart_above_v2?' do
- let(:elastic_stack) { create(:clusters_applications_elastic_stack, version: version) }
-
- subject { elastic_stack.chart_above_v2? }
-
- context 'on v1.9.0' do
- let(:version) { '1.9.0' }
-
- it { is_expected.to be_falsy }
- end
-
- context 'on v2.0.0' do
- let(:version) { '2.0.0' }
-
- it { is_expected.to be_truthy }
- end
- end
-
- describe '#chart_above_v3?' do
- let(:elastic_stack) { create(:clusters_applications_elastic_stack, version: version) }
-
- subject { elastic_stack.chart_above_v3? }
-
- context 'on v1.9.0' do
- let(:version) { '1.9.0' }
-
- it { is_expected.to be_falsy }
- end
-
- context 'on v3.0.0' do
- let(:version) { '3.0.0' }
-
- it { is_expected.to be_truthy }
- end
- end
-
- describe '#uninstall_command' do
- let!(:elastic_stack) { create(:clusters_applications_elastic_stack) }
-
- subject { elastic_stack.uninstall_command }
-
- it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::V3::DeleteCommand) }
-
- it 'is initialized with elastic stack arguments' do
- expect(subject.name).to eq('elastic-stack')
- expect(subject).to be_rbac
- expect(subject.files).to eq(elastic_stack.files)
- end
-
- it 'specifies a post delete command to remove custom resource definitions' do
- expect(subject.postdelete).to eq([
- 'kubectl delete pvc --selector app\\=elastic-stack-elasticsearch-master --namespace gitlab-managed-apps'
- ])
- end
- end
-
- it_behaves_like 'cluster-based #elasticsearch_client', :clusters_applications_elastic_stack
-end
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 30591a3ff5d..65ead01a2bd 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -42,7 +42,6 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
it { is_expected.to delegate_method(:available?).to(:application_helm).with_prefix }
it { is_expected.to delegate_method(:available?).to(:application_ingress).with_prefix }
it { is_expected.to delegate_method(:available?).to(:application_knative).with_prefix }
- it { is_expected.to delegate_method(:available?).to(:integration_elastic_stack).with_prefix }
it { is_expected.to delegate_method(:available?).to(:integration_prometheus).with_prefix }
it { is_expected.to delegate_method(:external_ip).to(:application_ingress).with_prefix }
it { is_expected.to delegate_method(:external_hostname).to(:application_ingress).with_prefix }
@@ -200,22 +199,6 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
end
end
- describe '.with_available_elasticstack' do
- subject { described_class.with_available_elasticstack }
-
- let_it_be(:cluster) { create(:cluster) }
-
- context 'cluster has ElasticStack application' do
- let!(:application) { create(:clusters_applications_elastic_stack, :installed, cluster: cluster) }
-
- it { is_expected.to include(cluster) }
- end
-
- context 'cluster does not have ElasticStack application' do
- it { is_expected.not_to include(cluster) }
- end
- end
-
describe '.distinct_with_deployed_environments' do
subject { described_class.distinct_with_deployed_environments }
diff --git a/spec/models/clusters/integrations/elastic_stack_spec.rb b/spec/models/clusters/integrations/elastic_stack_spec.rb
deleted file mode 100644
index be4d59b52a2..00000000000
--- a/spec/models/clusters/integrations/elastic_stack_spec.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Integrations::ElasticStack do
- include KubernetesHelpers
- include StubRequests
-
- describe 'associations' do
- it { is_expected.to belong_to(:cluster).class_name('Clusters::Cluster') }
- end
-
- describe 'validations' do
- it { is_expected.to validate_presence_of(:cluster) }
- it { is_expected.not_to allow_value(nil).for(:enabled) }
- end
-
- it_behaves_like 'cluster-based #elasticsearch_client', :clusters_integrations_elastic_stack
-end
diff --git a/spec/models/clusters/integrations/prometheus_spec.rb b/spec/models/clusters/integrations/prometheus_spec.rb
index d1e40fffee0..90e99aefdce 100644
--- a/spec/models/clusters/integrations/prometheus_spec.rb
+++ b/spec/models/clusters/integrations/prometheus_spec.rb
@@ -26,19 +26,6 @@ RSpec.describe Clusters::Integrations::Prometheus do
integration.destroy!
end
-
- context 'when the FF :rename_integrations_workers is disabled' do
- before do
- stub_feature_flags(rename_integrations_workers: false)
- end
-
- it 'uses the old worker' do
- expect(Clusters::Applications::DeactivateServiceWorker)
- .to receive(:perform_async).with(cluster.id, 'prometheus')
-
- integration.destroy!
- end
- end
end
describe 'after_save' do
@@ -70,19 +57,6 @@ RSpec.describe Clusters::Integrations::Prometheus do
integration.update!(enabled: true)
end
-
- context 'when the FF :rename_integrations_workers is disabled' do
- before do
- stub_feature_flags(rename_integrations_workers: false)
- end
-
- it 'uses the old worker' do
- expect(Clusters::Applications::ActivateServiceWorker)
- .to receive(:perform_async).with(cluster.id, 'prometheus')
-
- integration.update!(enabled: true)
- end
- end
end
context 'when disabling' do
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index dbb15fad246..3cccc41a892 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -803,7 +803,7 @@ RSpec.describe CommitStatus do
describe 'ensure stage assignment' do
context 'when commit status has a stage_id assigned' do
let!(:stage) do
- create(:ci_stage_entity, project: project, pipeline: pipeline)
+ create(:ci_stage, project: project, pipeline: pipeline)
end
let(:commit_status) do
@@ -836,7 +836,7 @@ RSpec.describe CommitStatus do
context 'when commit status does not have stage but it exists' do
let!(:stage) do
- create(:ci_stage_entity, project: project,
+ create(:ci_stage, project: project,
pipeline: pipeline,
name: 'test')
end
@@ -984,22 +984,6 @@ RSpec.describe CommitStatus do
end
end
- describe '.bulk_insert_tags!' do
- let(:statuses) { double('statuses') }
- let(:inserter) { double('inserter') }
-
- it 'delegates to bulk insert class' do
- expect(Gitlab::Ci::Tags::BulkInsert)
- .to receive(:new)
- .with(statuses)
- .and_return(inserter)
-
- expect(inserter).to receive(:insert!)
-
- described_class.bulk_insert_tags!(statuses)
- end
- end
-
describe '#expire_etag_cache!' do
it 'expires the etag cache' do
expect_next_instance_of(Gitlab::EtagCaching::Store) do |etag_store|
diff --git a/spec/models/concerns/awareness_spec.rb b/spec/models/concerns/awareness_spec.rb
new file mode 100644
index 00000000000..67acacc7bb1
--- /dev/null
+++ b/spec/models/concerns/awareness_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Awareness, :clean_gitlab_redis_shared_state do
+ subject { create(:user) }
+
+ let(:session) { AwarenessSession.for(1) }
+
+ describe "when joining a session" do
+ it "increases the number of sessions" do
+ expect { subject.join(session) }
+ .to change { subject.session_ids.size }
+ .by(1)
+ end
+ end
+
+ describe "when leaving session" do
+ it "decreases the number of sessions" do
+ subject.join(session)
+
+ expect { subject.leave(session) }
+ .to change { subject.session_ids.size }
+ .by(-1)
+ end
+ end
+
+ describe "when joining multiple sessions" do
+ let(:session2) { AwarenessSession.for(2) }
+
+ it "increases number of active sessions for user" do
+ expect do
+ subject.join(session)
+ subject.join(session2)
+ end.to change { subject.session_ids.size }
+ .by(2)
+ end
+ end
+end
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index a00129b3fdf..19b9a1519eb 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
include CacheMarkdownField
cache_markdown_field :title, pipeline: :single_line
cache_markdown_field :description
+
+ before_validation -> { self.work_item_type_id = ::WorkItems::Type.default_issue_type.id }
end
end
diff --git a/spec/models/concerns/cacheable_attributes_spec.rb b/spec/models/concerns/cacheable_attributes_spec.rb
index dc80e30216a..0629debda15 100644
--- a/spec/models/concerns/cacheable_attributes_spec.rb
+++ b/spec/models/concerns/cacheable_attributes_spec.rb
@@ -205,7 +205,12 @@ RSpec.describe CacheableAttributes do
end
end
- it 'uses RequestStore in addition to process memory cache', :request_store, :do_not_mock_admin_mode_setting do
+ it(
+ 'uses RequestStore in addition to process memory cache',
+ :request_store,
+ :do_not_mock_admin_mode_setting,
+ :do_not_stub_snowplow_by_default
+ ) do
# Warm up the cache
create(:application_setting).cache!
diff --git a/spec/models/concerns/ci/artifactable_spec.rb b/spec/models/concerns/ci/artifactable_spec.rb
index b27a4d0dcc1..6af244a5a0f 100644
--- a/spec/models/concerns/ci/artifactable_spec.rb
+++ b/spec/models/concerns/ci/artifactable_spec.rb
@@ -53,6 +53,15 @@ RSpec.describe Ci::Artifactable do
expect { |b| artifact.each_blob(&b) }.to raise_error(described_class::NotSupportedAdapterError)
end
end
+
+ context 'pushes artifact_size to application context' do
+ let(:artifact) { create(:ci_job_artifact, :junit) }
+
+ it 'logs artifact size', :aggregate_failures do
+ expect { |b| artifact.each_blob(&b) }.to yield_control.once
+ expect(Gitlab::ApplicationContext.current).to include("meta.artifact_size" => artifact.size)
+ end
+ end
end
context 'ActiveRecord scopes' do
diff --git a/spec/models/concerns/ci/bulk_insertable_tags_spec.rb b/spec/models/concerns/ci/bulk_insertable_tags_spec.rb
new file mode 100644
index 00000000000..23f0831403d
--- /dev/null
+++ b/spec/models/concerns/ci/bulk_insertable_tags_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::BulkInsertableTags do
+ let(:taggable_class) do
+ Class.new do
+ prepend Ci::BulkInsertableTags
+
+ attr_reader :tags_saved
+
+ def save_tags
+ @tags_saved = true
+ end
+ end
+ end
+
+ let(:record) { taggable_class.new }
+
+ describe '.with_bulk_insert_tags' do
+ it 'changes the thread key to true' do
+ expect(Thread.current['ci_bulk_insert_tags']).to be_nil
+
+ described_class.with_bulk_insert_tags do
+ expect(Thread.current['ci_bulk_insert_tags']).to eq(true)
+ end
+
+ expect(Thread.current['ci_bulk_insert_tags']).to be_nil
+ end
+ end
+
+ describe '#save_tags' do
+ it 'calls super' do
+ record.save_tags
+
+ expect(record.tags_saved).to eq(true)
+ end
+
+ it 'does not call super with BulkInsertableTags.with_bulk_insert_tags' do
+ described_class.with_bulk_insert_tags do
+ record.save_tags
+ end
+
+ expect(record.tags_saved).to be_nil
+ end
+
+ it 'isolates bulk insert behavior between threads' do
+ record2 = taggable_class.new
+
+ t1 = Thread.new do
+ described_class.with_bulk_insert_tags do
+ record.save_tags
+ end
+ end
+
+ t2 = Thread.new do
+ record2.save_tags
+ end
+
+ [t1, t2].each(&:join)
+
+ expect(record.tags_saved).to be_nil
+ expect(record2.tags_saved).to eq(true)
+ end
+ end
+end
diff --git a/spec/models/concerns/each_batch_spec.rb b/spec/models/concerns/each_batch_spec.rb
index f1fb4fcbd03..2c75d4d5c41 100644
--- a/spec/models/concerns/each_batch_spec.rb
+++ b/spec/models/concerns/each_batch_spec.rb
@@ -3,17 +3,17 @@
require 'spec_helper'
RSpec.describe EachBatch do
- describe '.each_batch' do
- let(:model) do
- Class.new(ActiveRecord::Base) do
- include EachBatch
+ let(:model) do
+ Class.new(ActiveRecord::Base) do
+ include EachBatch
- self.table_name = 'users'
+ self.table_name = 'users'
- scope :never_signed_in, -> { where(sign_in_count: 0) }
- end
+ scope :never_signed_in, -> { where(sign_in_count: 0) }
end
+ end
+ describe '.each_batch' do
before do
create_list(:user, 5, updated_at: 1.day.ago)
end
@@ -86,4 +86,89 @@ RSpec.describe EachBatch do
end
end
end
+
+ describe '.distinct_each_batch' do
+ let_it_be(:users) { create_list(:user, 5, sign_in_count: 0) }
+
+ let(:params) { {} }
+
+ subject(:values) do
+ values = []
+
+ model.distinct_each_batch(**params) { |rel| values.concat(rel.pluck(params[:column])) }
+ values
+ end
+
+ context 'when iterating over a unique column' do
+ context 'when using ascending order' do
+ let(:expected_values) { users.pluck(:id).sort }
+ let(:params) { { column: :id, of: 1, order: :asc } }
+
+ it { is_expected.to eq(expected_values) }
+
+ context 'when using larger batch size' do
+ before do
+ params[:of] = 3
+ end
+
+ it { is_expected.to eq(expected_values) }
+ end
+
+ context 'when using larger batch size than the result size' do
+ before do
+ params[:of] = 100
+ end
+
+ it { is_expected.to eq(expected_values) }
+ end
+ end
+
+ context 'when using descending order' do
+ let(:expected_values) { users.pluck(:id).sort.reverse }
+ let(:params) { { column: :id, of: 1, order: :desc } }
+
+ it { is_expected.to eq(expected_values) }
+
+ context 'when using larger batch size' do
+ before do
+ params[:of] = 3
+ end
+
+ it { is_expected.to eq(expected_values) }
+ end
+ end
+ end
+
+ context 'when iterating over a non-unique column' do
+ let(:params) { { column: :sign_in_count, of: 2, order: :asc } }
+
+ context 'when only one value is present' do
+ it { is_expected.to eq([0]) }
+ end
+
+ context 'when duplicated values are present' do
+ let(:expected_values) { [2, 5] }
+
+ before do
+ users[0].reload.update!(sign_in_count: 5)
+ users[1].reload.update!(sign_in_count: 2)
+ users[2].reload.update!(sign_in_count: 5)
+ users[3].reload.update!(sign_in_count: 2)
+ users[4].reload.update!(sign_in_count: 5)
+ end
+
+ it { is_expected.to eq(expected_values) }
+
+ context 'when using descending order' do
+ let(:expected_values) { [5, 2] }
+
+ before do
+ params[:order] = :desc
+ end
+
+ it { is_expected.to eq(expected_values) }
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/concerns/loose_index_scan_spec.rb b/spec/models/concerns/loose_index_scan_spec.rb
new file mode 100644
index 00000000000..685819bfb86
--- /dev/null
+++ b/spec/models/concerns/loose_index_scan_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe LooseIndexScan, type: :model do
+ let(:issue_model) do
+ Class.new(ApplicationRecord) do
+ include LooseIndexScan
+
+ self.table_name = 'issues'
+ end
+ end
+
+ let_it_be(:user_1) { create(:user) }
+ let_it_be(:user_2) { create(:user) }
+ let_it_be(:user_3) { create(:user) }
+
+ let_it_be(:issue_1) { create(:issue, author: user_2) }
+ let_it_be(:issue_2) { create(:issue, author: user_1) }
+ let_it_be(:issue_3) { create(:issue, author: user_1) }
+ let_it_be(:issue_4) { create(:issue, author: user_2) }
+ let_it_be(:issue_5) { create(:issue, author: user_3) }
+
+ context 'loading distinct author_ids' do
+ subject(:author_ids) { issue_model.loose_index_scan(column: :author_id, order: order).pluck(:author_id) }
+
+ shared_examples 'assert distinct values example' do
+ it 'loads the distinct values in the correct order' do
+ expect(author_ids).to eq(expected_order)
+ end
+ end
+
+ context 'when using ascending order' do
+ let(:order) { :asc }
+ let(:expected_order) { [user_1.id, user_2.id, user_3.id] }
+
+ it_behaves_like 'assert distinct values example'
+
+ context 'when null values are present' do
+ before do
+ issue_1.author_id = nil
+ issue_1.save!(validate: false)
+ end
+
+ it_behaves_like 'assert distinct values example'
+ end
+
+ context 'when using descending order' do
+ let(:order) { :desc }
+ let(:expected_order) { [user_3.id, user_2.id, user_1.id] }
+
+ it_behaves_like 'assert distinct values example'
+ end
+ end
+ end
+end
diff --git a/spec/models/concerns/participable_spec.rb b/spec/models/concerns/participable_spec.rb
index 99a3a0fb79a..b92c7c52f0b 100644
--- a/spec/models/concerns/participable_spec.rb
+++ b/spec/models/concerns/participable_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Participable do
expect(instance).to receive(:foo).and_return(user2)
expect(instance).to receive(:bar).and_return(user3)
- expect(instance).to receive(:project).twice.and_return(project)
+ expect(instance).to receive(:project).thrice.and_return(project)
participants = instance.participants(user1)
@@ -66,7 +66,7 @@ RSpec.describe Participable do
expect(instance).to receive(:foo).and_return(other)
expect(other).to receive(:bar).and_return(user2)
- expect(instance).to receive(:project).twice.and_return(project)
+ expect(instance).to receive(:project).thrice.and_return(project)
expect(instance.participants(user1)).to eq([user2])
end
@@ -86,7 +86,7 @@ RSpec.describe Participable do
instance = model.new
- expect(instance).to receive(:project).twice.and_return(project)
+ expect(instance).to receive(:project).thrice.and_return(project)
instance.participants(user1)
@@ -138,7 +138,7 @@ RSpec.describe Participable do
allow(instance).to receive_message_chain(:model_name, :element) { 'class' }
expect(instance).to receive(:foo).and_return(user2)
expect(instance).to receive(:bar).and_return(user3)
- expect(instance).to receive(:project).twice.and_return(project)
+ expect(instance).to receive(:project).thrice.and_return(project)
participants = instance.visible_participants(user1)
@@ -159,7 +159,7 @@ RSpec.describe Participable do
allow(instance).to receive_message_chain(:model_name, :element) { 'class' }
allow(instance).to receive(:bar).and_return(user2)
- expect(instance).to receive(:project).twice.and_return(project)
+ expect(instance).to receive(:project).thrice.and_return(project)
expect(instance.visible_participants(user1)).to be_empty
end
diff --git a/spec/models/concerns/pg_full_text_searchable_spec.rb b/spec/models/concerns/pg_full_text_searchable_spec.rb
index 84209999ab2..55e3caf3c4c 100644
--- a/spec/models/concerns/pg_full_text_searchable_spec.rb
+++ b/spec/models/concerns/pg_full_text_searchable_spec.rb
@@ -14,6 +14,8 @@ RSpec.describe PgFullTextSearchable do
belongs_to :project
has_one :search_data, class_name: 'Issues::SearchData'
+ before_validation -> { self.work_item_type_id = ::WorkItems::Type.default_issue_type.id }
+
def persist_pg_full_text_search_vector(search_vector)
Issues::SearchData.upsert({ project_id: project_id, issue_id: id, search_vector: search_vector }, unique_by: %i(project_id issue_id))
end
@@ -185,6 +187,8 @@ RSpec.describe PgFullTextSearchable do
belongs_to :project
has_one :search_data, class_name: 'Issues::SearchData'
+ before_validation -> { self.work_item_type_id = ::WorkItems::Type.default_issue_type.id }
+
def self.name
'Issue'
end
diff --git a/spec/models/concerns/require_email_verification_spec.rb b/spec/models/concerns/require_email_verification_spec.rb
new file mode 100644
index 00000000000..d087b2864f8
--- /dev/null
+++ b/spec/models/concerns/require_email_verification_spec.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe RequireEmailVerification do
+ let_it_be(:model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = 'users'
+
+ devise :lockable
+
+ include RequireEmailVerification
+ end
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:feature_flag_enabled, :two_factor_enabled, :overridden) do
+ false | false | false
+ false | true | false
+ true | false | true
+ true | true | false
+ end
+
+ with_them do
+ let(:instance) { model.new(id: 1) }
+ let(:another_instance) { model.new(id: 2) }
+
+ before do
+ stub_feature_flags(require_email_verification: feature_flag_enabled ? instance : another_instance)
+ allow(instance).to receive(:two_factor_enabled?).and_return(two_factor_enabled)
+ end
+
+ describe '#lock_access!' do
+ subject { instance.lock_access! }
+
+ before do
+ allow(instance).to receive(:save)
+ end
+
+ it 'sends Devise unlock instructions unless overridden and always sets locked_at' do
+ expect(instance).to receive(:send_unlock_instructions).exactly(overridden ? 0 : 1).times
+
+ expect { subject }.to change { instance.locked_at }.from(nil)
+ end
+ end
+
+ describe '#attempts_exceeded?' do
+ subject { instance.send(:attempts_exceeded?) }
+
+ context 'when failed_attempts is LT overridden amount' do
+ before do
+ instance.failed_attempts = 5
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when failed_attempts is GTE overridden amount but LT Devise default amount' do
+ before do
+ instance.failed_attempts = 6
+ end
+
+ it { is_expected.to eq(overridden) }
+ end
+
+ context 'when failed_attempts is GTE Devise default amount' do
+ before do
+ instance.failed_attempts = 10
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
+ describe '#lock_expired?' do
+ subject { instance.send(:lock_expired?) }
+
+ context 'when locked shorter ago than Devise default time' do
+ before do
+ instance.locked_at = 9.minutes.ago
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when locked longer ago than Devise default time but shorter ago than overridden time' do
+ before do
+ instance.locked_at = 11.minutes.ago
+ end
+
+ it { is_expected.to eq(!overridden) }
+ end
+
+ context 'when locked longer ago than overridden time' do
+ before do
+ instance.locked_at = (24.hours + 1.minute).ago
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+ end
+end
diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb
index d7bfcc3f579..a2ce02f4661 100644
--- a/spec/models/concerns/token_authenticatable_spec.rb
+++ b/spec/models/concerns/token_authenticatable_spec.rb
@@ -94,6 +94,7 @@ RSpec.describe ApplicationSetting, 'TokenAuthenticatable' do
end
it { is_expected.to respond_to(:ensure_runners_registration_token) }
+ it { is_expected.to respond_to(:ensure_error_tracking_access_token) }
it { is_expected.to respond_to(:ensure_yet_another_token) }
end
diff --git a/spec/models/container_registry/event_spec.rb b/spec/models/container_registry/event_spec.rb
index e0194a07f46..799d9d4fd1c 100644
--- a/spec/models/container_registry/event_spec.rb
+++ b/spec/models/container_registry/event_spec.rb
@@ -40,16 +40,18 @@ RSpec.describe ContainerRegistry::Event do
subject(:handle!) { described_class.new(raw_event).handle! }
- it 'enqueues a project statistics update' do
- expect(ProjectCacheWorker).to receive(:perform_async).with(project.id, [], [:container_registry_size])
+ shared_examples 'event with project statistics update' do
+ it 'enqueues a project statistics update' do
+ expect(ProjectCacheWorker).to receive(:perform_async).with(project.id, [], [:container_registry_size])
- handle!
- end
+ handle!
+ end
- it 'clears the cache for the namespace container repositories size' do
- expect(Rails.cache).to receive(:delete).with(group.container_repositories_size_cache_key)
+ it 'clears the cache for the namespace container repositories size' do
+ expect(Rails.cache).to receive(:delete).with(group.container_repositories_size_cache_key)
- handle!
+ handle!
+ end
end
shared_examples 'event without project statistics update' do
@@ -60,10 +62,32 @@ RSpec.describe ContainerRegistry::Event do
end
end
+ it_behaves_like 'event with project statistics update'
+
context 'with no target tag' do
let(:target) { super().without('tag') }
it_behaves_like 'event without project statistics update'
+
+ context 'with a target digest' do
+ let(:target) { super().merge('digest' => 'abc123') }
+
+ it_behaves_like 'event without project statistics update'
+ end
+
+ context 'with a delete action' do
+ let(:action) { 'delete' }
+
+ context 'without a target digest' do
+ it_behaves_like 'event without project statistics update'
+ end
+
+ context 'with a target digest' do
+ let(:target) { super().merge('digest' => 'abc123') }
+
+ it_behaves_like 'event with project statistics update'
+ end
+ end
end
context 'with an unsupported action' do
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index 7d0dfad91b2..e35788b1848 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -689,10 +689,29 @@ RSpec.describe ContainerRepository, :aggregate_failures do
it { is_expected.to eq(nil) }
end
- context 'with an old repository' do
+ context 'supports gitlab api on .com with an old repository' do
+ let(:on_com) { true }
let(:created_at) { described_class::MIGRATION_PHASE_1_STARTED_AT - 3.months }
- it { is_expected.to eq(nil) }
+ before do
+ allow(repository.gitlab_api_client).to receive(:supports_gitlab_api?).and_return(true)
+ allow(repository.gitlab_api_client).to receive(:repository_details).with(repository.path, sizing: :self).and_return(response)
+ expect(repository).to receive(:migration_state).and_return(migration_state)
+ end
+
+ context 'with migration_state import_done' do
+ let(:response) { { 'size_bytes' => 12345 } }
+ let(:migration_state) { 'import_done' }
+
+ it { is_expected.to eq(12345) }
+ end
+
+ context 'with migration_state not import_done' do
+ let(:response) { { 'size_bytes' => 12345 } }
+ let(:migration_state) { 'default' }
+
+ it { is_expected.to eq(nil) }
+ end
end
end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index a58d32dfe5d..0a4ee73f3d3 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -139,29 +139,16 @@ RSpec.describe Deployment do
end
end
- it 'executes deployment hooks' do
+ it 'executes Deployments::HooksWorker asynchronously' do
freeze_time do
- expect(deployment).to receive(:execute_hooks).with(Time.current)
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status: 'running',
+ status_changed_at: Time.current)
deployment.run!
end
end
- context 'when `deployment_hooks_skip_worker` flag is disabled' do
- before do
- stub_feature_flags(deployment_hooks_skip_worker: false)
- end
-
- it 'executes Deployments::HooksWorker asynchronously' do
- freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
-
- deployment.run!
- end
- end
- end
-
it 'executes Deployments::DropOlderDeploymentsWorker asynchronously' do
expect(Deployments::DropOlderDeploymentsWorker)
.to receive(:perform_async).once.with(deployment.id)
@@ -189,28 +176,15 @@ RSpec.describe Deployment do
deployment.succeed!
end
- it 'executes deployment hooks' do
+ it 'executes Deployments::HooksWorker asynchronously' do
freeze_time do
- expect(deployment).to receive(:execute_hooks).with(Time.current)
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status: 'success',
+ status_changed_at: Time.current)
deployment.succeed!
end
end
-
- context 'when `deployment_hooks_skip_worker` flag is disabled' do
- before do
- stub_feature_flags(deployment_hooks_skip_worker: false)
- end
-
- it 'executes Deployments::HooksWorker asynchronously' do
- freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
-
- deployment.succeed!
- end
- end
- end
end
context 'when deployment failed' do
@@ -232,28 +206,15 @@ RSpec.describe Deployment do
deployment.drop!
end
- it 'executes deployment hooks' do
+ it 'executes Deployments::HooksWorker asynchronously' do
freeze_time do
- expect(deployment).to receive(:execute_hooks).with(Time.current)
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status: 'failed',
+ status_changed_at: Time.current)
deployment.drop!
end
end
-
- context 'when `deployment_hooks_skip_worker` flag is disabled' do
- before do
- stub_feature_flags(deployment_hooks_skip_worker: false)
- end
-
- it 'executes Deployments::HooksWorker asynchronously' do
- freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
-
- deployment.drop!
- end
- end
- end
end
context 'when deployment was canceled' do
@@ -275,28 +236,15 @@ RSpec.describe Deployment do
deployment.cancel!
end
- it 'executes deployment hooks' do
+ it 'executes Deployments::HooksWorker asynchronously' do
freeze_time do
- expect(deployment).to receive(:execute_hooks).with(Time.current)
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status: 'canceled',
+ status_changed_at: Time.current)
deployment.cancel!
end
end
-
- context 'when `deployment_hooks_skip_worker` flag is disabled' do
- before do
- stub_feature_flags(deployment_hooks_skip_worker: false)
- end
-
- it 'executes Deployments::HooksWorker asynchronously' do
- freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
-
- deployment.cancel!
- end
- end
- end
end
context 'when deployment was skipped' do
@@ -324,12 +272,6 @@ RSpec.describe Deployment do
deployment.skip!
end
end
-
- it 'does not execute deployment hooks' do
- expect(deployment).not_to receive(:execute_hooks)
-
- deployment.skip!
- end
end
context 'when deployment is blocked' do
@@ -353,12 +295,6 @@ RSpec.describe Deployment do
deployment.block!
end
-
- it 'does not execute deployment hooks' do
- expect(deployment).not_to receive(:execute_hooks)
-
- deployment.block!
- end
end
describe 'synching status to Jira' do
@@ -756,6 +692,37 @@ RSpec.describe Deployment do
.to contain_exactly(stop_env_b)
end
end
+
+ context 'When last deployment for environment is a retried build' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:environment_b) { create(:environment, project: project) }
+
+ let(:build_a) do
+ create(:ci_build, :success, project: project, pipeline: pipeline, environment: environment.name)
+ end
+
+ let(:build_b) do
+ create(:ci_build, :success, project: project, pipeline: pipeline, environment: environment_b.name)
+ end
+
+ let!(:deployment_a) do
+ create(:deployment, :success, project: project, environment: environment, deployable: build_a)
+ end
+
+ let!(:deployment_b) do
+ create(:deployment, :success, project: project, environment: environment_b, deployable: build_b)
+ end
+
+ before do
+ # Retry build_b
+ build_b.update!(retried: true)
+
+ # New successful build after retry.
+ create(:ci_build, :success, project: project, pipeline: pipeline, environment: environment_b.name)
+ end
+
+ it { expect(subject_method(environment_b)).not_to be_nil }
+ end
end
end
@@ -1052,30 +1019,11 @@ RSpec.describe Deployment do
expect(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
expect(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
expect(Deployments::ArchiveInProjectWorker).to receive(:perform_async)
+ expect(Deployments::HooksWorker).to receive(:perform_async)
expect(deploy.update_status('success')).to eq(true)
end
- context 'when `deployment_hooks_skip_worker` flag is disabled' do
- before do
- stub_feature_flags(deployment_hooks_skip_worker: false)
- end
-
- it 'schedules `Deployments::HooksWorker` when finishing a deploy' do
- expect(Deployments::HooksWorker).to receive(:perform_async)
-
- deploy.update_status('success')
- end
- end
-
- it 'executes deployment hooks when finishing a deploy' do
- freeze_time do
- expect(deploy).to receive(:execute_hooks).with(Time.current)
-
- deploy.update_status('success')
- end
- end
-
it 'updates finished_at when transitioning to a finished status' do
freeze_time do
deploy.update_status('success')
diff --git a/spec/models/diff_note_spec.rb b/spec/models/diff_note_spec.rb
index f377b34679c..d379ffeee02 100644
--- a/spec/models/diff_note_spec.rb
+++ b/spec/models/diff_note_spec.rb
@@ -260,32 +260,12 @@ RSpec.describe DiffNote do
end
context 'when the discussion was created in the diff' do
- context 'when file_identifier_hash is disabled' do
- before do
- stub_feature_flags(file_identifier_hash: false)
- end
-
- it 'returns correct diff file' do
- diff_file = subject.diff_file
-
- expect(diff_file.old_path).to eq(position.old_path)
- expect(diff_file.new_path).to eq(position.new_path)
- expect(diff_file.diff_refs).to eq(position.diff_refs)
- end
- end
-
- context 'when file_identifier_hash is enabled' do
- before do
- stub_feature_flags(file_identifier_hash: true)
- end
-
- it 'returns correct diff file' do
- diff_file = subject.diff_file
+ it 'returns correct diff file' do
+ diff_file = subject.diff_file
- expect(diff_file.old_path).to eq(position.old_path)
- expect(diff_file.new_path).to eq(position.new_path)
- expect(diff_file.diff_refs).to eq(position.diff_refs)
- end
+ expect(diff_file.old_path).to eq(position.old_path)
+ expect(diff_file.new_path).to eq(position.new_path)
+ expect(diff_file.diff_refs).to eq(position.diff_refs)
end
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index fd89a3a2e22..e3207636bdc 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -34,6 +34,14 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
it { is_expected.to validate_length_of(:external_url).is_at_most(255) }
+ describe 'validation' do
+ it 'does not become invalid record when external_url is empty' do
+ environment = build(:environment, external_url: nil)
+
+ expect(environment).to be_valid
+ end
+ end
+
describe '.before_save' do
it 'ensures environment tier when a new object is created' do
environment = build(:environment, name: 'gprd', tier: nil)
@@ -1672,6 +1680,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
'abcdef' | ChronicDuration::DurationParseError
'' | nil
nil | nil
+ 'never' | nil
end
with_them do
it 'sets correct auto_stop_in' do
@@ -1711,25 +1720,6 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
end
- describe '#elastic_stack_available?' do
- let!(:cluster) { create(:cluster, :project, :provided_by_user, projects: [project]) }
- let!(:deployment) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) }
-
- context 'when integration does not exist' do
- it 'returns false' do
- expect(environment.elastic_stack_available?).to be(false)
- end
- end
-
- context 'when integration is enabled' do
- let!(:integration) { create(:clusters_integrations_elastic_stack, cluster: cluster) }
-
- it 'returns true' do
- expect(environment.elastic_stack_available?).to be(true)
- end
- end
- end
-
describe '#destroy' do
it 'remove the deployment refs from gitaly' do
deployment = create(:deployment, :success, environment: environment, project: project)
diff --git a/spec/models/factories_spec.rb b/spec/models/factories_spec.rb
index 45c3f93e6cf..2993b2aee58 100644
--- a/spec/models/factories_spec.rb
+++ b/spec/models/factories_spec.rb
@@ -104,6 +104,7 @@ RSpec.describe 'factories' do
factories_based_on_view = %i[
postgres_index
postgres_index_bloat_estimate
+ postgres_autovacuum_activity
].to_set.freeze
without_fd, with_fd = FactoryBot.factories
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index d47f43a630d..e8e805b2678 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -42,6 +42,7 @@ RSpec.describe Group do
it { is_expected.to have_many(:organizations).class_name('CustomerRelations::Organization') }
it { is_expected.to have_one(:crm_settings) }
it { is_expected.to have_one(:group_feature) }
+ it { is_expected.to have_one(:harbor_integration) }
describe '#members & #requesters' do
let(:requester) { create(:user) }
@@ -726,7 +727,7 @@ RSpec.describe Group do
context 'when user is a member of private group' do
before do
- private_group.add_user(user, Gitlab::Access::DEVELOPER)
+ private_group.add_member(user, Gitlab::Access::DEVELOPER)
end
it { is_expected.to match_array([private_group, internal_group, group]) }
@@ -736,7 +737,7 @@ RSpec.describe Group do
let!(:private_subgroup) { create(:group, :private, parent: private_group) }
before do
- private_subgroup.add_user(user, Gitlab::Access::DEVELOPER)
+ private_subgroup.add_member(user, Gitlab::Access::DEVELOPER)
end
it { is_expected.to match_array([private_subgroup, internal_group, group]) }
@@ -848,7 +849,7 @@ RSpec.describe Group do
expect(member).to receive(:refresh_member_authorized_projects).with(blocking: true)
end
- group.add_user(user, GroupMember::MAINTAINER)
+ group.add_member(user, GroupMember::MAINTAINER)
expect(group.group_members.maintainers.map(&:user)).to include(user)
end
@@ -858,7 +859,7 @@ RSpec.describe Group do
expect(member).to receive(:refresh_member_authorized_projects).with(blocking: false)
end
- group.add_user(user, GroupMember::MAINTAINER, blocking_refresh: false)
+ group.add_member(user, GroupMember::MAINTAINER, blocking_refresh: false)
end
end
@@ -866,12 +867,12 @@ RSpec.describe Group do
let(:user) { create(:user) }
before do
- group.add_users([user.id], GroupMember::GUEST)
+ group.add_members([user.id], GroupMember::GUEST)
end
it "updates the group permission" do
expect(group.group_members.guests.map(&:user)).to include(user)
- group.add_users([user.id], GroupMember::DEVELOPER)
+ group.add_members([user.id], GroupMember::DEVELOPER)
expect(group.group_members.developers.map(&:user)).to include(user)
expect(group.group_members.guests.map(&:user)).not_to include(user)
end
@@ -880,7 +881,7 @@ RSpec.describe Group do
let!(:project) { create(:project, group: group) }
before do
- group.add_users([create(:user)], :developer, tasks_to_be_done: %w(ci code), tasks_project_id: project.id)
+ group.add_members([create(:user)], :developer, tasks_to_be_done: %w(ci code), tasks_project_id: project.id)
end
it 'creates a member_task with the correct attributes', :aggregate_failures do
@@ -896,7 +897,7 @@ RSpec.describe Group do
let(:user) { create(:user) }
before do
- group.add_user(user, GroupMember::MAINTAINER)
+ group.add_member(user, GroupMember::MAINTAINER)
end
it "is true if avatar is image" do
@@ -993,7 +994,7 @@ RSpec.describe Group do
context 'there is also a project_bot owner' do
before do
- group.add_user(create(:user, :project_bot), GroupMember::OWNER)
+ group.add_member(create(:user, :project_bot), GroupMember::OWNER)
end
it { expect(group.last_owner?(@members[:owner])).to be_truthy }
@@ -1024,7 +1025,7 @@ RSpec.describe Group do
let(:member) { blocked_user.group_members.last }
before do
- group.add_user(blocked_user, GroupMember::OWNER)
+ group.add_member(blocked_user, GroupMember::OWNER)
end
context 'when last_blocked_owner is set' do
@@ -1050,7 +1051,7 @@ RSpec.describe Group do
context 'with another active owner' do
before do
- group.add_user(create(:user), GroupMember::OWNER)
+ group.add_member(create(:user), GroupMember::OWNER)
end
it { expect(group.member_last_blocked_owner?(member)).to be(false) }
@@ -1058,7 +1059,7 @@ RSpec.describe Group do
context 'with 2 blocked owners' do
before do
- group.add_user(create(:user, :blocked), GroupMember::OWNER)
+ group.add_member(create(:user, :blocked), GroupMember::OWNER)
end
it { expect(group.member_last_blocked_owner?(member)).to be(false) }
@@ -1082,7 +1083,7 @@ RSpec.describe Group do
describe '#single_blocked_owner?' do
context 'when there is only one blocked owner' do
before do
- group.add_user(blocked_user, GroupMember::OWNER)
+ group.add_member(blocked_user, GroupMember::OWNER)
end
it 'returns true' do
@@ -1094,8 +1095,8 @@ RSpec.describe Group do
let_it_be(:blocked_user_2) { create(:user, :blocked) }
before do
- group.add_user(blocked_user, GroupMember::OWNER)
- group.add_user(blocked_user_2, GroupMember::OWNER)
+ group.add_member(blocked_user, GroupMember::OWNER)
+ group.add_member(blocked_user_2, GroupMember::OWNER)
end
it 'returns true' do
@@ -1114,8 +1115,8 @@ RSpec.describe Group do
let_it_be(:user) { create(:user) }
before do
- group.add_user(blocked_user, GroupMember::OWNER)
- group.add_user(user, GroupMember::OWNER)
+ group.add_member(blocked_user, GroupMember::OWNER)
+ group.add_member(user, GroupMember::OWNER)
end
it 'has only blocked owners' do
@@ -1129,7 +1130,7 @@ RSpec.describe Group do
context 'when there is only one owner' do
let!(:owner) do
- group.add_user(user, GroupMember::OWNER)
+ group.add_member(user, GroupMember::OWNER)
end
it 'returns the owner' do
@@ -1138,7 +1139,7 @@ RSpec.describe Group do
context 'and there is also a project_bot owner' do
before do
- group.add_user(create(:user, :project_bot), GroupMember::OWNER)
+ group.add_member(create(:user, :project_bot), GroupMember::OWNER)
end
it 'returns only the human owner' do
@@ -1151,11 +1152,11 @@ RSpec.describe Group do
let_it_be(:user_2) { create(:user) }
let!(:owner) do
- group.add_user(user, GroupMember::OWNER)
+ group.add_member(user, GroupMember::OWNER)
end
let!(:owner2) do
- group.add_user(user_2, GroupMember::OWNER)
+ group.add_member(user_2, GroupMember::OWNER)
end
it 'returns both owners' do
@@ -1164,7 +1165,7 @@ RSpec.describe Group do
context 'and there is also a project_bot owner' do
before do
- group.add_user(create(:user, :project_bot), GroupMember::OWNER)
+ group.add_member(create(:user, :project_bot), GroupMember::OWNER)
end
it 'returns only the human owners' do
@@ -1186,7 +1187,7 @@ RSpec.describe Group do
let(:member) { group.members.last }
before do
- group.add_user(user, GroupMember::OWNER)
+ group.add_member(user, GroupMember::OWNER)
end
context 'when last_owner is set' do
@@ -1284,11 +1285,11 @@ RSpec.describe Group do
requester: create(:user)
}
- group.add_user(members[:owner], GroupMember::OWNER)
- group.add_user(members[:maintainer], GroupMember::MAINTAINER)
- group.add_user(members[:developer], GroupMember::DEVELOPER)
- group.add_user(members[:reporter], GroupMember::REPORTER)
- group.add_user(members[:guest], GroupMember::GUEST)
+ group.add_member(members[:owner], GroupMember::OWNER)
+ group.add_member(members[:maintainer], GroupMember::MAINTAINER)
+ group.add_member(members[:developer], GroupMember::DEVELOPER)
+ group.add_member(members[:reporter], GroupMember::REPORTER)
+ group.add_member(members[:guest], GroupMember::GUEST)
group.request_access(members[:requester])
members
@@ -1464,8 +1465,8 @@ RSpec.describe Group do
describe '#direct_members' do
let_it_be(:group) { create(:group, :nested) }
- let_it_be(:maintainer) { group.parent.add_user(create(:user), GroupMember::MAINTAINER) }
- let_it_be(:developer) { group.add_user(create(:user), GroupMember::DEVELOPER) }
+ let_it_be(:maintainer) { group.parent.add_member(create(:user), GroupMember::MAINTAINER) }
+ let_it_be(:developer) { group.add_member(create(:user), GroupMember::DEVELOPER) }
it 'does not return members of the parent' do
expect(group.direct_members).not_to include(maintainer)
@@ -1491,8 +1492,8 @@ RSpec.describe Group do
shared_examples_for 'members_with_parents' do
let!(:group) { create(:group, :nested) }
- let!(:maintainer) { group.parent.add_user(create(:user), GroupMember::MAINTAINER) }
- let!(:developer) { group.add_user(create(:user), GroupMember::DEVELOPER) }
+ let!(:maintainer) { group.parent.add_member(create(:user), GroupMember::MAINTAINER) }
+ let!(:developer) { group.add_member(create(:user), GroupMember::DEVELOPER) }
let!(:pending_maintainer) { create(:group_member, :awaiting, :maintainer, group: group.parent) }
let!(:pending_developer) { create(:group_member, :awaiting, :developer, group: group) }
@@ -1603,9 +1604,9 @@ RSpec.describe Group do
context 'members-related methods' do
let!(:group) { create(:group, :nested) }
let!(:sub_group) { create(:group, parent: group) }
- let!(:maintainer) { group.parent.add_user(create(:user), GroupMember::MAINTAINER) }
- let!(:developer) { group.add_user(create(:user), GroupMember::DEVELOPER) }
- let!(:other_developer) { group.add_user(create(:user), GroupMember::DEVELOPER) }
+ let!(:maintainer) { group.parent.add_member(create(:user), GroupMember::MAINTAINER) }
+ let!(:developer) { group.add_member(create(:user), GroupMember::DEVELOPER) }
+ let!(:other_developer) { group.add_member(create(:user), GroupMember::DEVELOPER) }
describe '#direct_and_indirect_members' do
it 'returns parents members' do
@@ -1619,7 +1620,7 @@ RSpec.describe Group do
end
describe '#direct_and_indirect_members_with_inactive' do
- let!(:maintainer_blocked) { group.parent.add_user(create(:user, :blocked), GroupMember::MAINTAINER) }
+ let!(:maintainer_blocked) { group.parent.add_member(create(:user, :blocked), GroupMember::MAINTAINER) }
it 'returns parents members' do
expect(group.direct_and_indirect_members_with_inactive).to include(developer)
@@ -1795,8 +1796,8 @@ RSpec.describe Group do
maintainer = create(:user)
developer = create(:user)
- group.add_user(maintainer, GroupMember::MAINTAINER)
- group.add_user(developer, GroupMember::DEVELOPER)
+ group.add_member(maintainer, GroupMember::MAINTAINER)
+ group.add_member(developer, GroupMember::DEVELOPER)
expect(group.user_ids_for_project_authorizations)
.to include(maintainer.id, developer.id)
@@ -1847,7 +1848,7 @@ RSpec.describe Group do
context 'group membership' do
before do
- group.add_user(user, GroupMember::OWNER)
+ group.add_member(user, GroupMember::OWNER)
end
it 'is called when require_two_factor_authentication is changed' do
@@ -1870,7 +1871,7 @@ RSpec.describe Group do
it 'calls #update_two_factor_requirement on each group member' do
other_user = create(:user)
- group.add_user(other_user, GroupMember::OWNER)
+ group.add_member(other_user, GroupMember::OWNER)
calls = 0
allow_any_instance_of(User).to receive(:update_two_factor_requirement) do
@@ -1885,7 +1886,7 @@ RSpec.describe Group do
context 'sub groups and projects' do
it 'enables two_factor_requirement for group member' do
- group.add_user(user, GroupMember::OWNER)
+ group.add_member(user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: true)
@@ -1899,7 +1900,7 @@ RSpec.describe Group do
context 'two_factor_requirement is also enabled for ancestor group' do
it 'enables two_factor_requirement for subgroup member' do
subgroup = create(:group, :nested, parent: group)
- subgroup.add_user(indirect_user, GroupMember::OWNER)
+ subgroup.add_member(indirect_user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: true)
@@ -1910,7 +1911,7 @@ RSpec.describe Group do
context 'two_factor_requirement is disabled for ancestor group' do
it 'enables two_factor_requirement for subgroup member' do
subgroup = create(:group, :nested, parent: group, require_two_factor_authentication: true)
- subgroup.add_user(indirect_user, GroupMember::OWNER)
+ subgroup.add_member(indirect_user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: false)
@@ -1919,7 +1920,7 @@ RSpec.describe Group do
it 'enable two_factor_requirement for ancestor group member' do
ancestor_group = create(:group)
- ancestor_group.add_user(indirect_user, GroupMember::OWNER)
+ ancestor_group.add_member(indirect_user, GroupMember::OWNER)
group.update!(parent: ancestor_group)
group.update!(require_two_factor_authentication: true)
@@ -1933,7 +1934,7 @@ RSpec.describe Group do
context 'two_factor_requirement is enabled for ancestor group' do
it 'enables two_factor_requirement for subgroup member' do
subgroup = create(:group, :nested, parent: group)
- subgroup.add_user(indirect_user, GroupMember::OWNER)
+ subgroup.add_member(indirect_user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: true)
@@ -1944,7 +1945,7 @@ RSpec.describe Group do
context 'two_factor_requirement is also disabled for ancestor group' do
it 'disables two_factor_requirement for subgroup member' do
subgroup = create(:group, :nested, parent: group)
- subgroup.add_user(indirect_user, GroupMember::OWNER)
+ subgroup.add_member(indirect_user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: false)
@@ -1954,7 +1955,7 @@ RSpec.describe Group do
it 'disables two_factor_requirement for ancestor group member' do
ancestor_group = create(:group, require_two_factor_authentication: false)
indirect_user.update!(require_two_factor_authentication_from_group: true)
- ancestor_group.add_user(indirect_user, GroupMember::OWNER)
+ ancestor_group.add_member(indirect_user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: false)
diff --git a/spec/models/hooks/project_hook_spec.rb b/spec/models/hooks/project_hook_spec.rb
index 4253686b843..923a6f92424 100644
--- a/spec/models/hooks/project_hook_spec.rb
+++ b/spec/models/hooks/project_hook_spec.rb
@@ -15,6 +15,19 @@ RSpec.describe ProjectHook do
subject { build(:project_hook, project: create(:project)) }
end
+ describe '.for_projects' do
+ it 'finds related project hooks' do
+ hook_a = create(:project_hook)
+ hook_b = create(:project_hook)
+ hook_c = create(:project_hook)
+
+ expect(described_class.for_projects([hook_a.project, hook_b.project]))
+ .to contain_exactly(hook_a, hook_b)
+ expect(described_class.for_projects(hook_c.project))
+ .to contain_exactly(hook_c)
+ end
+ end
+
describe '.push_hooks' do
it 'returns hooks for push events only' do
hook = create(:project_hook, push_events: true)
@@ -50,4 +63,62 @@ RSpec.describe ProjectHook do
)
end
end
+
+ describe '#update_last_failure', :clean_gitlab_redis_shared_state do
+ let_it_be(:hook) { create(:project_hook) }
+
+ it 'is a method of this class' do
+ expect { hook.update_last_failure }.not_to raise_error
+ end
+
+ context 'when the hook is executable' do
+ it 'does not update the state' do
+ expect(Gitlab::Redis::SharedState).not_to receive(:with)
+
+ hook.update_last_failure
+ end
+ end
+
+ context 'when the hook is failed' do
+ before do
+ allow(hook).to receive(:executable?).and_return(false)
+ end
+
+ def last_failure
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.get("web_hooks:last_failure:project-#{hook.project.id}")
+ end
+ end
+
+ context 'there is no prior value', :freeze_time do
+ it 'updates the state' do
+ expect { hook.update_last_failure }.to change { last_failure }.to(Time.current)
+ end
+ end
+
+ context 'there is a prior value, from before now' do
+ it 'updates the state' do
+ the_future = 1.minute.from_now
+
+ hook.update_last_failure
+
+ travel_to(the_future) do
+ expect { hook.update_last_failure }.to change { last_failure }.to(the_future.iso8601)
+ end
+ end
+ end
+
+ context 'there is a prior value, from after now' do
+ it 'does not update the state' do
+ the_past = 1.minute.ago
+
+ hook.update_last_failure
+
+ travel_to(the_past) do
+ expect { hook.update_last_failure }.not_to change { last_failure }
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index fb4d1cee606..9faa5e1567c 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -78,6 +78,32 @@ RSpec.describe WebHook do
expect(hook.url).to eq('https://example.com')
end
+
+ context 'when there are URL variables' do
+ subject { hook }
+
+ before do
+ hook.url_variables = { 'one' => 'a', 'two' => 'b' }
+ end
+
+ it { is_expected.to allow_value('http://example.com').for(:url) }
+ it { is_expected.to allow_value('http://example.com/{one}/{two}').for(:url) }
+ it { is_expected.to allow_value('http://example.com/{one}').for(:url) }
+ it { is_expected.to allow_value('http://example.com/{two}').for(:url) }
+ it { is_expected.to allow_value('http://user:s3cret@example.com/{two}').for(:url) }
+ it { is_expected.to allow_value('http://{one}:{two}@example.com').for(:url) }
+
+ it { is_expected.not_to allow_value('http://example.com/{one}/{two}/{three}').for(:url) }
+ it { is_expected.not_to allow_value('http://example.com/{foo}').for(:url) }
+ it { is_expected.not_to allow_value('http:{user}:{pwd}//example.com/{foo}').for(:url) }
+
+ it 'mentions all missing variable names' do
+ hook.url = 'http://example.com/{one}/{foo}/{two}/{three}'
+
+ expect(hook).to be_invalid
+ expect(hook.errors[:url].to_sentence).to eq "Invalid URL template. Missing keys: [\"foo\", \"three\"]"
+ end
+ end
end
describe 'token' do
@@ -161,8 +187,8 @@ RSpec.describe WebHook do
end
end
- describe '.executable' do
- let(:not_executable) do
+ describe '.executable/.disabled' do
+ let!(:not_executable) do
[
[0, Time.current],
[0, 1.minute.from_now],
@@ -176,7 +202,7 @@ RSpec.describe WebHook do
end
end
- let(:executables) do
+ let!(:executables) do
[
[0, nil],
[0, 1.day.ago],
@@ -191,6 +217,7 @@ RSpec.describe WebHook do
it 'finds the correct set of project hooks' do
expect(described_class.where(project_id: project.id).executable).to match_array executables
+ expect(described_class.where(project_id: project.id).disabled).to match_array not_executable
end
context 'when the feature flag is not enabled' do
@@ -198,7 +225,7 @@ RSpec.describe WebHook do
stub_feature_flags(web_hooks_disable_failed: false)
end
- it 'is the same as all' do
+ specify 'enabled is the same as all' do
expect(described_class.where(project_id: project.id).executable).to match_array(executables + not_executable)
end
end
@@ -559,4 +586,60 @@ RSpec.describe WebHook do
expect(hook.to_json(unsafe_serialization_hash: true)).not_to include('encrypted_url_variables')
end
end
+
+ describe '#interpolated_url' do
+ subject(:hook) { build(:project_hook, project: project) }
+
+ context 'when the hook URL does not contain variables' do
+ before do
+ hook.url = 'http://example.com'
+ end
+
+ it { is_expected.to have_attributes(interpolated_url: hook.url) }
+ end
+
+ it 'is not vulnerable to malicious input' do
+ hook.url = 'something%{%<foo>2147483628G}'
+ hook.url_variables = { 'foo' => '1234567890.12345678' }
+
+ expect(hook).to have_attributes(interpolated_url: hook.url)
+ end
+
+ context 'when the hook URL contains variables' do
+ before do
+ hook.url = 'http://example.com/{path}/resource?token={token}'
+ hook.url_variables = { 'path' => 'abc', 'token' => 'xyz' }
+ end
+
+ it { is_expected.to have_attributes(interpolated_url: 'http://example.com/abc/resource?token=xyz') }
+
+ context 'when a variable is missing' do
+ before do
+ hook.url_variables = { 'path' => 'present' }
+ end
+
+ it 'raises an error' do
+ # We expect validations to prevent this entirely - this is not user-error
+ expect { hook.interpolated_url }
+ .to raise_error(described_class::InterpolationError, include('Missing key token'))
+ end
+ end
+
+ context 'when the URL appears to include percent formatting' do
+ before do
+ hook.url = 'http://example.com/%{path}/resource?token=%{token}'
+ end
+
+ it 'succeeds, interpolates correctly' do
+ expect(hook.interpolated_url).to eq 'http://example.com/%abc/resource?token=%xyz'
+ end
+ end
+ end
+ end
+
+ describe '#update_last_failure' do
+ it 'is a method of this class' do
+ expect { described_class.new.update_last_failure }.not_to raise_error
+ end
+ end
end
diff --git a/spec/models/incident_management/issuable_escalation_status_spec.rb b/spec/models/incident_management/issuable_escalation_status_spec.rb
index f956be3a04e..39d1fb325f5 100644
--- a/spec/models/incident_management/issuable_escalation_status_spec.rb
+++ b/spec/models/incident_management/issuable_escalation_status_spec.rb
@@ -11,7 +11,9 @@ RSpec.describe IncidentManagement::IssuableEscalationStatus do
describe 'associations' do
it { is_expected.to belong_to(:issue) }
- it { is_expected.to have_one(:project).through(:issue) }
+ it do
+ is_expected.to have_one(:project).through(:issue).inverse_of(:incident_management_issuable_escalation_statuses)
+ end
end
describe 'validations' do
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 038018fbd0c..86074765c7b 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -799,7 +799,7 @@ RSpec.describe Integration do
shared_examples '#api_field_names' do
it 'filters out secret fields' do
- safe_fields = %w[some_safe_field safe_field url trojan_gift]
+ safe_fields = %w[some_safe_field safe_field url trojan_gift api_only_field]
expect(fake_integration.new).to have_attributes(
api_field_names: match_array(safe_fields)
@@ -807,6 +807,12 @@ RSpec.describe Integration do
end
end
+ shared_examples '#form_fields' do
+ it 'filters out API only fields' do
+ expect(fake_integration.new.form_fields.pluck(:name)).not_to include('api_only_field')
+ end
+ end
+
context 'when the class overrides #fields' do
let(:fake_integration) do
Class.new(Integration) do
@@ -824,7 +830,8 @@ RSpec.describe Integration do
{ name: 'safe_field' },
{ name: 'url' },
{ name: 'trojan_horse', type: 'password' },
- { name: 'trojan_gift', type: 'gift' }
+ { name: 'trojan_gift', type: 'text' },
+ { name: 'api_only_field', api_only: true }
].shuffle
end
end
@@ -832,6 +839,7 @@ RSpec.describe Integration do
it_behaves_like '#fields'
it_behaves_like '#api_field_names'
+ it_behaves_like '#form_fields'
end
context 'when the class uses the field DSL' do
@@ -849,12 +857,14 @@ RSpec.describe Integration do
field :safe_field
field :url
field :trojan_horse, type: 'password'
- field :trojan_gift, type: 'gift'
+ field :trojan_gift, type: 'text'
+ field :api_only_field, api_only: true
end
end
it_behaves_like '#fields'
it_behaves_like '#api_field_names'
+ it_behaves_like '#form_fields'
end
end
@@ -1051,11 +1061,9 @@ RSpec.describe Integration do
field :bar, type: 'password'
field :password
- field :with_help,
- help: -> { 'help' }
-
- field :a_number,
- type: 'number'
+ field :with_help, help: -> { 'help' }
+ field :select, type: 'select'
+ field :boolean, type: 'checkbox'
end
end
@@ -1084,6 +1092,16 @@ RSpec.describe Integration do
expect(integration).to be_foo_p_changed
end
+ it 'provides boolean accessors for checkbox fields' do
+ expect(integration).to respond_to(:boolean)
+ expect(integration).to respond_to(:boolean?)
+
+ expect(integration).not_to respond_to(:foo?)
+ expect(integration).not_to respond_to(:bar?)
+ expect(integration).not_to respond_to(:password?)
+ expect(integration).not_to respond_to(:select?)
+ end
+
it 'provides data fields' do
integration.foo_dt = 3
expect(integration.foo_dt).to eq 3
@@ -1093,21 +1111,24 @@ RSpec.describe Integration do
it 'registers fields in the fields list' do
expect(integration.fields.pluck(:name)).to match_array %w[
- foo foo_p foo_dt bar password with_help a_number
+ foo foo_p foo_dt bar password with_help select boolean
]
expect(integration.api_field_names).to match_array %w[
- foo foo_p foo_dt with_help a_number
+ foo foo_p foo_dt with_help select boolean
]
end
specify 'fields have expected attributes' do
expect(integration.fields).to include(
have_attributes(name: 'foo', type: 'text'),
+ have_attributes(name: 'foo_p', type: 'text'),
+ have_attributes(name: 'foo_dt', type: 'text'),
have_attributes(name: 'bar', type: 'password'),
have_attributes(name: 'password', type: 'password'),
- have_attributes(name: 'a_number', type: 'number'),
- have_attributes(name: 'with_help', help: 'help')
+ have_attributes(name: 'with_help', help: 'help'),
+ have_attributes(name: 'select', type: 'select'),
+ have_attributes(name: 'boolean', type: 'checkbox')
)
end
end
@@ -1115,11 +1136,12 @@ RSpec.describe Integration do
describe 'boolean_accessor' do
let(:klass) do
Class.new(Integration) do
+ prop_accessor :test_value
boolean_accessor :test_value
end
end
- let(:integration) { klass.new(properties: { test_value: input }) }
+ let(:integration) { klass.new(test_value: input) }
where(:input, :method_result, :predicate_method_result) do
true | true | true
@@ -1149,6 +1171,35 @@ RSpec.describe Integration do
test_value: be(method_result),
test_value?: be(predicate_method_result)
)
+
+ # Make sure the original value is stored correctly
+ expect(integration.send(:test_value_before_type_cast)).to eq(input)
+ expect(integration.properties).to include('test_value' => input)
+ end
+
+ context 'when using data fields' do
+ let(:klass) do
+ Class.new(Integration) do
+ field :project_url, storage: :data_fields, type: 'checkbox'
+
+ def data_fields
+ issue_tracker_data || self.build_issue_tracker_data
+ end
+ end
+ end
+
+ let(:integration) { klass.new(project_url: input) }
+
+ it 'has the correct value' do
+ expect(integration).to have_attributes(
+ project_url: be(method_result),
+ project_url?: be(predicate_method_result)
+ )
+
+ # Make sure the original value is stored correctly
+ expect(integration.send(:project_url_before_type_cast)).to eq(input == false ? 'false' : input)
+ expect(integration.properties).not_to include('project_url')
+ end
end
end
@@ -1160,6 +1211,24 @@ RSpec.describe Integration do
test_value?: be(false)
)
end
+
+ context 'when getter is not defined' do
+ let(:input) { true }
+ let(:klass) do
+ Class.new(Integration) do
+ boolean_accessor :test_value
+ end
+ end
+
+ it 'defines a prop_accessor' do
+ expect(integration).to have_attributes(
+ test_value: true,
+ test_value?: true
+ )
+
+ expect(integration.properties['test_value']).to be(true)
+ end
+ end
end
describe '#attributes' do
@@ -1218,7 +1287,6 @@ RSpec.describe Integration do
it 'queues a Integrations::ExecuteWorker' do
expect(Integrations::ExecuteWorker).to receive(:perform_async).with(integration.id, data)
- expect(ProjectServiceWorker).not_to receive(:perform_async)
async_execute
end
@@ -1232,18 +1300,5 @@ RSpec.describe Integration do
async_execute
end
end
-
- context 'when the FF :rename_integration_workers is disabled' do
- before do
- stub_feature_flags(rename_integrations_workers: false)
- end
-
- it 'queues a ProjectServiceWorker' do
- expect(ProjectServiceWorker).to receive(:perform_async).with(integration.id, data)
- expect(Integrations::ExecuteWorker).not_to receive(:perform_async)
-
- async_execute
- end
- end
end
end
diff --git a/spec/models/integrations/base_chat_notification_spec.rb b/spec/models/integrations/base_chat_notification_spec.rb
index 672d8de1e14..eb503e501d6 100644
--- a/spec/models/integrations/base_chat_notification_spec.rb
+++ b/spec/models/integrations/base_chat_notification_spec.rb
@@ -285,4 +285,22 @@ RSpec.describe Integrations::BaseChatNotification do
expect { subject.webhook_placeholder }.to raise_error(NotImplementedError)
end
end
+
+ describe '#event_channel_name' do
+ it 'returns the channel field name for the given event' do
+ expect(subject.event_channel_name(:event)).to eq('event_channel')
+ end
+ end
+
+ describe '#event_channel_value' do
+ it 'returns the channel field value for the given event' do
+ subject.push_channel = '#pushes'
+
+ expect(subject.event_channel_value(:push)).to eq('#pushes')
+ end
+
+ it 'raises an error for unsupported events' do
+ expect { subject.event_channel_value(:foo) }.to raise_error(NoMethodError)
+ end
+ end
end
diff --git a/spec/models/integrations/chat_message/deployment_message_spec.rb b/spec/models/integrations/chat_message/deployment_message_spec.rb
index 6bcd29c0a00..8da27ef5aa0 100644
--- a/spec/models/integrations/chat_message/deployment_message_spec.rb
+++ b/spec/models/integrations/chat_message/deployment_message_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Integrations::ChatMessage::DeploymentMessage do
let_it_be(:deployment) { create(:deployment, status: :success, deployable: ci_build, environment: environment, project: project, user: user, sha: commit.sha) }
let(:args) do
- Gitlab::DataBuilder::Deployment.build(deployment, Time.current)
+ Gitlab::DataBuilder::Deployment.build(deployment, 'success', Time.current)
end
it_behaves_like Integrations::ChatMessage
diff --git a/spec/models/integrations/datadog_spec.rb b/spec/models/integrations/datadog_spec.rb
index cfc44b22a84..47f916e8457 100644
--- a/spec/models/integrations/datadog_spec.rb
+++ b/spec/models/integrations/datadog_spec.rb
@@ -240,4 +240,20 @@ RSpec.describe Integrations::Datadog do
end
end
end
+
+ describe '#fields' do
+ it 'includes the archive_trace_events field' do
+ expect(instance.fields).to include(have_attributes(name: 'archive_trace_events'))
+ end
+
+ context 'when the FF :datadog_integration_logs_collection is disabled' do
+ before do
+ stub_feature_flags(datadog_integration_logs_collection: false)
+ end
+
+ it 'does not include the archive_trace_events field' do
+ expect(instance.fields).not_to include(have_attributes(name: 'archive_trace_events'))
+ end
+ end
+ end
end
diff --git a/spec/models/integrations/field_spec.rb b/spec/models/integrations/field_spec.rb
index 6b1ce7fcbde..642fb1fbf7f 100644
--- a/spec/models/integrations/field_spec.rb
+++ b/spec/models/integrations/field_spec.rb
@@ -14,6 +14,37 @@ RSpec.describe ::Integrations::Field do
end
end
+ describe '#initialize' do
+ it 'sets type password for secret names' do
+ attrs[:name] = 'token'
+ attrs[:type] = 'text'
+
+ expect(field[:type]).to eq('password')
+ end
+
+ it 'uses the given type for other names' do
+ attrs[:name] = 'field'
+ attrs[:type] = 'select'
+
+ expect(field[:type]).to eq('select')
+ end
+
+ it 'raises an error if an invalid attribute is given' do
+ attrs[:foo] = 'foo'
+ attrs[:bar] = 'bar'
+ attrs[:name] = 'name'
+ attrs[:type] = 'text'
+
+ expect { field }.to raise_error(ArgumentError, "Invalid attributes [:foo, :bar]")
+ end
+
+ it 'raises an error if an invalid type is given' do
+ attrs[:type] = 'other'
+
+ expect { field }.to raise_error(ArgumentError, 'Invalid type "other"')
+ end
+ end
+
describe '#name' do
before do
attrs[:name] = :foo
@@ -59,7 +90,7 @@ RSpec.describe ::Integrations::Field do
it 'has the correct default' do
expect(field[name]).to have_correct_default
- expect(field.send(name)).to have_correct_default
+ expect(field.public_send(name)).to have_correct_default
end
end
@@ -69,32 +100,66 @@ RSpec.describe ::Integrations::Field do
end
it 'is known' do
+ next if name == :type
+
expect(field[name]).to eq(:known)
- expect(field.send(name)).to eq(:known)
+ expect(field.public_send(name)).to eq(:known)
end
end
context 'when set to a dynamic value' do
it 'is computed' do
+ next if name == :type
+
attrs[name] = -> { Time.current }
start = Time.current
travel_to(start + 1.minute) do
expect(field[name]).to be_after(start)
- expect(field.send(name)).to be_after(start)
+ expect(field.public_send(name)).to be_after(start)
end
end
it 'is executed in the class scope' do
+ next if name == :type
+
attrs[name] = -> { default_placeholder }
expect(field[name]).to eq('my placeholder')
- expect(field.send(name)).to eq('my placeholder')
+ expect(field.public_send(name)).to eq('my placeholder')
end
end
end
end
+ described_class::BOOLEAN_ATTRIBUTES.each do |name|
+ describe "##{name}?" do
+ it 'returns true if the value is truthy' do
+ attrs[name] = ''
+ expect(field.public_send("#{name}?")).to be(true)
+ end
+
+ it 'returns false if the value is falsey' do
+ attrs[name] = nil
+ expect(field.public_send("#{name}?")).to be(false)
+ end
+ end
+ end
+
+ described_class::TYPES.each do |type|
+ describe "##{type}?" do
+ it 'returns true if the type matches' do
+ attrs[:type] = type
+ expect(field.public_send("#{type}?")).to be(true)
+ end
+
+ it 'returns false if the type does not match' do
+ attrs[:type] = (described_class::TYPES - [type]).first
+ expect(field.public_send("#{type}?")).to be(false)
+ end
+ end
+ end
+
describe '#secret?' do
context 'when empty' do
it { is_expected.not_to be_secret }
diff --git a/spec/models/integrations/harbor_spec.rb b/spec/models/integrations/harbor_spec.rb
index 9e3d4b524a6..5d8597969a1 100644
--- a/spec/models/integrations/harbor_spec.rb
+++ b/spec/models/integrations/harbor_spec.rb
@@ -19,6 +19,14 @@ RSpec.describe Integrations::Harbor do
it { is_expected.to allow_value('helloworld').for(:password) }
end
+ describe 'url' do
+ subject { build(:harbor_integration) }
+
+ it { is_expected.not_to allow_value('https://192.168.1.1').for(:url) }
+ it { is_expected.not_to allow_value('https://127.0.0.1').for(:url) }
+ it { is_expected.to allow_value('https://demo.goharbor.io').for(:url)}
+ end
+
describe '#fields' do
it 'returns custom fields' do
expect(harbor_integration.fields.pluck(:name)).to eq(%w[url project_name username password])
diff --git a/spec/models/integrations/irker_spec.rb b/spec/models/integrations/irker_spec.rb
index 16487aa36e7..e98b8b54e03 100644
--- a/spec/models/integrations/irker_spec.rb
+++ b/spec/models/integrations/irker_spec.rb
@@ -76,19 +76,5 @@ RSpec.describe Integrations::Irker do
ensure
conn.close if conn
end
-
- context 'when the FF :rename_integrations_workers is disabled' do
- before do
- stub_feature_flags(rename_integrations_workers: false)
- end
-
- it 'queues a IrkerWorker' do
- expect(::IrkerWorker).to receive(:perform_async)
- .with(project.id, irker.channels, colorize_messages, sample_data, irker.settings)
- expect(Integrations::IrkerWorker).not_to receive(:perform_async)
-
- irker.execute(sample_data)
- end
- end
end
end
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index 28d97b74adb..2a994540bd3 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -164,7 +164,7 @@ RSpec.describe Integrations::Jira do
subject(:fields) { integration.fields }
it 'returns custom fields' do
- expect(fields.pluck(:name)).to eq(%w[url api_url username password])
+ expect(fields.pluck(:name)).to eq(%w[url api_url username password jira_issue_transition_id])
end
end
diff --git a/spec/models/integrations/prometheus_spec.rb b/spec/models/integrations/prometheus_spec.rb
index fbeaebfd807..ae965ed78d1 100644
--- a/spec/models/integrations/prometheus_spec.rb
+++ b/spec/models/integrations/prometheus_spec.rb
@@ -475,47 +475,4 @@ RSpec.describe Integrations::Prometheus, :use_clean_rails_memory_store_caching,
end
end
end
-
- describe '#fields' do
- let(:expected_fields) do
- [
- {
- type: 'checkbox',
- name: 'manual_configuration',
- title: s_('PrometheusService|Active'),
- help: s_('PrometheusService|Select this checkbox to override the auto configuration settings with your own settings.'),
- required: true
- },
- {
- type: 'text',
- name: 'api_url',
- title: 'API URL',
- placeholder: s_('PrometheusService|https://prometheus.example.com/'),
- help: s_('PrometheusService|The Prometheus API base URL.'),
- required: true
- },
- {
- type: 'text',
- name: 'google_iap_audience_client_id',
- title: 'Google IAP Audience Client ID',
- placeholder: s_('PrometheusService|IAP_CLIENT_ID.apps.googleusercontent.com'),
- help: s_('PrometheusService|The ID of the IAP-secured resource.'),
- autocomplete: 'off',
- required: false
- },
- {
- type: 'textarea',
- name: 'google_iap_service_account_json',
- title: 'Google IAP Service Account JSON',
- placeholder: s_('PrometheusService|{ "type": "service_account", "project_id": ... }'),
- help: s_('PrometheusService|The contents of the credentials.json file of your service account.'),
- required: false
- }
- ]
- end
-
- it 'returns fields' do
- expect(integration.fields).to eq(expected_fields)
- end
- end
end
diff --git a/spec/models/integrations/slack_spec.rb b/spec/models/integrations/slack_spec.rb
index 3997d69f947..5801a4c3749 100644
--- a/spec/models/integrations/slack_spec.rb
+++ b/spec/models/integrations/slack_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe Integrations::Slack do
context 'deployment notification' do
let_it_be(:deployment) { create(:deployment, user: user) }
- let(:data) { Gitlab::DataBuilder::Deployment.build(deployment, Time.current) }
+ let(:data) { Gitlab::DataBuilder::Deployment.build(deployment, deployment.status, Time.current) }
it_behaves_like 'increases the usage data counter', 'i_ecosystem_slack_service_deployment_notification'
end
diff --git a/spec/models/integrations/youtrack_spec.rb b/spec/models/integrations/youtrack_spec.rb
index f6a9dd8ef37..618ebcbb76a 100644
--- a/spec/models/integrations/youtrack_spec.rb
+++ b/spec/models/integrations/youtrack_spec.rb
@@ -37,4 +37,10 @@ RSpec.describe Integrations::Youtrack do
expect(described_class.reference_pattern.match('yt-123')[:issue]).to eq('yt-123')
end
end
+
+ describe '#fields' do
+ it 'only returns the project_url and issues_url fields' do
+ expect(subject.fields.pluck(:name)).to eq(%w[project_url issues_url])
+ end
+ end
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index d45a23a7ef8..89c440dc49c 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -14,7 +14,6 @@ RSpec.describe Issue do
it { is_expected.to belong_to(:milestone) }
it { is_expected.to belong_to(:iteration) }
it { is_expected.to belong_to(:project) }
- it { is_expected.to have_one(:namespace).through(:project) }
it { is_expected.to belong_to(:work_item_type).class_name('WorkItems::Type') }
it { is_expected.to belong_to(:moved_to).class_name('Issue') }
it { is_expected.to have_one(:moved_from).class_name('Issue') }
@@ -132,6 +131,37 @@ RSpec.describe Issue do
create(:issue)
end
end
+
+ context 'issue namespace' do
+ let(:issue) { build(:issue, project: reusable_project) }
+
+ it 'sets the namespace_id' do
+ expect(issue).to be_valid
+ expect(issue.namespace).to eq(reusable_project.project_namespace)
+ end
+
+ context 'when issue is created' do
+ it 'sets the namespace_id' do
+ issue.save!
+
+ expect(issue.reload.namespace).to eq(reusable_project.project_namespace)
+ end
+ end
+
+ context 'when existing issue is saved' do
+ let(:issue) { create(:issue) }
+
+ before do
+ issue.update!(namespace_id: nil)
+ end
+
+ it 'sets the namespace id' do
+ issue.update!(title: "#{issue.title} and something extra")
+
+ expect(issue.namespace).to eq(issue.project.project_namespace)
+ end
+ end
+ end
end
context 'order by upvotes' do
@@ -651,28 +681,6 @@ RSpec.describe Issue do
end
end
- describe '#has_related_branch?' do
- let(:issue) { create(:issue, project: reusable_project, title: "Blue Bell Knoll") }
-
- subject { issue.has_related_branch? }
-
- context 'branch found' do
- before do
- allow(issue.project.repository).to receive(:branch_names).and_return(["iceblink-luck", issue.to_branch_name])
- end
-
- it { is_expected.to eq true }
- end
-
- context 'branch not found' do
- before do
- allow(issue.project.repository).to receive(:branch_names).and_return(["lazy-calm"])
- end
-
- it { is_expected.to eq false }
- end
- end
-
it_behaves_like 'an editable mentionable' do
subject { create(:issue, project: create(:project, :repository)) }
@@ -744,25 +752,11 @@ RSpec.describe Issue do
end
describe '#participants' do
- context 'using a public project' do
- let_it_be(:public_project) { create(:project, :public) }
- let_it_be(:issue) { create(:issue, project: public_project) }
+ it_behaves_like 'issuable participants' do
+ let_it_be(:issuable_parent) { create(:project, :public) }
+ let_it_be_with_refind(:issuable) { create(:issue, project: issuable_parent) }
- let!(:note1) do
- create(:note_on_issue, noteable: issue, project: public_project, note: 'a')
- end
-
- let!(:note2) do
- create(:note_on_issue, noteable: issue, project: public_project, note: 'b')
- end
-
- it 'includes the issue author' do
- expect(issue.participants).to include(issue.author)
- end
-
- it 'includes the authors of the notes' do
- expect(issue.participants).to include(note1.author, note2.author)
- end
+ let(:params) { { noteable: issuable, project: issuable_parent } }
end
context 'using a private project' do
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index a9d1a8a5ef2..b98c0e8eae0 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -47,10 +47,9 @@ RSpec.describe Key, :mailer do
end
describe 'validation of banned keys' do
- let_it_be(:user) { create(:user) }
-
let(:key) { build(:key) }
- let(:banned_keys) do
+
+ where(:key_content) do
[
'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAwRIdDlHaIqZXND/l1vFT7ue3rc/DvXh2y' \
'x5EFtuxGQRHVxGMazDhV4vj5ANGXDQwUYI0iZh6aOVrDy8I/y9/y+YDGCvsnqrDbuPDjW' \
@@ -131,68 +130,13 @@ RSpec.describe Key, :mailer do
]
end
- context 'when ssh_banned_key feature flag is enabled with a user' do
- before do
- stub_feature_flags(ssh_banned_key: user)
- end
-
- where(:key_content) { banned_keys }
-
- with_them do
- it 'does not allow banned keys' do
- key.key = key_content
- key.user = user
-
- expect(key).to be_invalid
- expect(key.errors[:key]).to include(
- _('cannot be used because it belongs to a compromised private key. Stop using this key and generate a new one.'))
- end
-
- it 'allows when the user is a ghost user' do
- key.key = key_content
- key.user = User.ghost
-
- expect(key).to be_valid
- end
-
- it 'allows when the user is nil' do
- key.key = key_content
- key.user = nil
-
- expect(key).to be_valid
- end
- end
-
- it 'allows other keys' do
- key.user = user
-
- expect(key).to be_valid
- end
-
- it 'allows other users' do
- key.user = User.ghost
-
- expect(key).to be_valid
- end
- end
-
- context 'when ssh_banned_key feature flag is disabled' do
- before do
- stub_feature_flags(ssh_banned_key: false)
- end
-
- where(:key_content) { banned_keys }
+ with_them do
+ it 'does not allow banned keys' do
+ key.key = key_content
- with_them do
- it 'allows banned keys' do
- key.key = key_content
-
- expect(key).to be_valid
- end
- end
-
- it 'allows other keys' do
- expect(key).to be_valid
+ expect(key).to be_invalid
+ expect(key.errors[:key]).to include(
+ _('cannot be used because it belongs to a compromised private key. Stop using this key and generate a new one.'))
end
end
end
@@ -296,6 +240,39 @@ RSpec.describe Key, :mailer do
end
end
+ describe '#ensure_sha256_fingerprint!' do
+ let_it_be_with_reload(:user_key) { create(:personal_key) }
+
+ context 'with a valid SHA256 fingerprint' do
+ it 'does nothing' do
+ expect(user_key).not_to receive(:generate_fingerprint)
+
+ user_key.ensure_sha256_fingerprint!
+ end
+ end
+
+ context 'with a missing SHA256 fingerprint' do
+ before do
+ user_key.update_column(:fingerprint_sha256, nil)
+ user_key.ensure_sha256_fingerprint!
+ end
+
+ it 'fingerprints are present' do
+ expect(user_key.reload.fingerprint_sha256).to be_present
+ end
+ end
+
+ context 'with an invalid public key' do
+ before do
+ user_key.update_column(:key, 'a')
+ end
+
+ it 'does not throw an exception' do
+ expect { user_key.ensure_sha256_fingerprint! }.not_to raise_error
+ end
+ end
+ end
+
context 'fingerprint generation' do
it 'generates both md5 and sha256 fingerprints' do
key = build(:rsa_key_4096)
diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb
index f93c2d36966..94032146f51 100644
--- a/spec/models/members/group_member_spec.rb
+++ b/spec/models/members/group_member_spec.rb
@@ -219,7 +219,7 @@ RSpec.describe GroupMember do
end
context 'on create' do
- let(:action) { group.add_user(user, Gitlab::Access::GUEST) }
+ let(:action) { group.add_member(user, Gitlab::Access::GUEST) }
let(:blocking) { true }
it 'changes access level', :sidekiq_inline do
@@ -241,7 +241,7 @@ RSpec.describe GroupMember do
context 'on update' do
before do
- group.add_user(user, Gitlab::Access::GUEST)
+ group.add_member(user, Gitlab::Access::GUEST)
end
let(:action) { group.members.find_by(user: user).update!(access_level: Gitlab::Access::DEVELOPER) }
@@ -266,7 +266,7 @@ RSpec.describe GroupMember do
context 'on destroy' do
before do
- group.add_user(user, Gitlab::Access::GUEST)
+ group.add_member(user, Gitlab::Access::GUEST)
end
let(:action) { group.members.find_by(user: user).destroy! }
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index 8c989f5aaca..39d9d25a98c 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -111,12 +111,12 @@ RSpec.describe ProjectMember do
end
end
- describe '.add_users_to_projects' do
+ describe '.add_members_to_projects' do
it 'adds the given users to the given projects' do
projects = create_list(:project, 2)
users = create_list(:user, 2)
- described_class.add_users_to_projects(
+ described_class.add_members_to_projects(
[projects.first.id, projects.second.id],
[users.first.id, users.second],
described_class::MAINTAINER)
@@ -174,8 +174,8 @@ RSpec.describe ProjectMember do
expect { project.destroy! }.to change { user.can?(:guest_access, project) }.from(true).to(false)
end
- it 'refreshes the authorization without calling AuthorizedProjectUpdate::ProjectRecalculatePerUserService' do
- expect(AuthorizedProjectUpdate::ProjectRecalculatePerUserService).not_to receive(:new)
+ it 'refreshes the authorization without calling AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker).not_to receive(:bulk_perform_and_wait)
project.destroy!
end
@@ -199,7 +199,7 @@ RSpec.describe ProjectMember do
context 'when importing' do
it 'does not refresh' do
- expect(AuthorizedProjectUpdate::ProjectRecalculatePerUserService).not_to receive(:new)
+ expect(AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker).not_to receive(:bulk_perform_and_wait)
member = build(:project_member)
member.importing = true
@@ -212,11 +212,11 @@ RSpec.describe ProjectMember do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
- shared_examples_for 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserService to recalculate authorizations' do
- it 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserService' do
- expect_next_instance_of(AuthorizedProjectUpdate::ProjectRecalculatePerUserService, project, user) do |service|
- expect(service).to receive(:execute)
- end
+ shared_examples_for 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker inline to recalculate authorizations' do
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker).to receive(:bulk_perform_and_wait).with(
+ [[project.id, user.id]]
+ )
action
end
@@ -236,13 +236,13 @@ RSpec.describe ProjectMember do
end
context 'on create' do
- let(:action) { project.add_user(user, Gitlab::Access::GUEST) }
+ let(:action) { project.add_member(user, Gitlab::Access::GUEST) }
it 'changes access level' do
expect { action }.to change { user.can?(:guest_access, project) }.from(false).to(true)
end
- it_behaves_like 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserService to recalculate authorizations'
+ it_behaves_like 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker inline to recalculate authorizations'
it_behaves_like 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations'
end
@@ -250,14 +250,14 @@ RSpec.describe ProjectMember do
let(:action) { project.members.find_by(user: user).update!(access_level: Gitlab::Access::DEVELOPER) }
before do
- project.add_user(user, Gitlab::Access::GUEST)
+ project.add_member(user, Gitlab::Access::GUEST)
end
it 'changes access level' do
expect { action }.to change { user.can?(:developer_access, project) }.from(false).to(true)
end
- it_behaves_like 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserService to recalculate authorizations'
+ it_behaves_like 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker inline to recalculate authorizations'
it_behaves_like 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations'
end
@@ -265,7 +265,7 @@ RSpec.describe ProjectMember do
let(:action) { project.members.find_by(user: user).destroy! }
before do
- project.add_user(user, Gitlab::Access::GUEST)
+ project.add_member(user, Gitlab::Access::GUEST)
end
it 'changes access level', :sidekiq_inline do
diff --git a/spec/models/merge_request_diff_file_spec.rb b/spec/models/merge_request_diff_file_spec.rb
index c9bcb900eca..7dc550a6c93 100644
--- a/spec/models/merge_request_diff_file_spec.rb
+++ b/spec/models/merge_request_diff_file_spec.rb
@@ -13,50 +13,53 @@ RSpec.describe MergeRequestDiffFile do
let(:invalid_items_for_bulk_insertion) { [] } # class does not have any validations defined
end
+ let(:unpacked) { 'unpacked' }
+ let(:packed) { [unpacked].pack('m0') }
+ let(:file) { create(:merge_request).merge_request_diff.merge_request_diff_files.first }
+
describe '#diff' do
+ let(:file) { build(:merge_request_diff_file) }
+
context 'when diff is not stored' do
let(:unpacked) { 'unpacked' }
let(:packed) { [unpacked].pack('m0') }
before do
- subject.diff = packed
+ file.diff = packed
end
context 'when the diff is marked as binary' do
before do
- subject.binary = true
+ file.binary = true
end
it 'unpacks from base 64' do
- expect(subject.diff).to eq(unpacked)
+ expect(file.diff).to eq(unpacked)
end
context 'invalid base64' do
let(:packed) { '---/dev/null' }
it 'returns the raw diff' do
- expect(subject.diff).to eq(packed)
+ expect(file.diff).to eq(packed)
end
end
end
context 'when the diff is not marked as binary' do
it 'returns the raw diff' do
- expect(subject.diff).to eq(packed)
+ expect(file.diff).to eq(packed)
end
end
end
context 'when diff is stored in DB' do
- let(:file) { create(:merge_request).merge_request_diff.merge_request_diff_files.first }
-
it 'returns UTF-8 string' do
expect(file.diff.encoding).to eq Encoding::UTF_8
end
end
context 'when diff is stored in external storage' do
- let(:file) { create(:merge_request).merge_request_diff.merge_request_diff_files.first }
let(:test_dir) { 'tmp/tests/external-diffs' }
around do |example|
@@ -81,17 +84,141 @@ RSpec.describe MergeRequestDiffFile do
describe '#utf8_diff' do
it 'does not raise error when the diff is binary' do
- subject.diff = "\x05\x00\x68\x65\x6c\x6c\x6f"
+ file = build(:merge_request_diff_file)
+ file.diff = "\x05\x00\x68\x65\x6c\x6c\x6f"
- expect { subject.utf8_diff }.not_to raise_error
+ expect { file.utf8_diff }.not_to raise_error
end
it 'calls #diff once' do
- allow(subject).to receive(:diff).and_return('test')
+ allow(file).to receive(:diff).and_return('test')
+
+ expect(file).to receive(:diff).once
+
+ file.utf8_diff
+ end
+
+ context 'externally stored diff caching' do
+ let(:test_dir) { 'tmp/tests/external-diffs' }
+
+ around do |example|
+ FileUtils.mkdir_p(test_dir)
+
+ begin
+ example.run
+ ensure
+ FileUtils.rm_rf(test_dir)
+ end
+ end
+
+ before do
+ stub_external_diffs_setting(enabled: true, storage_path: test_dir)
+ end
+
+ context 'when external diff is not cached' do
+ it 'caches external diffs' do
+ expect(file.merge_request_diff).to receive(:cache_external_diff).and_call_original
+
+ expect(file.utf8_diff).to eq(file.diff)
+ end
+ end
+
+ context 'when external diff is already cached' do
+ it 'reads diff from cached external diff' do
+ file_stub = double
+
+ allow(file.merge_request_diff).to receive(:cached_external_diff).and_yield(file_stub)
+ expect(file_stub).to receive(:seek).with(file.external_diff_offset)
+ expect(file_stub).to receive(:read).with(file.external_diff_size)
+
+ file.utf8_diff
+ end
+ end
+
+ context 'when the diff is marked as binary' do
+ let(:file) { build(:merge_request_diff_file) }
- expect(subject).to receive(:diff).once
+ before do
+ allow(file.merge_request_diff).to receive(:stored_externally?).and_return(true)
+ allow(file.merge_request_diff).to receive(:cached_external_diff).and_return(packed)
+ end
- subject.utf8_diff
+ context 'when the diff is marked as binary' do
+ before do
+ file.binary = true
+ end
+
+ it 'unpacks from base 64' do
+ expect(file.utf8_diff).to eq(unpacked)
+ end
+
+ context 'invalid base64' do
+ let(:packed) { '---/dev/null' }
+
+ it 'returns the raw diff' do
+ expect(file.utf8_diff).to eq(packed)
+ end
+ end
+ end
+
+ context 'when the diff is not marked as binary' do
+ it 'returns the raw diff' do
+ expect(file.utf8_diff).to eq(packed)
+ end
+ end
+ end
+
+ context 'when content responds to #encoding' do
+ it 'encodes content to utf8 encoding' do
+ expect(file.utf8_diff.encoding).to eq(Encoding::UTF_8)
+ end
+ end
+
+ context 'when content is blank' do
+ it 'returns an empty string' do
+ allow(file.merge_request_diff).to receive(:cached_external_diff).and_return(nil)
+
+ expect(file.utf8_diff).to eq('')
+ end
+ end
+
+ context 'when exception is raised' do
+ it 'falls back to #diff' do
+ allow(file).to receive(:binary?).and_raise(StandardError, 'Error!')
+ expect(file).to receive(:diff)
+ expect(Gitlab::AppLogger)
+ .to receive(:warn)
+ .with(
+ a_hash_including(
+ :message => 'Cached external diff export failed',
+ :merge_request_diff_file_id => file.id,
+ :merge_request_diff_id => file.merge_request_diff.id,
+ 'exception.class' => 'StandardError',
+ 'exception.message' => 'Error!'
+ )
+ )
+
+ file.utf8_diff
+ end
+ end
+ end
+
+ context 'when externally_stored_diffs_caching_export feature flag is disabled' do
+ it 'calls #diff' do
+ stub_feature_flags(externally_stored_diffs_caching_export: false)
+
+ expect(file).to receive(:diff)
+
+ file.utf8_diff
+ end
+ end
+
+ context 'when diff is not stored externally' do
+ it 'calls #diff' do
+ expect(file).to receive(:diff)
+
+ file.utf8_diff
+ end
end
end
end
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index afe7251f59a..007e84164a8 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -1120,4 +1120,101 @@ RSpec.describe MergeRequestDiff do
expect(described_class.latest_diff_for_merge_requests(nil)).to be_empty
end
end
+
+ context 'external diff caching' do
+ let(:test_dir) { 'tmp/tests/external-diffs' }
+ let(:cache_dir) { File.join(Dir.tmpdir, "project-#{diff.project.id}-external-mr-#{diff.merge_request_id}-diff-#{diff.id}-cache") }
+ let(:cache_filepath) { File.join(cache_dir, "diff-#{diff.id}") }
+ let(:external_diff_content) { diff.opening_external_diff { |diff| diff.read } }
+
+ around do |example|
+ FileUtils.mkdir_p(test_dir)
+
+ begin
+ example.run
+ ensure
+ FileUtils.rm_rf(test_dir)
+ end
+ end
+
+ before do
+ stub_external_diffs_setting(enabled: true, storage_path: test_dir)
+ end
+
+ subject(:diff) { diff_with_commits }
+
+ describe '#cached_external_diff' do
+ context 'when diff is externally stored' do
+ context 'when diff is already cached' do
+ it 'yields cached file' do
+ Dir.mkdir(cache_dir)
+ File.open(cache_filepath, 'wb') { |f| f.write(external_diff_content) }
+
+ expect(diff).not_to receive(:cache_external_diff)
+
+ expect { |b| diff.cached_external_diff(&b) }.to yield_with_args(File)
+ end
+ end
+
+ context 'when diff is not cached' do
+ it 'caches external diff in tmp storage' do
+ expect(diff).to receive(:cache_external_diff).and_call_original
+ expect(File.exist?(cache_filepath)).to eq(false)
+ expect { |b| diff.cached_external_diff(&b) }.to yield_with_args(File)
+ expect(File.exist?(cache_filepath)).to eq(true)
+ expect(File.read(cache_filepath)).to eq(external_diff_content)
+ end
+ end
+ end
+
+ context 'when diff is not externally stored' do
+ it 'yields nil' do
+ stub_external_diffs_setting(enabled: false)
+
+ expect { |b| diff.cached_external_diff(&b) }.to yield_with_args(nil)
+ end
+ end
+ end
+
+ describe '#remove_cached_external_diff' do
+ before do
+ diff.cached_external_diff { |diff| diff }
+ end
+
+ it 'removes external diff cache diff' do
+ expect(Dir.exist?(cache_dir)).to eq(true)
+
+ diff.remove_cached_external_diff
+
+ expect(Dir.exist?(cache_dir)).to eq(false)
+ end
+
+ context 'when path is traversed' do
+ it 'raises' do
+ allow(diff).to receive(:external_diff_cache_dir).and_return(File.join(cache_dir, '..'))
+
+ expect { diff.remove_cached_external_diff }.to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ end
+ end
+
+ context 'when path is not allowed' do
+ it 'raises' do
+ allow(diff).to receive(:external_diff_cache_dir).and_return('/')
+
+ expect { diff.remove_cached_external_diff }.to raise_error(StandardError, 'path / is not allowed')
+ end
+ end
+
+ context 'when dir does not exist' do
+ it 'returns' do
+ FileUtils.rm_rf(cache_dir)
+
+ expect(Dir.exist?(cache_dir)).to eq(false)
+ expect(FileUtils).not_to receive(:rm_rf).with(cache_dir)
+
+ diff.remove_cached_external_diff
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 381eccf2376..c3e325c4e6c 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -658,7 +658,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
before do
- project.add_user(user, :developer)
+ project.add_member(user, :developer)
end
describe '.total_time_to_merge' do
@@ -4286,6 +4286,18 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe 'transition to closed' do
+ context 'with merge error' do
+ subject { create(:merge_request, merge_error: 'merge error') }
+
+ it 'clears merge error' do
+ subject.close!
+
+ expect(subject.reload.merge_error).to eq(nil)
+ end
+ end
+ end
+
describe 'transition to cannot_be_merged' do
let(:notification_service) { double(:notification_service) }
let(:todo_service) { double(:todo_service) }
@@ -4903,7 +4915,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
.to delegate_method(:builds_with_coverage)
.to(:head_pipeline)
.with_prefix
- .with_arguments(allow_nil: true)
+ .allow_nil
end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 96e06e617d5..664cdb27290 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe Namespace do
it { is_expected.to have_one :namespace_route }
it { is_expected.to have_many :namespace_members }
it { is_expected.to have_one :cluster_enabled_grant }
+ it { is_expected.to have_many(:work_items) }
it do
is_expected.to have_one(:ci_cd_settings).class_name('NamespaceCiCdSetting').inverse_of(:namespace).autosave(true)
@@ -337,16 +338,13 @@ RSpec.describe Namespace do
end
describe 'delegate' do
- it { is_expected.to delegate_method(:name).to(:owner).with_prefix.with_arguments(allow_nil: true) }
- it { is_expected.to delegate_method(:avatar_url).to(:owner).with_arguments(allow_nil: true) }
- it do
- is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy)
- .to(:namespace_settings).with_arguments(allow_nil: true)
- end
+ it { is_expected.to delegate_method(:name).to(:owner).with_prefix.allow_nil }
+ it { is_expected.to delegate_method(:avatar_url).to(:owner).allow_nil }
+ it { is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy).to(:namespace_settings).allow_nil }
it do
- is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy=)
- .to(:namespace_settings).with_arguments(allow_nil: true)
+ is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy=).to(:namespace_settings)
+ .with_arguments(:args).allow_nil
end
end
@@ -1886,17 +1884,39 @@ RSpec.describe Namespace do
end
end
+ describe '#emails_enabled?' do
+ it "is the opposite of emails_disabled" do
+ group = create(:group, emails_disabled: false)
+
+ expect(group.emails_enabled?).to be_truthy
+ end
+ end
+
describe '#pages_virtual_domain' do
let(:project) { create(:project, namespace: namespace) }
+ let(:virtual_domain) { namespace.pages_virtual_domain }
- it 'returns the virual domain' do
+ before do
project.mark_pages_as_deployed
project.update_pages_deployment!(create(:pages_deployment, project: project))
+ end
- virtual_domain = namespace.pages_virtual_domain
-
+ it 'returns the virual domain' do
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.lookup_paths).not_to be_empty
+ expect(virtual_domain.cache_key).to eq("pages_domain_for_namespace_#{namespace.root_ancestor.id}")
+ end
+
+ context 'when :cache_pages_domain_api is disabled' do
+ before do
+ stub_feature_flags(cache_pages_domain_api: false)
+ end
+
+ it 'returns the virual domain' do
+ expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
+ expect(virtual_domain.lookup_paths).not_to be_empty
+ expect(virtual_domain.cache_key).to be_nil
+ end
end
end
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 4b262c1f3a9..fc6f7832c2c 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -106,6 +106,22 @@ RSpec.describe Note do
end
end
+ describe 'created_at in the past' do
+ let_it_be(:noteable) { create(:issue) }
+
+ context 'when creating a note not too much in the past' do
+ subject { build(:note, project: noteable.project, noteable: noteable, created_at: '1990-05-06') }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when creating a note too much in the past' do
+ subject { build(:note, project: noteable.project, noteable: noteable, created_at: '1600-05-06') }
+
+ it { is_expected.not_to be_valid }
+ end
+ end
+
describe 'confidentiality' do
context 'for existing public note' do
let_it_be(:existing_note) { create(:note) }
diff --git a/spec/models/oauth_access_token_spec.rb b/spec/models/oauth_access_token_spec.rb
index 65a7f6410cf..2b47da1ebe1 100644
--- a/spec/models/oauth_access_token_spec.rb
+++ b/spec/models/oauth_access_token_spec.rb
@@ -7,22 +7,40 @@ RSpec.describe OauthAccessToken do
let(:app_one) { create(:oauth_application) }
let(:app_two) { create(:oauth_application) }
let(:app_three) { create(:oauth_application) }
- let(:tokens) { described_class.all }
+ let(:token) { create(:oauth_access_token, application_id: app_one.id) }
- before do
- create(:oauth_access_token, application_id: app_one.id)
- create_list(:oauth_access_token, 2, resource_owner: user, application_id: app_two.id)
- end
+ describe 'scopes' do
+ describe '.distinct_resource_owner_counts' do
+ let(:tokens) { described_class.all }
+
+ before do
+ token
+ create_list(:oauth_access_token, 2, resource_owner: user, application_id: app_two.id)
+ end
+
+ it 'returns unique owners' do
+ expect(tokens.count).to eq(3)
+ expect(tokens.distinct_resource_owner_counts([app_one])).to eq({ app_one.id => 1 })
+ expect(tokens.distinct_resource_owner_counts([app_two])).to eq({ app_two.id => 1 })
+ expect(tokens.distinct_resource_owner_counts([app_three])).to eq({})
+ expect(tokens.distinct_resource_owner_counts([app_one, app_two]))
+ .to eq({
+ app_one.id => 1,
+ app_two.id => 1
+ })
+ end
+ end
+
+ describe '.latest_per_application' do
+ let!(:app_two_token1) { create(:oauth_access_token, application: app_two) }
+ let!(:app_two_token2) { create(:oauth_access_token, application: app_two) }
+ let!(:app_three_token1) { create(:oauth_access_token, application: app_three) }
+ let!(:app_three_token2) { create(:oauth_access_token, application: app_three) }
- it 'returns unique owners' do
- expect(tokens.count).to eq(3)
- expect(tokens.distinct_resource_owner_counts([app_one])).to eq({ app_one.id => 1 })
- expect(tokens.distinct_resource_owner_counts([app_two])).to eq({ app_two.id => 1 })
- expect(tokens.distinct_resource_owner_counts([app_three])).to eq({})
- expect(tokens.distinct_resource_owner_counts([app_one, app_two]))
- .to eq({
- app_one.id => 1,
- app_two.id => 1
- })
+ it 'returns only the latest token for each application' do
+ expect(described_class.latest_per_application.map(&:id))
+ .to match_array([app_two_token2.id, app_three_token2.id])
+ end
+ end
end
end
diff --git a/spec/models/operations/feature_flags_client_spec.rb b/spec/models/operations/feature_flags_client_spec.rb
index 05988d676f3..2ed3222c65c 100644
--- a/spec/models/operations/feature_flags_client_spec.rb
+++ b/spec/models/operations/feature_flags_client_spec.rb
@@ -3,7 +3,15 @@
require 'spec_helper'
RSpec.describe Operations::FeatureFlagsClient do
- subject { create(:operations_feature_flags_client) }
+ let_it_be(:project) { create(:project) }
+
+ let!(:client) { create(:operations_feature_flags_client, project: project) }
+
+ subject { client }
+
+ before do
+ client.unleash_app_name = 'production'
+ end
describe 'associations' do
it { is_expected.to belong_to(:project) }
@@ -18,4 +26,64 @@ RSpec.describe Operations::FeatureFlagsClient do
expect(subject.token).not_to be_empty
end
end
+
+ describe '.update_last_feature_flag_updated_at!' do
+ subject { described_class.update_last_feature_flag_updated_at!(project) }
+
+ it 'updates the last_feature_flag_updated_at of the project client' do
+ freeze_time do
+ expect { subject }.to change { client.reload.last_feature_flag_updated_at }.from(nil).to(Time.current)
+ end
+ end
+ end
+
+ describe '#unleash_api_version' do
+ subject { client.unleash_api_version }
+
+ it { is_expected.to eq(described_class::DEFAULT_UNLEASH_API_VERSION) }
+ end
+
+ describe '#unleash_api_features' do
+ subject { client.unleash_api_features }
+
+ it 'fetches' do
+ expect(Operations::FeatureFlag).to receive(:for_unleash_client).with(project, 'production').once
+
+ subject
+ end
+
+ context 'when unleash app name is not set' do
+ before do
+ client.unleash_app_name = nil
+ end
+
+ it 'does not fetch' do
+ expect(Operations::FeatureFlag).not_to receive(:for_unleash_client)
+
+ subject
+ end
+ end
+ end
+
+ describe '#unleash_api_cache_key' do
+ subject { client.unleash_api_cache_key }
+
+ it 'constructs the cache key' do
+ is_expected.to eq("api_version:#{client.unleash_api_version}"\
+ ":app_name:#{client.unleash_app_name}"\
+ ":updated_at:#{client.last_feature_flag_updated_at.to_i}")
+ end
+
+ context 'when unleash app name is not set' do
+ before do
+ client.unleash_app_name = nil
+ end
+
+ it 'constructs the cache key without unleash app name' do
+ is_expected.to eq("api_version:#{client.unleash_api_version}"\
+ ":app_name:"\
+ ":updated_at:#{client.last_feature_flag_updated_at.to_i}")
+ end
+ end
+ end
end
diff --git a/spec/models/packages/cleanup/policy_spec.rb b/spec/models/packages/cleanup/policy_spec.rb
index c08ae4aa7e7..a37042520e7 100644
--- a/spec/models/packages/cleanup/policy_spec.rb
+++ b/spec/models/packages/cleanup/policy_spec.rb
@@ -25,4 +25,40 @@ RSpec.describe Packages::Cleanup::Policy, type: :model do
it { is_expected.to contain_exactly(active_policy) }
end
+
+ describe '.with_packages' do
+ let_it_be(:policy_with_packages) { create(:packages_cleanup_policy) }
+ let_it_be(:policy_without_packages) { create(:packages_cleanup_policy) }
+ let_it_be(:package) { create(:package, project: policy_with_packages.project) }
+
+ subject { described_class.with_packages }
+
+ it { is_expected.to contain_exactly(policy_with_packages) }
+ end
+
+ describe '.runnable' do
+ let_it_be(:runnable_policy_with_packages) { create(:packages_cleanup_policy, :runnable) }
+ let_it_be(:runnable_policy_without_packages) { create(:packages_cleanup_policy, :runnable) }
+ let_it_be(:non_runnable_policy_with_packages) { create(:packages_cleanup_policy) }
+ let_it_be(:non_runnable_policy_without_packages) { create(:packages_cleanup_policy) }
+
+ let_it_be(:package1) { create(:package, project: runnable_policy_with_packages.project) }
+ let_it_be(:package2) { create(:package, project: non_runnable_policy_with_packages.project) }
+
+ subject { described_class.runnable }
+
+ it { is_expected.to contain_exactly(runnable_policy_with_packages) }
+ end
+
+ describe '#keep_n_duplicated_package_files_disabled?' do
+ subject { policy.keep_n_duplicated_package_files_disabled? }
+
+ %w[all 1].each do |value|
+ context "with value set to #{value}" do
+ let(:policy) { build(:packages_cleanup_policy, keep_n_duplicated_package_files: value) }
+
+ it { is_expected.to eq(value == 'all') }
+ end
+ end
+ end
end
diff --git a/spec/models/packages/debian/file_entry_spec.rb b/spec/models/packages/debian/file_entry_spec.rb
index e981adf69bc..ed6372f2873 100644
--- a/spec/models/packages/debian/file_entry_spec.rb
+++ b/spec/models/packages/debian/file_entry_spec.rb
@@ -31,6 +31,13 @@ RSpec.describe Packages::Debian::FileEntry, type: :model do
describe 'validations' do
it { is_expected.to be_valid }
+ context 'with FIPS mode', :fips_mode do
+ it 'raises an error' do
+ expect { subject.validate! }
+ .to raise_error(::Packages::FIPS::DisabledError, 'Debian registry is not FIPS compliant')
+ end
+ end
+
describe '#filename' do
it { is_expected.to validate_presence_of(:filename) }
it { is_expected.not_to allow_value('Hé').for(:filename) }
diff --git a/spec/models/pages/virtual_domain_spec.rb b/spec/models/pages/virtual_domain_spec.rb
index 29c14cbeb3e..b5a421295b2 100644
--- a/spec/models/pages/virtual_domain_spec.rb
+++ b/spec/models/pages/virtual_domain_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Pages::VirtualDomain do
let(:domain) { nil }
let(:project) { instance_double(Project) }
- subject(:virtual_domain) { described_class.new([project], domain: domain) }
+ subject(:virtual_domain) { described_class.new(projects: [project], domain: domain) }
it 'returns nil if there is no domain provided' do
expect(virtual_domain.certificate).to be_nil
@@ -35,7 +35,7 @@ RSpec.describe Pages::VirtualDomain do
context 'when there is pages domain provided' do
let(:domain) { instance_double(PagesDomain) }
- subject(:virtual_domain) { described_class.new([project_a, project_b, project_c], domain: domain) }
+ subject(:virtual_domain) { described_class.new(projects: [project_a, project_b, project_c], domain: domain) }
it 'returns collection of projects pages lookup paths sorted by prefix in reverse' do
expect(project_a).to receive(:pages_lookup_path).with(domain: domain, trim_prefix: nil).and_return(pages_lookup_path_a)
@@ -47,7 +47,7 @@ RSpec.describe Pages::VirtualDomain do
end
context 'when there is trim_prefix provided' do
- subject(:virtual_domain) { described_class.new([project_a, project_b], trim_prefix: 'group/') }
+ subject(:virtual_domain) { described_class.new(projects: [project_a, project_b], trim_prefix: 'group/') }
it 'returns collection of projects pages lookup paths sorted by prefix in reverse' do
expect(project_a).to receive(:pages_lookup_path).with(trim_prefix: 'group/', domain: nil).and_return(pages_lookup_path_a)
@@ -57,4 +57,19 @@ RSpec.describe Pages::VirtualDomain do
end
end
end
+
+ describe '#cache_key' do
+ it 'returns the cache key based on the given cache_control' do
+ cache_control = instance_double(::Gitlab::Pages::CacheControl, cache_key: 'cache_key')
+ virtual_domain = described_class.new(projects: [instance_double(Project)], cache: cache_control)
+
+ expect(virtual_domain.cache_key).to eq('cache_key')
+ end
+
+ it 'returns nil when no cache_control is given' do
+ virtual_domain = described_class.new(projects: [instance_double(Project)])
+
+ expect(virtual_domain.cache_key).to be_nil
+ end
+ end
end
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index 7fde8d63947..4e463b1194c 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -544,16 +544,31 @@ RSpec.describe PagesDomain do
end
end
- it 'returns the virual domain when there are pages deployed for the project' do
- project.mark_pages_as_deployed
- project.update_pages_deployment!(create(:pages_deployment, project: project))
+ context 'when there are pages deployed for the project' do
+ let(:virtual_domain) { pages_domain.pages_virtual_domain }
- expect(Pages::VirtualDomain).to receive(:new).with([project], domain: pages_domain).and_call_original
+ before do
+ project.mark_pages_as_deployed
+ project.update_pages_deployment!(create(:pages_deployment, project: project))
+ end
+
+ it 'returns the virtual domain when there are pages deployed for the project' do
+ expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
+ expect(virtual_domain.lookup_paths).not_to be_empty
+ expect(virtual_domain.cache_key).to eq("pages_domain_for_project_#{project.id}")
+ end
- virtual_domain = pages_domain.pages_virtual_domain
+ context 'when :cache_pages_domain_api is disabled' do
+ before do
+ stub_feature_flags(cache_pages_domain_api: false)
+ end
- expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
- expect(virtual_domain.lookup_paths).not_to be_empty
+ it 'returns the virtual domain when there are pages deployed for the project' do
+ expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
+ expect(virtual_domain.lookup_paths).not_to be_empty
+ expect(virtual_domain.cache_key).to be_nil
+ end
+ end
end
end
diff --git a/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb b/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
index 7d4268f74e9..7411bc95147 100644
--- a/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
+++ b/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
@@ -23,7 +23,8 @@ RSpec.describe Preloaders::UserMaxAccessLevelInProjectsPreloader do
# we have an existing N+1, one for each project for which user is not a member
# in this spec, project_3, project_4, project_5
# https://gitlab.com/gitlab-org/gitlab/-/issues/362890
- expect { query }.to make_queries(projects.size + 3)
+ ee_only_policy_check_queries = Gitlab.ee? ? 1 : 0
+ expect { query }.to make_queries(projects.size + 3 + ee_only_policy_check_queries)
end
end
diff --git a/spec/models/project_export_job_spec.rb b/spec/models/project_export_job_spec.rb
index 5a2b1443f8b..653d4d2df27 100644
--- a/spec/models/project_export_job_spec.rb
+++ b/spec/models/project_export_job_spec.rb
@@ -3,17 +3,14 @@
require 'spec_helper'
RSpec.describe ProjectExportJob, type: :model do
- let(:project) { create(:project) }
- let!(:job1) { create(:project_export_job, project: project, status: 0) }
- let!(:job2) { create(:project_export_job, project: project, status: 2) }
-
describe 'associations' do
- it { expect(job1).to belong_to(:project) }
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to have_many(:relation_exports) }
end
describe 'validations' do
- it { expect(job1).to validate_presence_of(:project) }
- it { expect(job1).to validate_presence_of(:jid) }
- it { expect(job1).to validate_presence_of(:status) }
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:jid) }
+ it { is_expected.to validate_presence_of(:status) }
end
end
diff --git a/spec/models/project_import_state_spec.rb b/spec/models/project_import_state_spec.rb
index f6e398bd23c..db79185d759 100644
--- a/spec/models/project_import_state_spec.rb
+++ b/spec/models/project_import_state_spec.rb
@@ -156,7 +156,7 @@ RSpec.describe ProjectImportState, type: :model do
project.import_state.finish
end
- it 'does not qneueue housekeeping when project does not have a valid import type' do
+ it 'does not enqueue housekeeping when project does not have a valid import type' do
project = create(:project, :import_started, import_type: nil)
expect(Projects::AfterImportWorker).not_to receive(:perform_async)
@@ -164,6 +164,43 @@ RSpec.describe ProjectImportState, type: :model do
project.import_state.finish
end
end
+
+ context 'state transition: [:none, :scheduled, :started] => [:canceled]' do
+ it 'updates the import status' do
+ import_state = create(:import_state, :none)
+ expect { import_state.cancel }
+ .to change { import_state.status }
+ .from('none').to('canceled')
+ end
+
+ it 'unsets the JID' do
+ import_state = create(:import_state, :started, jid: '123')
+
+ expect(Gitlab::SidekiqStatus)
+ .to receive(:unset)
+ .with('123')
+ .and_call_original
+
+ import_state.cancel!
+
+ expect(import_state.jid).to be_nil
+ end
+
+ it 'removes import data' do
+ import_data = ProjectImportData.new(data: { 'test' => 'some data' })
+ project = create(:project, :import_scheduled, import_data: import_data)
+
+ expect(project)
+ .to receive(:remove_import_data)
+ .and_call_original
+
+ expect do
+ project.import_state.cancel
+ project.reload
+ end.to change { project.import_data }
+ .from(import_data).to(nil)
+ end
+ end
end
describe 'clearing `jid` after finish', :clean_gitlab_redis_cache do
@@ -178,7 +215,7 @@ RSpec.describe ProjectImportState, type: :model do
end
end
- context 'with an JID' do
+ context 'with a JID' do
it 'unsets the JID' do
import_state = create(:import_state, :started, jid: '123')
diff --git a/spec/models/project_setting_spec.rb b/spec/models/project_setting_spec.rb
index 867ad843406..fb1601a5f9c 100644
--- a/spec/models/project_setting_spec.rb
+++ b/spec/models/project_setting_spec.rb
@@ -6,6 +6,17 @@ RSpec.describe ProjectSetting, type: :model do
using RSpec::Parameterized::TableSyntax
it { is_expected.to belong_to(:project) }
+ describe 'scopes' do
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+ let_it_be(:project_setting_1) { create(:project_setting, project: project_1) }
+ let_it_be(:project_setting_2) { create(:project_setting, project: project_2) }
+
+ it 'returns project setting for the given projects' do
+ expect(described_class.for_projects(project_1)).to contain_exactly(project_setting_1)
+ end
+ end
+
describe 'validations' do
it { is_expected.not_to allow_value(nil).for(:target_platforms) }
it { is_expected.to allow_value([]).for(:target_platforms) }
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 2d84c1b843e..2171ee752fd 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -27,6 +27,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:merge_requests) }
it { is_expected.to have_many(:merge_request_metrics).class_name('MergeRequest::Metrics') }
it { is_expected.to have_many(:issues) }
+ it { is_expected.to have_many(:incident_management_issuable_escalation_statuses).through(:issues).inverse_of(:project).class_name('IncidentManagement::IssuableEscalationStatus') }
it { is_expected.to have_many(:milestones) }
it { is_expected.to have_many(:iterations) }
it { is_expected.to have_many(:project_members).dependent(:delete_all) }
@@ -81,7 +82,6 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_one(:last_event).class_name('Event') }
it { is_expected.to have_one(:forked_from_project).through(:fork_network_member) }
it { is_expected.to have_one(:auto_devops).class_name('ProjectAutoDevops') }
- it { is_expected.to have_one(:tracing_setting).class_name('ProjectTracingSetting') }
it { is_expected.to have_one(:error_tracking_setting).class_name('ErrorTracking::ProjectErrorTrackingSetting') }
it { is_expected.to have_one(:project_setting) }
it { is_expected.to have_one(:alerting_setting).class_name('Alerting::ProjectAlertingSetting') }
@@ -821,31 +821,38 @@ RSpec.describe Project, factory_default: :keep do
end
describe 'delegation' do
- [:add_guest, :add_reporter, :add_developer, :add_maintainer, :add_user, :add_users].each do |method|
+ [:add_guest, :add_reporter, :add_developer, :add_maintainer, :add_member, :add_members].each do |method|
it { is_expected.to delegate_method(method).to(:team) }
end
it { is_expected.to delegate_method(:members).to(:team).with_prefix(true) }
- it { is_expected.to delegate_method(:name).to(:owner).with_prefix(true).with_arguments(allow_nil: true) }
- it { is_expected.to delegate_method(:root_ancestor).to(:namespace).with_arguments(allow_nil: true) }
- it { is_expected.to delegate_method(:certificate_based_clusters_enabled?).to(:namespace).with_arguments(allow_nil: true) }
- it { is_expected.to delegate_method(:last_pipeline).to(:commit).with_arguments(allow_nil: true) }
+ it { is_expected.to delegate_method(:name).to(:owner).with_prefix(true).allow_nil }
+ it { is_expected.to delegate_method(:root_ancestor).to(:namespace).allow_nil }
+ it { is_expected.to delegate_method(:certificate_based_clusters_enabled?).to(:namespace).allow_nil }
+ it { is_expected.to delegate_method(:last_pipeline).to(:commit).allow_nil }
it { is_expected.to delegate_method(:container_registry_enabled?).to(:project_feature) }
it { is_expected.to delegate_method(:container_registry_access_level).to(:project_feature) }
- describe 'project settings' do
+ describe 'read project settings' do
%i(
show_default_award_emojis
- show_default_award_emojis=
show_default_award_emojis?
warn_about_potentially_unwanted_characters
- warn_about_potentially_unwanted_characters=
warn_about_potentially_unwanted_characters?
enforce_auth_checks_on_uploads
- enforce_auth_checks_on_uploads=
enforce_auth_checks_on_uploads?
).each do |method|
- it { is_expected.to delegate_method(method).to(:project_setting).with_arguments(allow_nil: true) }
+ it { is_expected.to delegate_method(method).to(:project_setting).allow_nil }
+ end
+ end
+
+ describe 'write project settings' do
+ %i(
+ show_default_award_emojis=
+ warn_about_potentially_unwanted_characters=
+ enforce_auth_checks_on_uploads=
+ ).each do |method|
+ it { is_expected.to delegate_method(method).to(:project_setting).with_arguments(:args).allow_nil }
end
end
@@ -1855,7 +1862,7 @@ RSpec.describe Project, factory_default: :keep do
describe 'when a user has access to a project' do
before do
- project.add_user(user, Gitlab::Access::MAINTAINER)
+ project.add_member(user, Gitlab::Access::MAINTAINER)
end
it { is_expected.to eq([project]) }
@@ -3588,6 +3595,14 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '#emails_enabled?' do
+ let(:project) { build(:project, emails_disabled: false) }
+
+ it "is the opposite of emails_disabled" do
+ expect(project.emails_enabled?).to be_truthy
+ end
+ end
+
describe '#lfs_enabled?' do
let(:namespace) { create(:namespace) }
let(:project) { build(:project, namespace: namespace) }
@@ -8383,6 +8398,27 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '#group_group_links' do
+ context 'with group project' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ it 'returns group links of group' do
+ expect(group).to receive_message_chain(:shared_with_group_links, :of_ancestors_and_self)
+
+ project.group_group_links
+ end
+ end
+
+ context 'with personal project' do
+ let_it_be(:project) { create(:project) }
+
+ it 'returns none' do
+ expect(project.group_group_links).to eq(GroupGroupLink.none)
+ end
+ end
+ end
+
describe '#security_training_available?' do
subject { build(:project) }
diff --git a/spec/models/project_team_spec.rb b/spec/models/project_team_spec.rb
index 2ddbab7779e..1fab07c1452 100644
--- a/spec/models/project_team_spec.rb
+++ b/spec/models/project_team_spec.rb
@@ -251,13 +251,13 @@ RSpec.describe ProjectTeam do
end
end
- describe '#add_users' do
+ describe '#add_members' do
let(:user1) { create(:user) }
let(:user2) { create(:user) }
let(:project) { create(:project) }
it 'add the given users to the team' do
- project.team.add_users([user1, user2], :reporter)
+ project.team.add_members([user1, user2], :reporter)
expect(project.team.reporter?(user1)).to be(true)
expect(project.team.reporter?(user2)).to be(true)
@@ -265,7 +265,7 @@ RSpec.describe ProjectTeam do
context 'when `tasks_to_be_done` and `tasks_project_id` are passed' do
before do
- project.team.add_users([user1], :developer, tasks_to_be_done: %w(ci code), tasks_project_id: project.id)
+ project.team.add_members([user1], :developer, tasks_to_be_done: %w(ci code), tasks_project_id: project.id)
end
it 'creates a member_task with the correct attributes', :aggregate_failures do
@@ -277,12 +277,12 @@ RSpec.describe ProjectTeam do
end
end
- describe '#add_user' do
+ describe '#add_member' do
let(:user) { create(:user) }
let(:project) { create(:project) }
it 'add the given user to the team' do
- project.team.add_user(user, :reporter)
+ project.team.add_member(user, :reporter)
expect(project.team.reporter?(user)).to be(true)
end
diff --git a/spec/models/project_tracing_setting_spec.rb b/spec/models/project_tracing_setting_spec.rb
deleted file mode 100644
index a7e4e557b25..00000000000
--- a/spec/models/project_tracing_setting_spec.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ProjectTracingSetting do
- describe '#external_url' do
- let_it_be(:project) { create(:project) }
-
- let(:tracing_setting) { project.build_tracing_setting }
-
- describe 'Validations' do
- describe 'external_url' do
- it 'accepts a valid url' do
- tracing_setting.external_url = 'https://gitlab.com'
-
- expect(tracing_setting).to be_valid
- end
-
- it 'fails with an invalid url' do
- tracing_setting.external_url = 'gitlab.com'
-
- expect(tracing_setting).to be_invalid
- end
-
- it 'fails with a blank string' do
- tracing_setting.external_url = nil
-
- expect(tracing_setting).to be_invalid
- end
-
- it 'sanitizes the url' do
- tracing_setting.external_url = %{https://replaceme.com/'><script>alert(document.cookie)</script>}
-
- expect(tracing_setting).to be_valid
- expect(tracing_setting.external_url).to eq(%{https://replaceme.com/'&gt;})
- end
- end
- end
- end
-end
diff --git a/spec/models/projects/import_export/relation_export_spec.rb b/spec/models/projects/import_export/relation_export_spec.rb
new file mode 100644
index 00000000000..c74ca82e161
--- /dev/null
+++ b/spec/models/projects/import_export/relation_export_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ImportExport::RelationExport, type: :model do
+ subject { create(:project_relation_export) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:project_export_job) }
+ it { is_expected.to have_one(:upload) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project_export_job) }
+ it { is_expected.to validate_presence_of(:relation) }
+ it { is_expected.to validate_uniqueness_of(:relation).scoped_to(:project_export_job_id) }
+ it { is_expected.to validate_presence_of(:status) }
+ it { is_expected.to validate_numericality_of(:status).only_integer }
+ it { is_expected.to validate_length_of(:relation).is_at_most(255) }
+ it { is_expected.to validate_length_of(:jid).is_at_most(255) }
+ it { is_expected.to validate_length_of(:export_error).is_at_most(300) }
+ end
+end
diff --git a/spec/models/projects/import_export/relation_export_upload_spec.rb b/spec/models/projects/import_export/relation_export_upload_spec.rb
new file mode 100644
index 00000000000..c0014c5a14c
--- /dev/null
+++ b/spec/models/projects/import_export/relation_export_upload_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ImportExport::RelationExportUpload, type: :model do
+ subject { described_class.new(relation_export: project_relation_export) }
+
+ let_it_be(:project_relation_export) { create(:project_relation_export) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:relation_export) }
+ end
+
+ it 'stores export file' do
+ stub_uploads_object_storage(ImportExportUploader, enabled: false)
+
+ filename = 'labels.tar.gz'
+ subject.export_file = fixture_file_upload("spec/fixtures/gitlab/import_export/#{filename}")
+
+ subject.save!
+
+ url = "/uploads/-/system/projects/import_export/relation_export_upload/export_file/#{subject.id}/#{filename}"
+ expect(subject.export_file.url).to eq(url)
+ end
+end
diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb
index 366de809bed..a3fc09b31fb 100644
--- a/spec/models/protected_branch_spec.rb
+++ b/spec/models/protected_branch_spec.rb
@@ -190,6 +190,14 @@ RSpec.describe ProtectedBranch do
expect(described_class).not_to receive(:matching)
expect(described_class.protected?(project, protected_branch.name)).to eq(true)
end
+
+ it 'sets expires_in for a cache key' do
+ cache_key = described_class.protected_ref_cache_key(project, protected_branch.name)
+
+ expect(Rails.cache).to receive(:fetch).with(cache_key, expires_in: 1.hour)
+
+ described_class.protected?(project, protected_branch.name)
+ end
end
end
diff --git a/spec/models/remote_mirror_spec.rb b/spec/models/remote_mirror_spec.rb
index d2d7859e726..51351c9fdd1 100644
--- a/spec/models/remote_mirror_spec.rb
+++ b/spec/models/remote_mirror_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe RemoteMirror, :mailer do
include GitHelpers
+ before do
+ stub_feature_flags(remote_mirror_no_delay: false)
+ end
+
describe 'URL validation' do
context 'with a valid URL' do
it 'is valid' do
@@ -343,6 +347,20 @@ RSpec.describe RemoteMirror, :mailer do
remote_mirror.sync
end
+
+ context 'when remote_mirror_no_delay is enabled' do
+ before do
+ stub_feature_flags(remote_mirror_no_delay: true)
+ end
+
+ it 'schedules a RepositoryUpdateRemoteMirrorWorker to run now' do
+ remote_mirror.last_update_started_at = Time.current - 30.seconds
+
+ expect(RepositoryUpdateRemoteMirrorWorker).to receive(:perform_async).with(remote_mirror.id, Time.current)
+
+ remote_mirror.sync
+ end
+ end
end
end
end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index e1d903a40cf..11323c40d28 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -125,11 +125,11 @@ RSpec.describe Repository do
let(:latest_tag) { 'v0.0.0' }
before do
- rugged_repo(repository).tags.create(latest_tag, repository.commit.id)
+ repository.add_tag(user, latest_tag, repository.commit.id)
end
after do
- rugged_repo(repository).tags.delete(latest_tag)
+ repository.rm_tag(user, latest_tag)
end
context 'desc' do
@@ -150,16 +150,13 @@ RSpec.describe Repository do
subject { repository.tags_sorted_by('updated_asc').map(&:name) & (tags_to_compare + [annotated_tag_name]) }
before do
- options = { message: 'test tag message\n',
- tagger: { name: 'John Smith', email: 'john@gmail.com' } }
-
- rugged_repo(repository).tags.create(annotated_tag_name, 'a48e4fc218069f68ef2e769dd8dfea3991362175', **options)
+ repository.add_tag(user, annotated_tag_name, 'a48e4fc218069f68ef2e769dd8dfea3991362175', 'test tag message\n')
end
it { is_expected.to eq(['v1.0.0', 'v1.1.0', annotated_tag_name]) }
after do
- rugged_repo(repository).tags.delete(annotated_tag_name)
+ repository.rm_tag(user, annotated_tag_name)
end
end
end
@@ -258,21 +255,10 @@ RSpec.describe Repository do
end
context 'with a commit with invalid UTF-8 path' do
- def create_commit_with_invalid_utf8_path
- rugged = rugged_repo(repository)
- blob_id = Rugged::Blob.from_buffer(rugged, "some contents")
- tree_builder = Rugged::Tree::Builder.new(rugged)
- tree_builder.insert({ oid: blob_id, name: "hello\x80world", filemode: 0100644 })
- tree_id = tree_builder.write
- user = { email: "jcai@gitlab.com", time: Time.current.to_time, name: "John Cai" }
-
- Rugged::Commit.create(rugged, message: 'some commit message', parents: [rugged.head.target.oid], tree: tree_id, committer: user, author: user)
- end
-
it 'does not raise an error' do
- commit = create_commit_with_invalid_utf8_path
+ response = create_file_in_repo(project, 'master', 'master', "hello\x80world", 'some contents')
- expect { repository.list_last_commits_for_tree(commit, '.', offset: 0) }.not_to raise_error
+ expect { repository.list_last_commits_for_tree(response[:result], '.', offset: 0) }.not_to raise_error
end
end
end
@@ -2262,20 +2248,12 @@ RSpec.describe Repository do
describe '#branch_count' do
it 'returns the number of branches' do
expect(repository.branch_count).to be_an(Integer)
-
- rugged_count = rugged_repo(repository).branches.count
-
- expect(repository.branch_count).to eq(rugged_count)
end
end
describe '#tag_count' do
it 'returns the number of tags' do
expect(repository.tag_count).to be_an(Integer)
-
- rugged_count = rugged_repo(repository).tags.count
-
- expect(repository.tag_count).to eq(rugged_count)
end
end
@@ -2757,6 +2735,33 @@ RSpec.describe Repository do
end
end
+ describe '#changelog_config' do
+ let(:user) { create(:user) }
+ let(:changelog_config_path) { Gitlab::Changelog::Config::DEFAULT_FILE_PATH }
+
+ before do
+ repository.create_file(
+ user,
+ changelog_config_path,
+ 'CONTENT',
+ message: '...',
+ branch_name: 'master'
+ )
+ end
+
+ context 'when there is a changelog_config_path at the commit' do
+ it 'returns the content' do
+ expect(repository.changelog_config(repository.commit.sha, changelog_config_path)).to eq('CONTENT')
+ end
+ end
+
+ context 'when there is no changelog_config_path at the commit' do
+ it 'returns nil' do
+ expect(repository.changelog_config(repository.commit.parent.sha, changelog_config_path)).to be_nil
+ end
+ end
+ end
+
describe '#route_map_for' do
before do
repository.create_file(User.last, '.gitlab/route-map.yml', 'CONTENT', message: 'Add .gitlab/route-map.yml', branch_name: 'master')
@@ -2776,8 +2781,7 @@ RSpec.describe Repository do
end
def create_remote_branch(remote_name, branch_name, target)
- rugged = rugged_repo(repository)
- rugged.references.create("refs/remotes/#{remote_name}/#{branch_name}", target.id)
+ repository.write_ref("refs/remotes/#{remote_name}/#{branch_name}", target.id)
end
shared_examples '#ancestor?' do
diff --git a/spec/models/ssh_host_key_spec.rb b/spec/models/ssh_host_key_spec.rb
index 4b756846598..0348aab9f97 100644
--- a/spec/models/ssh_host_key_spec.rb
+++ b/spec/models/ssh_host_key_spec.rb
@@ -26,6 +26,9 @@ RSpec.describe SshHostKey do
'Ebi86VjJRi2sOuYoXQU1'
end
+ let(:ssh_key1) { Gitlab::SSHPublicKey.new(key1) }
+ let(:ssh_key2) { Gitlab::SSHPublicKey.new(key2) }
+
# Purposefully ordered so that `sort` will make changes
let(:known_hosts) do
<<~EOF
@@ -88,10 +91,17 @@ RSpec.describe SshHostKey do
it 'returns an array of indexed fingerprints when the cache is filled' do
stub_reactive_cache(ssh_host_key, known_hosts: known_hosts)
- expected = [key1, key2]
- .map { |data| Gitlab::SSHPublicKey.new(data) }
+ expected = [ssh_key1, ssh_key2]
.each_with_index
- .map { |key, i| { bits: key.bits, fingerprint: key.fingerprint, type: key.type, index: i } }
+ .map do |key, i|
+ {
+ bits: key.bits,
+ fingerprint: key.fingerprint,
+ fingerprint_sha256: key.fingerprint_sha256,
+ type: key.type,
+ index: i
+ }
+ end
expect(ssh_host_key.fingerprints.as_json).to eq(expected)
end
@@ -107,8 +117,16 @@ RSpec.describe SshHostKey do
expect(ssh_host_key.fingerprints.as_json).to eq(
[
- { bits: 2048, fingerprint: Gitlab::SSHPublicKey.new(key1).fingerprint, type: :rsa, index: 0 },
- { bits: 2048, fingerprint: Gitlab::SSHPublicKey.new(key2).fingerprint, type: :rsa, index: 1 }
+ { bits: 2048,
+ fingerprint: ssh_key1.fingerprint,
+ fingerprint_sha256: ssh_key1.fingerprint_sha256,
+ type: :rsa,
+ index: 0 },
+ { bits: 2048,
+ fingerprint: ssh_key2.fingerprint,
+ fingerprint_sha256: ssh_key2.fingerprint_sha256,
+ type: :rsa,
+ index: 1 }
]
)
end
@@ -116,6 +134,19 @@ RSpec.describe SshHostKey do
it 'returns an empty array when the cache is empty' do
expect(ssh_host_key.fingerprints).to eq([])
end
+
+ context 'when FIPS is enabled', :fips_mode do
+ it 'only includes SHA256 fingerprint' do
+ stub_reactive_cache(ssh_host_key, known_hosts: known_hosts)
+
+ expect(ssh_host_key.fingerprints.as_json).to eq(
+ [
+ { bits: 2048, fingerprint_sha256: ssh_key1.fingerprint_sha256, type: :rsa, index: 0 },
+ { bits: 2048, fingerprint_sha256: ssh_key2.fingerprint_sha256, type: :rsa, index: 1 }
+ ]
+ )
+ end
+ end
end
describe '#host_keys_changed?' do
diff --git a/spec/models/todo_spec.rb b/spec/models/todo_spec.rb
index 651e2cf273f..7df22078c6d 100644
--- a/spec/models/todo_spec.rb
+++ b/spec/models/todo_spec.rb
@@ -114,6 +114,26 @@ RSpec.describe Todo do
end
end
+ describe '#for_issue_or_work_item?' do
+ it 'returns true when target is an Issue' do
+ subject.target_type = 'Issue'
+
+ expect(subject.for_issue_or_work_item?).to be_truthy
+ end
+
+ it 'returns true when target is a WorkItem' do
+ subject.target_type = 'WorkItem'
+
+ expect(subject.for_issue_or_work_item?).to be_truthy
+ end
+
+ it 'returns false when target is not an Issue' do
+ subject.target_type = 'DesignManagement::Design'
+
+ expect(subject.for_issue_or_work_item?).to be_falsey
+ end
+ end
+
describe '#target' do
context 'for commits' do
let(:project) { create(:project, :repository) }
diff --git a/spec/models/tree_spec.rb b/spec/models/tree_spec.rb
index b7a8276ec55..20d786f311f 100644
--- a/spec/models/tree_spec.rb
+++ b/spec/models/tree_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Tree do
- let(:repository) { create(:project, :repository).repository }
+ let_it_be(:repository) { create(:project, :repository).repository }
+
let(:sha) { repository.root_ref }
subject(:tree) { described_class.new(repository, '54fcc214') }
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index abc02dd1f55..6d2ba66d5f4 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -136,6 +136,7 @@ RSpec.describe User do
it { is_expected.to have_many(:timelogs) }
it { is_expected.to have_many(:callouts).class_name('Users::Callout') }
it { is_expected.to have_many(:group_callouts).class_name('Users::GroupCallout') }
+ it { is_expected.to have_many(:namespace_callouts).class_name('Users::NamespaceCallout') }
describe '#user_detail' do
it 'does not persist `user_detail` by default' do
@@ -1109,6 +1110,20 @@ RSpec.describe User do
.to contain_exactly(user1, user2)
end
end
+
+ describe '.order_recent_last_activity' do
+ it 'sorts users by activity and id to make the order deterministic' do
+ expect(described_class.order_recent_last_activity.to_sql).to include(
+ 'ORDER BY "users"."last_activity_on" DESC NULLS LAST, "users"."id" ASC')
+ end
+ end
+
+ describe '.order_oldest_last_activity' do
+ it 'sorts users by activity and id to make the order deterministic' do
+ expect(described_class.order_oldest_last_activity.to_sql).to include(
+ 'ORDER BY "users"."last_activity_on" ASC NULLS FIRST, "users"."id" DESC')
+ end
+ end
end
context 'strip attributes' do
@@ -2278,7 +2293,7 @@ RSpec.describe User do
@group = create :group
@group.add_owner(@user)
- @group.add_user(@user2, GroupMember::OWNER)
+ @group.add_member(@user2, GroupMember::OWNER)
end
it { expect(@user2.several_namespaces?).to be_truthy }
@@ -2729,131 +2744,149 @@ RSpec.describe User do
end
end
- describe '.search' do
- let_it_be(:user) { create(:user, name: 'user', username: 'usern', email: 'email@example.com') }
- let_it_be(:public_email) do
- create(:email, :confirmed, user: user, email: 'publicemail@example.com').tap do |email|
- user.update!(public_email: email.email)
+ shared_examples '.search examples' do
+ describe '.search' do
+ let_it_be(:user) { create(:user, name: 'user', username: 'usern', email: 'email@example.com') }
+ let_it_be(:public_email) do
+ create(:email, :confirmed, user: user, email: 'publicemail@example.com').tap do |email|
+ user.update!(public_email: email.email)
+ end
end
- end
- let_it_be(:user2) { create(:user, name: 'user name', username: 'username', email: 'someemail@example.com') }
- let_it_be(:user3) { create(:user, name: 'us', username: 'se', email: 'foo@example.com') }
- let_it_be(:email) { create(:email, user: user, email: 'alias@example.com') }
+ let_it_be(:user2) { create(:user, name: 'user name', username: 'username', email: 'someemail@example.com') }
+ let_it_be(:user3) { create(:user, name: 'us', username: 'se', email: 'foo@example.com') }
+ let_it_be(:email) { create(:email, user: user, email: 'alias@example.com') }
- describe 'name user and email relative ordering' do
- let_it_be(:named_alexander) { create(:user, name: 'Alexander Person', username: 'abcd', email: 'abcd@example.com') }
- let_it_be(:username_alexand) { create(:user, name: 'Joao Alexander', username: 'Alexand', email: 'joao@example.com') }
+ describe 'name user and email relative ordering' do
+ let_it_be(:named_alexander) { create(:user, name: 'Alexander Person', username: 'abcd', email: 'abcd@example.com') }
+ let_it_be(:username_alexand) { create(:user, name: 'Joao Alexander', username: 'Alexand', email: 'joao@example.com') }
- it 'prioritizes exact matches' do
- expect(described_class.search('Alexand')).to eq([username_alexand, named_alexander])
- end
+ it 'prioritizes exact matches' do
+ expect(described_class.search('Alexand')).to eq([username_alexand, named_alexander])
+ end
- it 'falls back to ordering by name' do
- expect(described_class.search('Alexander')).to eq([named_alexander, username_alexand])
+ it 'falls back to ordering by name' do
+ expect(described_class.search('Alexander')).to eq([named_alexander, username_alexand])
+ end
end
- end
- describe 'name matching' do
- it 'returns users with a matching name with exact match first' do
- expect(described_class.search(user.name)).to eq([user, user2])
- end
+ describe 'name matching' do
+ it 'returns users with a matching name with exact match first' do
+ expect(described_class.search(user.name)).to eq([user, user2])
+ end
- it 'returns users with a partially matching name' do
- expect(described_class.search(user.name[0..2])).to eq([user, user2])
- end
+ it 'returns users with a partially matching name' do
+ expect(described_class.search(user.name[0..2])).to eq([user, user2])
+ end
- it 'returns users with a matching name regardless of the casing' do
- expect(described_class.search(user2.name.upcase)).to eq([user2])
- end
+ it 'returns users with a matching name regardless of the casing' do
+ expect(described_class.search(user2.name.upcase)).to eq([user2])
+ end
- it 'returns users with a exact matching name shorter than 3 chars' do
- expect(described_class.search(user3.name)).to eq([user3])
- end
+ it 'returns users with an exact matching name shorter than 3 chars' do
+ expect(described_class.search(user3.name)).to eq([user3])
+ end
- it 'returns users with a exact matching name shorter than 3 chars regardless of the casing' do
- expect(described_class.search(user3.name.upcase)).to eq([user3])
- end
+ it 'returns users with an exact matching name shorter than 3 chars regardless of the casing' do
+ expect(described_class.search(user3.name.upcase)).to eq([user3])
+ end
- context 'when use_minimum_char_limit is false' do
- it 'returns users with a partially matching name' do
- expect(described_class.search('u', use_minimum_char_limit: false)).to eq([user3, user, user2])
+ context 'when use_minimum_char_limit is false' do
+ it 'returns users with a partially matching name' do
+ expect(described_class.search('u', use_minimum_char_limit: false)).to eq([user3, user, user2])
+ end
end
end
- end
- describe 'email matching' do
- it 'returns users with a matching public email' do
- expect(described_class.search(user.public_email)).to match_array([user])
- end
+ describe 'email matching' do
+ it 'returns users with a matching public email' do
+ expect(described_class.search(user.public_email)).to match_array([user])
+ end
- it 'does not return users with a partially matching public email' do
- expect(described_class.search(user.public_email[1...-1])).to be_empty
- end
+ it 'does not return users with a partially matching public email' do
+ expect(described_class.search(user.public_email[1...-1])).to be_empty
+ end
- it 'returns users with a matching public email regardless of the casing' do
- expect(described_class.search(user.public_email.upcase)).to match_array([user])
- end
+ it 'returns users with a matching public email regardless of the casing' do
+ expect(described_class.search(user.public_email.upcase)).to match_array([user])
+ end
+
+ it 'does not return users with a matching private email' do
+ expect(described_class.search(user.email)).to be_empty
+ expect(described_class.search(email.email)).to be_empty
+ end
+
+ context 'with private emails search' do
+ it 'returns users with matching private email' do
+ expect(described_class.search(user.email, with_private_emails: true)).to match_array([user])
+ end
- it 'does not return users with a matching private email' do
- expect(described_class.search(user.email)).to be_empty
- expect(described_class.search(email.email)).to be_empty
+ it 'returns users with matching private secondary email' do
+ expect(described_class.search(email.email, with_private_emails: true)).to match_array([user])
+ end
+ end
end
- context 'with private emails search' do
- it 'returns users with matching private email' do
- expect(described_class.search(user.email, with_private_emails: true)).to match_array([user])
+ describe 'username matching' do
+ it 'returns users with a matching username' do
+ expect(described_class.search(user.username)).to eq([user, user2])
end
- it 'returns users with matching private secondary email' do
- expect(described_class.search(email.email, with_private_emails: true)).to match_array([user])
+ it 'returns users with a matching username starting with a @' do
+ expect(described_class.search("@#{user.username}")).to eq([user, user2])
end
- end
- end
- describe 'username matching' do
- it 'returns users with a matching username' do
- expect(described_class.search(user.username)).to eq([user, user2])
- end
+ it 'returns users with a partially matching username' do
+ expect(described_class.search(user.username[0..2])).to eq([user, user2])
+ end
- it 'returns users with a matching username starting with a @' do
- expect(described_class.search("@#{user.username}")).to eq([user, user2])
- end
+ it 'returns users with a partially matching username starting with @' do
+ expect(described_class.search("@#{user.username[0..2]}")).to eq([user, user2])
+ end
- it 'returns users with a partially matching username' do
- expect(described_class.search(user.username[0..2])).to eq([user, user2])
- end
+ it 'returns users with a matching username regardless of the casing' do
+ expect(described_class.search(user2.username.upcase)).to eq([user2])
+ end
- it 'returns users with a partially matching username starting with @' do
- expect(described_class.search("@#{user.username[0..2]}")).to eq([user, user2])
- end
+ it 'returns users with an exact matching username shorter than 3 chars' do
+ expect(described_class.search(user3.username)).to eq([user3])
+ end
- it 'returns users with a matching username regardless of the casing' do
- expect(described_class.search(user2.username.upcase)).to eq([user2])
- end
+ it 'returns users with an exact matching username shorter than 3 chars regardless of the casing' do
+ expect(described_class.search(user3.username.upcase)).to eq([user3])
+ end
- it 'returns users with a exact matching username shorter than 3 chars' do
- expect(described_class.search(user3.username)).to eq([user3])
+ context 'when use_minimum_char_limit is false' do
+ it 'returns users with a partially matching username' do
+ expect(described_class.search('se', use_minimum_char_limit: false)).to eq([user3, user, user2])
+ end
+ end
end
- it 'returns users with a exact matching username shorter than 3 chars regardless of the casing' do
- expect(described_class.search(user3.username.upcase)).to eq([user3])
+ it 'returns no matches for an empty string' do
+ expect(described_class.search('')).to be_empty
end
- context 'when use_minimum_char_limit is false' do
- it 'returns users with a partially matching username' do
- expect(described_class.search('se', use_minimum_char_limit: false)).to eq([user3, user, user2])
- end
+ it 'returns no matches for nil' do
+ expect(described_class.search(nil)).to be_empty
end
end
+ end
- it 'returns no matches for an empty string' do
- expect(described_class.search('')).to be_empty
+ context 'when the use_keyset_aware_user_search_query FF is on' do
+ before do
+ stub_feature_flags(use_keyset_aware_user_search_query: true)
end
- it 'returns no matches for nil' do
- expect(described_class.search(nil)).to be_empty
+ it_behaves_like '.search examples'
+ end
+
+ context 'when the use_keyset_aware_user_search_query FF is off' do
+ before do
+ stub_feature_flags(use_keyset_aware_user_search_query: false)
end
+
+ it_behaves_like '.search examples'
end
describe '.user_search_minimum_char_limit' do
@@ -3001,7 +3034,7 @@ RSpec.describe User do
it 'has all ssh keys' do
user = create :user
- key = create :key, key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD33bWLBxu48Sev9Fert1yzEO4WGcWglWF7K/AwblIUFselOt/QdOL9DSjpQGxLagO1s9wl53STIO8qGS4Ms0EJZyIXOEFMjFJ5xmjSy+S37By4sG7SsltQEHMxtbtFOaW5LV2wCrX+rUsRNqLMamZjgjcPO0/EgGCXIGMAYW4O7cwGZdXWYIhQ1Vwy+CsVMDdPkPgBXqK7nR/ey8KMs8ho5fMNgB5hBw/AL9fNGhRw3QTD6Q12Nkhl4VZES2EsZqlpNnJttnPdp847DUsT6yuLRlfiQfz5Cn9ysHFdXObMN5VYIiPFwHeYCZp1X2S4fDZooRE8uOLTfxWHPXwrhqSH", user_id: user.id
+ key = create :key_without_comment, user_id: user.id
expect(user.all_ssh_keys).to include(a_string_starting_with(key.key))
end
@@ -3428,6 +3461,15 @@ RSpec.describe User do
end
end
+ describe '#followed_by?' do
+ it 'checks if followed by another user' do
+ follower = create :user
+ followee = create :user
+
+ expect { follower.follow(followee) }.to change { followee.followed_by?(follower) }.from(false).to(true)
+ end
+ end
+
describe '#follow' do
it 'follow another user' do
user = create :user
@@ -3518,49 +3560,45 @@ RSpec.describe User do
end
describe '#sort_by_attribute' do
- before do
- described_class.delete_all
- @user = create :user, created_at: Date.today, current_sign_in_at: Date.today, name: 'Alpha'
- @user1 = create :user, created_at: Date.today - 1, current_sign_in_at: Date.today - 1, name: 'Omega'
- @user2 = create :user, created_at: Date.today - 2, name: 'Beta'
- end
+ let_it_be(:user) { create :user, created_at: Date.today, current_sign_in_at: Date.today, username: 'user0' }
+ let_it_be(:user1) { create :user, created_at: Date.today - 1, last_activity_on: Date.today - 1, current_sign_in_at: Date.today - 1, username: 'user1' }
+ let_it_be(:user2) { create :user, created_at: Date.today - 2, username: 'user2' }
+ let_it_be(:user3) { create :user, created_at: Date.today - 3, last_activity_on: Date.today, username: "user3" }
context 'when sort by recent_sign_in' do
let(:users) { described_class.sort_by_attribute('recent_sign_in') }
- it 'sorts users by recent sign-in time' do
- expect(users.first).to eq(@user)
- expect(users.second).to eq(@user1)
- end
-
- it 'pushes users who never signed in to the end' do
- expect(users.third).to eq(@user2)
+ it 'sorts users by recent sign-in time with user that never signed in at the end' do
+ expect(users).to eq([user, user1, user2, user3])
end
end
context 'when sort by oldest_sign_in' do
let(:users) { described_class.sort_by_attribute('oldest_sign_in') }
- it 'sorts users by the oldest sign-in time' do
- expect(users.first).to eq(@user1)
- expect(users.second).to eq(@user)
- end
-
- it 'pushes users who never signed in to the end' do
- expect(users.third).to eq(@user2)
+ it 'sorts users by the oldest sign-in time with users that never signed in at the end' do
+ expect(users).to eq([user1, user, user2, user3])
end
end
it 'sorts users in descending order by their creation time' do
- expect(described_class.sort_by_attribute('created_desc').first).to eq(@user)
+ expect(described_class.sort_by_attribute('created_desc')).to eq([user, user1, user2, user3])
end
it 'sorts users in ascending order by their creation time' do
- expect(described_class.sort_by_attribute('created_asc').first).to eq(@user2)
+ expect(described_class.sort_by_attribute('created_asc')).to eq([user3, user2, user1, user])
end
it 'sorts users by id in descending order when nil is passed' do
- expect(described_class.sort_by_attribute(nil).first).to eq(@user2)
+ expect(described_class.sort_by_attribute(nil)).to eq([user3, user2, user1, user])
+ end
+
+ it 'sorts users by latest activity descending, nulls last ordered by ascending id' do
+ expect(described_class.sort_by_attribute('last_activity_on_desc')).to eq([user3, user1, user, user2])
+ end
+
+ it 'sorts users by latest activity ascending, nulls first ordered by descending id' do
+ expect(described_class.sort_by_attribute('last_activity_on_asc')).to eq([user2, user, user1, user3])
end
end
@@ -3824,7 +3862,7 @@ RSpec.describe User do
let!(:project) { create(:project, group: project_group) }
before do
- private_group.add_user(user, Gitlab::Access::MAINTAINER)
+ private_group.add_member(user, Gitlab::Access::MAINTAINER)
project.add_maintainer(user)
end
@@ -3851,7 +3889,7 @@ RSpec.describe User do
let_it_be(:parent_group) do
create(:group).tap do |g|
- g.add_user(user, Gitlab::Access::MAINTAINER)
+ g.add_member(user, Gitlab::Access::MAINTAINER)
end
end
@@ -4279,7 +4317,7 @@ RSpec.describe User do
let!(:runner) { create(:ci_runner, :group, groups: [group]) }
def add_user(access)
- group.add_user(user, access)
+ group.add_member(user, access)
end
it_behaves_like :group_member
@@ -4369,7 +4407,7 @@ RSpec.describe User do
let!(:project_runner) { create(:ci_runner, :project, projects: [project]) }
def add_user(access)
- project.add_user(user, access)
+ project.add_member(user, access)
end
it_behaves_like :project_member
@@ -4391,8 +4429,8 @@ RSpec.describe User do
let!(:another_user) { create(:user) }
def add_user(access)
- subgroup.add_user(user, access)
- group.add_user(another_user, :owner)
+ subgroup.add_member(user, access)
+ group.add_member(another_user, :owner)
end
it_behaves_like :group_member
@@ -4749,8 +4787,8 @@ RSpec.describe User do
let(:group2) { create :group, require_two_factor_authentication: true, two_factor_grace_period: 32 }
before do
- group1.add_user(user, GroupMember::OWNER)
- group2.add_user(user, GroupMember::OWNER)
+ group1.add_member(user, GroupMember::OWNER)
+ group2.add_member(user, GroupMember::OWNER)
user.update_two_factor_requirement
end
@@ -4769,7 +4807,7 @@ RSpec.describe User do
let!(:group1a) { create :group, parent: group1 }
before do
- group1a.add_user(user, GroupMember::OWNER)
+ group1a.add_member(user, GroupMember::OWNER)
user.update_two_factor_requirement
end
@@ -4784,7 +4822,7 @@ RSpec.describe User do
let!(:group1a) { create :group, require_two_factor_authentication: true, parent: group1 }
before do
- group1.add_user(user, GroupMember::OWNER)
+ group1.add_member(user, GroupMember::OWNER)
user.update_two_factor_requirement
end
@@ -4805,7 +4843,7 @@ RSpec.describe User do
group_access: ProjectGroupLink.default_access
)
- group2.add_user(user, GroupMember::OWNER)
+ group2.add_member(user, GroupMember::OWNER)
end
it 'does not require 2FA' do
@@ -4819,7 +4857,7 @@ RSpec.describe User do
let(:group) { create :group }
before do
- group.add_user(user, GroupMember::OWNER)
+ group.add_member(user, GroupMember::OWNER)
user.update_two_factor_requirement
end
@@ -4848,8 +4886,8 @@ RSpec.describe User do
let(:user) { create :user }
before do
- group.add_user(user, GroupMember::OWNER)
- group_not_requiring_2FA.add_user(user, GroupMember::OWNER)
+ group.add_member(user, GroupMember::OWNER)
+ group_not_requiring_2FA.add_member(user, GroupMember::OWNER)
end
context 'when user is direct member of group requiring 2FA' do
@@ -5884,8 +5922,44 @@ RSpec.describe User do
end
end
+ describe '#authenticatable_salt' do
+ let(:user) { create(:user) }
+
+ subject(:authenticatable_salt) { user.authenticatable_salt }
+
+ it 'uses password_salt' do
+ expect(authenticatable_salt).to eq(user.password_salt)
+ end
+
+ context 'when the encrypted_password is an unknown type' do
+ let(:encrypted_password) { '$argon2i$v=19$m=512,t=4,p=2$eM+ZMyYkpDRGaI3xXmuNcQ$c5DeJg3eb5dskVt1mDdxfw' }
+
+ before do
+ user.update_attribute(:encrypted_password, encrypted_password)
+ end
+
+ it 'returns the first 30 characters of the encrypted_password' do
+ expect(authenticatable_salt).to eq(encrypted_password[0, 29])
+ end
+ end
+
+ context 'when pbkdf2_password_encryption is disabled' do
+ before do
+ stub_feature_flags(pbkdf2_password_encryption: false)
+ end
+
+ it 'returns the first 30 characters of the encrypted_password' do
+ expect(authenticatable_salt).to eq(user.encrypted_password[0, 29])
+ end
+ end
+ end
+
+ def compare_pbkdf2_password(user, password)
+ Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512.compare(user.encrypted_password, password)
+ end
+
describe '#valid_password?' do
- subject { user.valid_password?(password) }
+ subject(:validate_password) { user.valid_password?(password) }
context 'user with password not in disallowed list' do
let(:user) { create(:user) }
@@ -5898,6 +5972,15 @@ RSpec.describe User do
it { is_expected.to be_falsey }
end
+
+ context 'when pbkdf2_password_encryption is disabled and the user password is pbkdf2+sha512' do
+ it 'does not validate correctly' do
+ user # Create the user while the feature is enabled
+ stub_feature_flags(pbkdf2_password_encryption: false)
+
+ expect(validate_password).to be_falsey
+ end
+ end
end
context 'user with disallowed password' do
@@ -5912,6 +5995,174 @@ RSpec.describe User do
it { is_expected.to be_falsey }
end
end
+
+ context 'user with a bcrypt password hash' do
+ # Plaintext password 'eiFubohV6iro'
+ let(:encrypted_password) { '$2a$10$xLTxCKOa75IU4RQGqqOrTuZOgZdJEzfSzjG6ZSEi/C31TB/yLZYpi' }
+ let(:user) { create(:user, encrypted_password: encrypted_password) }
+
+ shared_examples 'not re-encrypting with PBKDF2' do
+ it 'does not re-encrypt with PBKDF2' do
+ validate_password
+
+ expect(user.reload.encrypted_password).to eq(encrypted_password)
+ end
+ end
+
+ context 'using the wrong password' do
+ let(:password) { 'WRONG PASSWORD' }
+
+ it { is_expected.to be_falsey }
+ it_behaves_like 'not re-encrypting with PBKDF2'
+
+ context 'when pbkdf2_password_encryption is disabled' do
+ before do
+ stub_feature_flags(pbkdf2_password_encryption: false)
+ end
+
+ it { is_expected.to be_falsey }
+ it_behaves_like 'not re-encrypting with PBKDF2'
+ end
+ end
+
+ context 'using the correct password' do
+ let(:password) { 'eiFubohV6iro' }
+
+ it { is_expected.to be_truthy }
+
+ it 'validates the password and re-encrypts with PBKDF2' do
+ validate_password
+
+ current_encrypted_password = user.reload.encrypted_password
+
+ expect(compare_pbkdf2_password(user, password)).to eq(true)
+ expect { ::BCrypt::Password.new(current_encrypted_password) }
+ .to raise_error(::BCrypt::Errors::InvalidHash)
+ end
+
+ context 'when pbkdf2_password_encryption is disabled' do
+ before do
+ stub_feature_flags(pbkdf2_password_encryption: false)
+ end
+
+ it { is_expected.to be_truthy }
+ it_behaves_like 'not re-encrypting with PBKDF2'
+ end
+
+ context 'when pbkdf2_password_encryption_write is disabled' do
+ before do
+ stub_feature_flags(pbkdf2_password_encryption_write: false)
+ end
+
+ it { is_expected.to be_truthy }
+ it_behaves_like 'not re-encrypting with PBKDF2'
+ end
+ end
+ end
+
+ context 'user with password hash that is neither PBKDF2 nor BCrypt' do
+ let(:user) { create(:user, encrypted_password: '$argon2i$v=19$m=512,t=4,p=2$eM+ZMyYkpDRGaI3xXmuNcQ$c5DeJg3eb5dskVt1mDdxfw') }
+ let(:password) { 'password' }
+
+ it { is_expected.to be_falsey }
+
+ context 'when pbkdf2_password_encryption is disabled' do
+ before do
+ stub_feature_flags(pbkdf2_password_encryption: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+
+ # This entire test section can be removed once the :pbkdf2_password_encryption feature flag is removed.
+ describe '#password=' do
+ let(:user) { create(:user) }
+ let(:password) { 'Oot5iechahqu' }
+
+ def compare_bcrypt_password(user, password)
+ Devise::Encryptor.compare(User, user.encrypted_password, password)
+ end
+
+ context 'when pbkdf2_password_encryption is enabled' do
+ it 'calls PBKDF2 digest and not the default Devise encryptor' do
+ expect(Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512).to receive(:digest).at_least(:once).and_call_original
+ expect(Devise::Encryptor).not_to receive(:digest)
+
+ user.password = password
+ end
+
+ it 'saves the password in PBKDF2 format' do
+ user.password = password
+ user.save!
+
+ expect(compare_pbkdf2_password(user, password)).to eq(true)
+ expect { compare_bcrypt_password(user, password) }.to raise_error(::BCrypt::Errors::InvalidHash)
+ end
+
+ context 'when pbkdf2_password_encryption_write is disabled' do
+ before do
+ stub_feature_flags(pbkdf2_password_encryption_write: false)
+ end
+
+ it 'calls default Devise encryptor and not the PBKDF2 encryptor' do
+ expect(Devise::Encryptor).to receive(:digest).at_least(:once).and_call_original
+ expect(Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512).not_to receive(:digest)
+
+ user.password = password
+ end
+ end
+ end
+
+ context 'when pbkdf2_password_encryption is disabled' do
+ before do
+ stub_feature_flags(pbkdf2_password_encryption: false)
+ end
+
+ it 'calls default Devise encryptor and not the PBKDF2 encryptor' do
+ expect(Devise::Encryptor).to receive(:digest).at_least(:once).and_call_original
+ expect(Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512).not_to receive(:digest)
+
+ user.password = password
+ end
+
+ it 'saves the password in BCrypt format' do
+ user.password = password
+ user.save!
+
+ expect { compare_pbkdf2_password(user, password) }.to raise_error Devise::Pbkdf2Encryptable::Encryptors::InvalidHash
+ expect(compare_bcrypt_password(user, password)).to eq(true)
+ end
+ end
+ end
+
+ describe '#password_strategy' do
+ let(:user) { create(:user, encrypted_password: encrypted_password) }
+
+ context 'with a PBKDF2+SHA512 encrypted password' do
+ let(:encrypted_password) { '$pbkdf2-sha512$20000$boHGAw0hEyI$DBA67J7zNZebyzLtLk2X9wRDbmj1LNKVGnZLYyz6PGrIDGIl45fl/BPH0y1TPZnV90A20i.fD9C3G9Bp8jzzOA' }
+
+ it 'extracts the correct strategy', :aggregate_failures do
+ expect(user.password_strategy).to eq(:pbkdf2_sha512)
+ end
+ end
+
+ context 'with a BCrypt encrypted password' do
+ let(:encrypted_password) { '$2a$10$xLTxCKOa75IU4RQGqqOrTuZOgZdJEzfSzjG6ZSEi/C31TB/yLZYpi' }
+
+ it 'extracts the correct strategy', :aggregate_failures do
+ expect(user.password_strategy).to eq(:bcrypt)
+ end
+ end
+
+ context 'with an unknown encrypted password' do
+ let(:encrypted_password) { '$pbkdf2-sha256$6400$.6UI/S.nXIk8jcbdHx3Fhg$98jZicV16ODfEsEZeYPGHU3kbrUrvUEXOPimVSQDD44' }
+
+ it 'returns unknown strategy' do
+ expect(user.password_strategy).to eq(:unknown)
+ end
+ end
end
describe '#password_expired?' do
@@ -6165,6 +6416,96 @@ RSpec.describe User do
end
end
+ describe 'Users::NamespaceCallout' do
+ describe '#dismissed_callout_for_namespace?' do
+ let_it_be(:user, refind: true) { create(:user) }
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:feature_name) { Users::NamespaceCallout.feature_names.each_key.first }
+
+ let(:query) do
+ { feature_name: feature_name, namespace: namespace }
+ end
+
+ def have_dismissed_callout
+ be_dismissed_callout_for_namespace(**query)
+ end
+
+ context 'when no callout dismissal record exists' do
+ it 'returns false when no ignore_dismissal_earlier_than is provided' do
+ expect(user).not_to have_dismissed_callout
+ end
+ end
+
+ context 'when dismissed callout exists' do
+ before_all do
+ create(:namespace_callout,
+ user: user,
+ namespace_id: namespace.id,
+ feature_name: feature_name,
+ dismissed_at: 4.months.ago)
+ end
+
+ it 'returns true when no ignore_dismissal_earlier_than is provided' do
+ expect(user).to have_dismissed_callout
+ end
+
+ it 'returns true when ignore_dismissal_earlier_than is earlier than dismissed_at' do
+ query[:ignore_dismissal_earlier_than] = 6.months.ago
+
+ expect(user).to have_dismissed_callout
+ end
+
+ it 'returns false when ignore_dismissal_earlier_than is later than dismissed_at' do
+ query[:ignore_dismissal_earlier_than] = 2.months.ago
+
+ expect(user).not_to have_dismissed_callout
+ end
+ end
+ end
+
+ describe '#find_or_initialize_namespace_callout' do
+ let_it_be(:user, refind: true) { create(:user) }
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:feature_name) { Users::NamespaceCallout.feature_names.each_key.first }
+
+ subject(:callout_with_source) do
+ user.find_or_initialize_namespace_callout(feature_name, namespace.id)
+ end
+
+ context 'when callout exists' do
+ let!(:callout) do
+ create(:namespace_callout, user: user, feature_name: feature_name, namespace_id: namespace.id)
+ end
+
+ it 'returns existing callout' do
+ expect(callout_with_source).to eq(callout)
+ end
+ end
+
+ context 'when callout does not exist' do
+ context 'when feature name is valid' do
+ it 'initializes a new callout' do
+ expect(callout_with_source)
+ .to be_a_new(Users::NamespaceCallout)
+ .and be_valid
+ end
+ end
+
+ context 'when feature name is not valid' do
+ let(:feature_name) { 'notvalid' }
+
+ it 'initializes a new callout' do
+ expect(callout_with_source).to be_a_new(Users::NamespaceCallout)
+ end
+
+ it 'is not valid' do
+ expect(callout_with_source).not_to be_valid
+ end
+ end
+ end
+ end
+ end
+
describe '#dismissed_callout_for_group?' do
let_it_be(:user, refind: true) { create(:user) }
let_it_be(:group) { create(:group) }
diff --git a/spec/models/users/namespace_callout_spec.rb b/spec/models/users/namespace_callout_spec.rb
new file mode 100644
index 00000000000..f8207f2abc8
--- /dev/null
+++ b/spec/models/users/namespace_callout_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::NamespaceCallout do
+ let_it_be(:user) { create_default(:user) }
+ let_it_be(:namespace) { create_default(:namespace) }
+ let_it_be(:callout) { create(:namespace_callout) }
+
+ it_behaves_like 'having unique enum values'
+
+ describe 'relationships' do
+ it { is_expected.to belong_to(:namespace) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:namespace) }
+ it { is_expected.to validate_presence_of(:user) }
+ it { is_expected.to validate_presence_of(:feature_name) }
+
+ specify do
+ is_expected.to validate_uniqueness_of(:feature_name)
+ .scoped_to(:user_id, :namespace_id)
+ .ignoring_case_sensitivity
+ end
+
+ it { is_expected.to allow_value(:web_hook_disabled).for(:feature_name) }
+
+ it 'rejects invalid feature names' do
+ expect { callout.feature_name = :non_existent_feature }.to raise_error(ArgumentError)
+ end
+ end
+
+ describe '#source_feature_name' do
+ it 'provides a string based on the source and feature' do
+ expect(callout.source_feature_name).to eq "#{callout.feature_name}_#{callout.namespace_id}"
+ end
+ end
+end
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index 51970064c54..96c396f085c 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -24,6 +24,14 @@ RSpec.describe WikiPage do
container.wiki
end
+ def disable_front_matter
+ stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => false)
+ end
+
+ def enable_front_matter_for(thing)
+ stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => thing)
+ end
+
# Use for groups of tests that do not modify their `subject`.
#
# include_context 'subject is persisted page', title: 'my title'
@@ -40,6 +48,12 @@ RSpec.describe WikiPage do
it { expect(wiki_page).to have_attributes(front_matter: {}, content: content) }
end
+ shared_examples 'a page with front-matter' do
+ let(:front_matter) { { title: 'Foo', slugs: %w[slug_a slug_b] } }
+
+ it { expect(wiki_page.front_matter).to eq(front_matter) }
+ end
+
context 'the wiki page has front matter' do
let(:content) do
<<~MD
@@ -54,13 +68,27 @@ RSpec.describe WikiPage do
MD
end
- it 'has front-matter' do
- expect(wiki_page.front_matter).to eq({ title: 'Foo', slugs: %w[slug_a slug_b] })
- end
+ it_behaves_like 'a page with front-matter'
it 'strips the front matter from the content' do
expect(wiki_page.content.strip).to eq('My actual content')
end
+
+ context 'the feature flag is off' do
+ before do
+ disable_front_matter
+ end
+
+ it_behaves_like 'a page without front-matter'
+
+ context 'but enabled for the container' do
+ before do
+ enable_front_matter_for(container)
+ end
+
+ it_behaves_like 'a page with front-matter'
+ end
+ end
end
context 'the wiki page does not have front matter' do
@@ -443,6 +471,29 @@ RSpec.describe WikiPage do
end
end
+ context 'the front-matter feature flag is not enabled' do
+ before do
+ disable_front_matter
+ end
+
+ it 'does not update the front-matter' do
+ content = subject.content
+ subject.update(front_matter: { slugs: ['x'] })
+
+ page = wiki.find_page(subject.title)
+
+ expect([subject, page]).to all(have_attributes(front_matter: be_empty, content: content))
+ end
+
+ context 'but it is enabled for the container' do
+ before do
+ enable_front_matter_for(container)
+ end
+
+ it_behaves_like 'able to update front-matter'
+ end
+ end
+
it 'updates the wiki-page front-matter and content together' do
content = 'totally new content'
subject.update(content: content, front_matter: { slugs: ['x'] })
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index 5e757c11f99..f33c8e0a186 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -3,7 +3,10 @@
require 'spec_helper'
RSpec.describe WorkItem do
+ let_it_be(:reusable_project) { create(:project) }
+
describe 'associations' do
+ it { is_expected.to belong_to(:namespace) }
it { is_expected.to have_one(:work_item_parent).class_name('WorkItem') }
it 'has one `parent_link`' do
@@ -38,7 +41,9 @@ RSpec.describe WorkItem do
it 'returns instances of supported widgets' do
is_expected.to match_array([instance_of(WorkItems::Widgets::Description),
- instance_of(WorkItems::Widgets::Hierarchy)])
+ instance_of(WorkItems::Widgets::Hierarchy),
+ instance_of(WorkItems::Widgets::Assignees),
+ instance_of(WorkItems::Widgets::Weight)])
end
end
@@ -52,5 +57,55 @@ RSpec.describe WorkItem do
create(:work_item)
end
end
+
+ context 'work item namespace' do
+ let(:work_item) { build(:work_item, project: reusable_project) }
+
+ it 'sets the namespace_id' do
+ expect(work_item).to be_valid
+ expect(work_item.namespace).to eq(reusable_project.project_namespace)
+ end
+
+ context 'when work item is saved' do
+ it 'sets the namespace_id' do
+ work_item.save!
+ expect(work_item.reload.namespace).to eq(reusable_project.project_namespace)
+ end
+ end
+
+ context 'when existing work item is saved' do
+ let(:work_item) { create(:work_item) }
+
+ before do
+ work_item.update!(namespace_id: nil)
+ end
+
+ it 'sets the namespace id' do
+ work_item.update!(title: "#{work_item.title} and something extra")
+
+ expect(work_item.namespace).to eq(work_item.project.project_namespace)
+ end
+ end
+ end
+ end
+
+ describe 'validations' do
+ subject { work_item.valid? }
+
+ describe 'issue_type' do
+ let(:work_item) { build(:work_item, issue_type: issue_type) }
+
+ context 'when a valid type' do
+ let(:issue_type) { :issue }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'empty type' do
+ let(:issue_type) { nil }
+
+ it { is_expected.to eq(false) }
+ end
+ end
end
end
diff --git a/spec/models/work_items/parent_link_spec.rb b/spec/models/work_items/parent_link_spec.rb
index 9516baa7340..a16b15bbfc9 100644
--- a/spec/models/work_items/parent_link_spec.rb
+++ b/spec/models/work_items/parent_link_spec.rb
@@ -9,38 +9,46 @@ RSpec.describe WorkItems::ParentLink do
end
describe 'validations' do
+ subject { build(:parent_link) }
+
it { is_expected.to validate_presence_of(:work_item) }
it { is_expected.to validate_presence_of(:work_item_parent) }
+ it { is_expected.to validate_uniqueness_of(:work_item) }
describe 'hierarchy' do
let_it_be(:project) { create(:project) }
let_it_be(:issue) { build(:work_item, project: project) }
+ let_it_be(:incident) { build(:work_item, :incident, project: project) }
let_it_be(:task1) { build(:work_item, :task, project: project) }
let_it_be(:task2) { build(:work_item, :task, project: project) }
- it 'is valid if not-task parent has task child' do
+ it 'is valid if issue parent has task child' do
expect(build(:parent_link, work_item: task1, work_item_parent: issue)).to be_valid
end
+ it 'is valid if incident parent has task child' do
+ expect(build(:parent_link, work_item: task1, work_item_parent: incident)).to be_valid
+ end
+
it 'is not valid if child is not task' do
link = build(:parent_link, work_item: issue)
expect(link).not_to be_valid
- expect(link.errors[:work_item]).to include('Only Task can be assigned as a child in hierarchy.')
+ expect(link.errors[:work_item]).to include('only Task can be assigned as a child in hierarchy.')
end
it 'is not valid if parent is task' do
link = build(:parent_link, work_item_parent: task1)
expect(link).not_to be_valid
- expect(link.errors[:work_item_parent]).to include('Only Issue can be parent of Task.')
+ expect(link.errors[:work_item_parent]).to include('only Issue and Incident can be parent of Task.')
end
it 'is not valid if parent is in other project' do
link = build(:parent_link, work_item_parent: task1, work_item: build(:work_item))
expect(link).not_to be_valid
- expect(link.errors[:work_item_parent]).to include('Parent must be in the same project as child.')
+ expect(link.errors[:work_item_parent]).to include('parent must be in the same project as child.')
end
context 'when parent already has maximum number of links' do
@@ -54,7 +62,7 @@ RSpec.describe WorkItems::ParentLink do
link2 = build(:parent_link, work_item_parent: issue, work_item: task2)
expect(link2).not_to be_valid
- expect(link2.errors[:work_item_parent]).to include('Parent already has maximum number of children.')
+ expect(link2.errors[:work_item_parent]).to include('parent already has maximum number of children.')
end
it 'existing link is still valid' do
diff --git a/spec/models/work_items/type_spec.rb b/spec/models/work_items/type_spec.rb
index 81663d0eb41..e91617effc0 100644
--- a/spec/models/work_items/type_spec.rb
+++ b/spec/models/work_items/type_spec.rb
@@ -65,7 +65,9 @@ RSpec.describe WorkItems::Type do
it 'returns list of all possible widgets' do
is_expected.to match_array([::WorkItems::Widgets::Description,
- ::WorkItems::Widgets::Hierarchy])
+ ::WorkItems::Widgets::Hierarchy,
+ ::WorkItems::Widgets::Assignees,
+ ::WorkItems::Widgets::Weight])
end
end
diff --git a/spec/models/work_items/widgets/assignees_spec.rb b/spec/models/work_items/widgets/assignees_spec.rb
new file mode 100644
index 00000000000..a2c93c07fde
--- /dev/null
+++ b/spec/models/work_items/widgets/assignees_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::Assignees do
+ let_it_be(:work_item) { create(:work_item, assignees: [create(:user)]) }
+
+ describe '.type' do
+ subject { described_class.type }
+
+ it { is_expected.to eq(:assignees) }
+ end
+
+ describe '#type' do
+ subject { described_class.new(work_item).type }
+
+ it { is_expected.to eq(:assignees) }
+ end
+
+ describe '#assignees' do
+ subject { described_class.new(work_item).assignees }
+
+ it { is_expected.to eq(work_item.assignees) }
+ end
+
+ describe '#allows_multiple_assignees?' do
+ subject { described_class.new(work_item).allows_multiple_assignees? }
+
+ it { is_expected.to eq(work_item.allows_multiple_assignees?) }
+ end
+end
diff --git a/spec/models/work_items/widgets/hierarchy_spec.rb b/spec/models/work_items/widgets/hierarchy_spec.rb
index 0141731529b..ab2bcfee13f 100644
--- a/spec/models/work_items/widgets/hierarchy_spec.rb
+++ b/spec/models/work_items/widgets/hierarchy_spec.rb
@@ -3,7 +3,10 @@
require 'spec_helper'
RSpec.describe WorkItems::Widgets::Hierarchy do
- let_it_be(:work_item) { create(:work_item) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:task) { create(:work_item, :task, project: project) }
+ let_it_be(:work_item_parent) { create(:work_item, project: project) }
describe '.type' do
subject { described_class.type }
@@ -12,41 +15,57 @@ RSpec.describe WorkItems::Widgets::Hierarchy do
end
describe '#type' do
- subject { described_class.new(work_item).type }
+ subject { described_class.new(task).type }
it { is_expected.to eq(:hierarchy) }
end
describe '#parent' do
- let_it_be(:parent_link) { create(:parent_link) }
+ let_it_be(:parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item_parent) }
subject { described_class.new(parent_link.work_item).parent }
- it { is_expected.to eq parent_link.work_item_parent }
+ it { is_expected.to eq(parent_link.work_item_parent) }
- context 'when work_items_hierarchy flag is disabled' do
+ context 'when work_items flag is disabled' do
before do
- stub_feature_flags(work_items_hierarchy: false)
+ stub_feature_flags(work_items: false)
end
it { is_expected.to be_nil }
end
+
+ context 'when work_items flag is enabled for the parent group' do
+ before do
+ stub_feature_flags(work_items: group)
+ end
+
+ it { is_expected.to eq(parent_link.work_item_parent) }
+ end
end
describe '#children' do
- let_it_be(:parent_link1) { create(:parent_link, work_item_parent: work_item) }
- let_it_be(:parent_link2) { create(:parent_link, work_item_parent: work_item) }
+ let_it_be(:parent_link1) { create(:parent_link, work_item_parent: work_item_parent, work_item: task) }
+ let_it_be(:parent_link2) { create(:parent_link, work_item_parent: work_item_parent) }
- subject { described_class.new(work_item).children }
+ subject { described_class.new(work_item_parent).children }
- it { is_expected.to match_array([parent_link1.work_item, parent_link2.work_item]) }
+ it { is_expected.to contain_exactly(parent_link1.work_item, parent_link2.work_item) }
- context 'when work_items_hierarchy flag is disabled' do
+ context 'when work_items flag is disabled' do
before do
- stub_feature_flags(work_items_hierarchy: false)
+ stub_feature_flags(work_items: false)
end
it { is_expected.to be_empty }
end
+
+ context 'when work_items flag is enabled for the parent group' do
+ before do
+ stub_feature_flags(work_items: group)
+ end
+
+ it { is_expected.to contain_exactly(parent_link1.work_item, parent_link2.work_item) }
+ end
end
end
diff --git a/spec/models/x509_certificate_spec.rb b/spec/models/x509_certificate_spec.rb
index d3b4470d3f4..5723bd80739 100644
--- a/spec/models/x509_certificate_spec.rb
+++ b/spec/models/x509_certificate_spec.rb
@@ -73,7 +73,9 @@ RSpec.describe X509Certificate do
it 'accepts correct subject_key_identifier' do
subject_key_identifiers = [
'AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB',
- 'CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD'
+ 'CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD',
+ '79:FB:C1:E5:6B:53:8B:0A',
+ '79:fb:c1:e5:6b:53:8b:0a'
]
subject_key_identifiers.each do |identifier|
@@ -83,7 +85,6 @@ RSpec.describe X509Certificate do
it 'rejects invalid subject_key_identifier' do
subject_key_identifiers = [
- 'AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB',
'CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:GG',
'random string',
'12321342545356434523412341245452345623453542345234523453245'
diff --git a/spec/models/x509_issuer_spec.rb b/spec/models/x509_issuer_spec.rb
index f1067cad655..3d04adf7e26 100644
--- a/spec/models/x509_issuer_spec.rb
+++ b/spec/models/x509_issuer_spec.rb
@@ -39,7 +39,9 @@ RSpec.describe X509Issuer do
it 'accepts correct subject_key_identifier' do
subject_key_identifiers = [
'AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB',
- 'CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD'
+ 'CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD',
+ '79:FB:C1:E5:6B:53:8B:0A',
+ '79:fb:c1:e5:6b:53:8b:0a'
]
subject_key_identifiers.each do |identifier|
@@ -49,7 +51,6 @@ RSpec.describe X509Issuer do
it 'rejects invalid subject_key_identifier' do
subject_key_identifiers = [
- 'AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB',
'CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:GG',
'random string',
'12321342545356434523412341245452345623453542345234523453245'
diff --git a/spec/policies/environment_policy_spec.rb b/spec/policies/environment_policy_spec.rb
index 649b1a770c0..701fc7ac9ae 100644
--- a/spec/policies/environment_policy_spec.rb
+++ b/spec/policies/environment_policy_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe EnvironmentPolicy do
with_them do
before do
- project.add_user(user, access_level) unless access_level.nil?
+ project.add_member(user, access_level) unless access_level.nil?
end
it { expect(policy.allowed?(:stop_environment)).to be allowed? }
@@ -49,7 +49,7 @@ RSpec.describe EnvironmentPolicy do
context 'with protected branch' do
with_them do
before do
- project.add_user(user, access_level) unless access_level.nil?
+ project.add_member(user, access_level) unless access_level.nil?
create(:protected_branch, :no_one_can_push,
name: 'master', project: project)
end
@@ -86,7 +86,7 @@ RSpec.describe EnvironmentPolicy do
with_them do
before do
- project.add_user(user, access_level) unless access_level.nil?
+ project.add_member(user, access_level) unless access_level.nil?
end
it { expect(policy.allowed?(:stop_environment)).to be allowed? }
@@ -120,7 +120,7 @@ RSpec.describe EnvironmentPolicy do
with_them do
before do
- project.add_user(user, access_level) unless access_level.nil?
+ project.add_member(user, access_level) unless access_level.nil?
end
it { expect(policy).to be_disallowed :destroy_environment }
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
index 04d7eca6f09..da0427420e4 100644
--- a/spec/policies/global_policy_spec.rb
+++ b/spec/policies/global_policy_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe GlobalPolicy do
end
context "for an admin" do
- let(:current_user) { create(:admin) }
+ let_it_be(:current_user) { create(:admin) }
context "when the public level is restricted" do
before do
@@ -118,7 +118,7 @@ RSpec.describe GlobalPolicy do
end
context 'admin' do
- let(:current_user) { create(:user, :admin) }
+ let_it_be(:current_user) { create(:user, :admin) }
context 'when admin mode is enabled', :enable_admin_mode do
it { is_expected.to be_allowed(:read_custom_attribute) }
@@ -138,7 +138,7 @@ RSpec.describe GlobalPolicy do
end
context 'admin' do
- let(:current_user) { create(:admin) }
+ let_it_be(:current_user) { create(:admin) }
context 'when admin mode is enabled', :enable_admin_mode do
it { is_expected.to be_allowed(:approve_user) }
@@ -156,7 +156,7 @@ RSpec.describe GlobalPolicy do
end
context 'admin' do
- let(:current_user) { create(:admin) }
+ let_it_be(:current_user) { create(:admin) }
context 'when admin mode is enabled', :enable_admin_mode do
it { is_expected.to be_allowed(:reject_user) }
@@ -174,7 +174,7 @@ RSpec.describe GlobalPolicy do
end
context 'admin' do
- let(:current_user) { create(:user, :admin) }
+ let_it_be(:current_user) { create(:user, :admin) }
context 'when admin mode is enabled', :enable_admin_mode do
it { is_expected.to be_allowed(:use_project_statistics_filters) }
@@ -591,4 +591,34 @@ RSpec.describe GlobalPolicy do
it { is_expected.not_to be_allowed(:log_in) }
end
end
+
+ describe 'delete runners' do
+ context 'when anonymous' do
+ let(:current_user) { nil }
+
+ it { is_expected.not_to be_allowed(:delete_runners) }
+ end
+
+ context 'regular user' do
+ it { is_expected.not_to be_allowed(:delete_runners) }
+ end
+
+ context 'when external' do
+ let(:current_user) { build(:user, :external) }
+
+ it { is_expected.not_to be_allowed(:delete_runners) }
+ end
+
+ context 'admin user' do
+ let_it_be(:current_user) { create(:user, :admin) }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it { is_expected.to be_allowed(:delete_runners) }
+ end
+
+ context 'when admin mode is disabled' do
+ it { is_expected.to be_disallowed(:delete_runners) }
+ end
+ end
+ end
end
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index c513baea517..3ef859376a4 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe GroupPolicy do
include_context 'GroupPolicy context'
+ using RSpec::Parameterized::TableSyntax
context 'public group with no user' do
let(:group) { create(:group, :public, :crm_enabled) }
@@ -1229,4 +1230,30 @@ RSpec.describe GroupPolicy do
it { is_expected.to be_disallowed(:admin_crm_contact) }
it { is_expected.to be_disallowed(:admin_crm_organization) }
end
+
+ describe 'maintain_namespace' do
+ context 'with non-admin roles' do
+ where(:role, :allowed) do
+ :guest | false
+ :reporter | false
+ :developer | false
+ :maintainer | true
+ :owner | true
+ end
+
+ with_them do
+ let(:current_user) { public_send(role) }
+
+ it do
+ expect(subject.allowed?(:maintain_namespace)).to eq allowed
+ end
+ end
+ end
+
+ context 'as an admin', :enable_admin_mode do
+ let(:current_user) { admin }
+
+ it { is_expected.to be_allowed(:maintain_namespace) }
+ end
+ end
end
diff --git a/spec/policies/incident_management/timeline_event_policy_spec.rb b/spec/policies/incident_management/timeline_event_policy_spec.rb
new file mode 100644
index 00000000000..5a659054d7a
--- /dev/null
+++ b/spec/policies/incident_management/timeline_event_policy_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::TimelineEventPolicy, models: true do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:user) { developer }
+ let_it_be(:incident) { create(:incident, project: project, author: user) }
+
+ let_it_be(:editable_timeline_event) do
+ create(:incident_management_timeline_event, :editable, project: project, author: user, incident: incident)
+ end
+
+ let_it_be(:non_editable_timeline_event) do
+ create(:incident_management_timeline_event, :non_editable, project: project, author: user, incident: incident)
+ end
+
+ before do
+ project.add_developer(developer)
+ project.add_reporter(reporter)
+ end
+
+ describe '#rules' do
+ subject(:policies) { described_class.new(user, timeline_event) }
+
+ context 'when a user is not able to manage timeline events' do
+ let_it_be(:user) { reporter }
+
+ context 'when timeline event is editable' do
+ let(:timeline_event) { editable_timeline_event }
+
+ it 'does not allow editing the timeline event' do
+ is_expected.not_to be_allowed(:edit_incident_management_timeline_event)
+ end
+ end
+ end
+
+ context 'when a user is able to manage timeline events' do
+ let_it_be(:user) { developer }
+
+ context 'when timeline event is editable' do
+ let(:timeline_event) { editable_timeline_event }
+
+ it 'allows editing the timeline event' do
+ is_expected.to be_allowed(:edit_incident_management_timeline_event)
+ end
+ end
+
+ context 'when timeline event is not editable' do
+ let(:timeline_event) { non_editable_timeline_event }
+
+ it 'does not allow editing the timeline event' do
+ is_expected.not_to be_allowed(:edit_incident_management_timeline_event)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/policies/issue_policy_spec.rb b/spec/policies/issue_policy_spec.rb
index 557bda985af..fefbb59a830 100644
--- a/spec/policies/issue_policy_spec.rb
+++ b/spec/policies/issue_policy_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe IssuePolicy do
let(:reporter_from_group_link) { create(:user) }
let(:non_member) { create(:user) }
let(:support_bot) { User.support_bot }
+ let(:alert_bot) { User.alert_bot }
def permissions(user, issue)
described_class.new(user, issue)
@@ -41,6 +42,14 @@ RSpec.describe IssuePolicy do
end
end
+ shared_examples 'alert bot' do
+ it 'allows alert_bot to read and set metadata on issues' do
+ expect(permissions(alert_bot, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(alert_bot, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(alert_bot, new_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
+ end
+ end
+
context 'a private project' do
let(:project) { create(:project, :private) }
let(:issue) { create(:issue, project: project, assignees: [assignee], author: author) }
@@ -106,6 +115,7 @@ RSpec.describe IssuePolicy do
expect(permissions(non_member, new_issue)).to be_disallowed(:create_issue, :set_issue_metadata, :set_confidentiality)
end
+ it_behaves_like 'alert bot'
it_behaves_like 'support bot with service desk disabled'
it_behaves_like 'support bot with service desk enabled'
@@ -270,6 +280,7 @@ RSpec.describe IssuePolicy do
expect(permissions(support_bot, new_issue)).to be_disallowed(:create_issue, :set_issue_metadata, :set_confidentiality)
end
+ it_behaves_like 'alert bot'
it_behaves_like 'support bot with service desk enabled'
context 'when issues are private' do
@@ -326,6 +337,7 @@ RSpec.describe IssuePolicy do
expect(permissions(non_member, new_issue)).to be_disallowed(:create_issue, :set_issue_metadata, :set_confidentiality)
end
+ it_behaves_like 'alert bot'
it_behaves_like 'support bot with service desk disabled'
it_behaves_like 'support bot with service desk enabled'
end
diff --git a/spec/policies/merge_request_policy_spec.rb b/spec/policies/merge_request_policy_spec.rb
index e05de25f182..dd42e1b9313 100644
--- a/spec/policies/merge_request_policy_spec.rb
+++ b/spec/policies/merge_request_policy_spec.rb
@@ -51,7 +51,8 @@ RSpec.describe MergeRequestPolicy do
end
context 'when merge request is public' do
- let(:merge_request) { create(:merge_request, source_project: project, target_project: project, author: author) }
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project, author: user) }
+ let(:user) { author }
context 'and user is anonymous' do
subject { permissions(nil, merge_request) }
@@ -61,19 +62,62 @@ RSpec.describe MergeRequestPolicy do
end
end
- describe 'the author, who became a guest' do
- subject { permissions(author, merge_request) }
+ context 'and user is author' do
+ subject { permissions(user, merge_request) }
- it do
- is_expected.to be_allowed(:update_merge_request)
+ context 'and the user is a guest' do
+ let(:user) { guest }
+
+ it do
+ is_expected.to be_allowed(:update_merge_request)
+ end
+
+ it do
+ is_expected.to be_allowed(:reopen_merge_request)
+ end
+
+ it do
+ is_expected.to be_allowed(:approve_merge_request)
+ end
end
- it do
- is_expected.to be_allowed(:reopen_merge_request)
+ context 'and the user is a group member' do
+ let(:project) { create(:project, :public, group: group) }
+ let(:group) { create(:group) }
+ let(:user) { non_team_member }
+
+ before do
+ group.add_guest(non_team_member)
+ end
+
+ it do
+ is_expected.to be_allowed(:approve_merge_request)
+ end
end
- it do
- is_expected.to be_allowed(:approve_merge_request)
+ context 'and the user is a member of a shared group' do
+ let(:user) { non_team_member }
+
+ before do
+ group = create(:group)
+ project.project_group_links.create!(
+ group: group,
+ group_access: Gitlab::Access::DEVELOPER)
+
+ group.add_guest(non_team_member)
+ end
+
+ it do
+ is_expected.to be_allowed(:approve_merge_request)
+ end
+ end
+
+ context 'and the user is not a project member' do
+ let(:user) { non_team_member }
+
+ it do
+ is_expected.not_to be_allowed(:approve_merge_request)
+ end
end
end
end
diff --git a/spec/policies/namespace/root_storage_statistics_policy_spec.rb b/spec/policies/namespace/root_storage_statistics_policy_spec.rb
index e6b58bca4a8..89875f83c9b 100644
--- a/spec/policies/namespace/root_storage_statistics_policy_spec.rb
+++ b/spec/policies/namespace/root_storage_statistics_policy_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe Namespace::RootStorageStatisticsPolicy do
with_them do
before do
- group.add_user(user, user_type) unless user_type == :non_member
+ group.add_member(user, user_type) unless user_type == :non_member
end
it { is_expected.to eq(outcome) }
diff --git a/spec/policies/namespaces/user_namespace_policy_spec.rb b/spec/policies/namespaces/user_namespace_policy_spec.rb
index 22c3f6a6d67..e8a3c9b828d 100644
--- a/spec/policies/namespaces/user_namespace_policy_spec.rb
+++ b/spec/policies/namespaces/user_namespace_policy_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Namespaces::UserNamespacePolicy do
let_it_be(:admin) { create(:admin) }
let_it_be(:namespace) { create(:user_namespace, owner: owner) }
- let(:owner_permissions) { [:owner_access, :create_projects, :admin_namespace, :read_namespace, :read_statistics, :transfer_projects, :admin_package] }
+ let(:owner_permissions) { [:owner_access, :create_projects, :admin_namespace, :read_namespace, :read_statistics, :transfer_projects, :admin_package, :maintain_namespace] }
subject { described_class.new(current_user, namespace) }
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index d363a822d18..c041c72a0be 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -612,6 +612,24 @@ RSpec.describe ProjectPolicy do
end
end
+ describe 'create_task' do
+ context 'when user is member of the project' do
+ let(:current_user) { developer }
+
+ context 'when work_items feature flag is enabled' do
+ it { expect_allowed(:create_task) }
+ end
+
+ context 'when work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(work_items: false)
+ end
+
+ it { expect_disallowed(:create_task) }
+ end
+ end
+ end
+
describe 'update_max_artifacts_size' do
context 'when no user' do
let(:current_user) { anonymous }
@@ -1462,43 +1480,142 @@ RSpec.describe ProjectPolicy do
end
describe 'view_package_registry_project_settings' do
- context 'with registry enabled' do
+ context 'with packages disabled and' do
before do
- stub_config(registry: { enabled: true })
+ stub_config(packages: { enabled: false })
end
- context 'with an admin user' do
- let(:current_user) { admin }
+ context 'with registry enabled' do
+ before do
+ stub_config(registry: { enabled: true })
+ end
- context 'when admin mode enabled', :enable_admin_mode do
- it { is_expected.to be_allowed(:view_package_registry_project_settings) }
+ context 'with an admin user' do
+ let(:current_user) { admin }
+
+ context 'when admin mode enabled', :enable_admin_mode do
+ it { is_expected.to be_allowed(:view_package_registry_project_settings) }
+ end
+
+ context 'when admin mode disabled' do
+ it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ end
end
- context 'when admin mode disabled' do
- it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ %i[owner maintainer].each do |role|
+ context "with #{role}" do
+ let(:current_user) { public_send(role) }
+
+ it { is_expected.to be_allowed(:view_package_registry_project_settings) }
+ end
+ end
+
+ %i[developer reporter guest non_member anonymous].each do |role|
+ context "with #{role}" do
+ let(:current_user) { public_send(role) }
+
+ it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ end
end
end
- %i[owner maintainer].each do |role|
- context "with #{role}" do
- let(:current_user) { public_send(role) }
+ context 'with registry disabled' do
+ before do
+ stub_config(registry: { enabled: false })
+ end
+
+ context 'with admin user' do
+ let(:current_user) { admin }
+
+ context 'when admin mode enabled', :enable_admin_mode do
+ it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ end
- it { is_expected.to be_allowed(:view_package_registry_project_settings) }
+ context 'when admin mode disabled' do
+ it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ end
+ end
+
+ %i[owner maintainer developer reporter guest non_member anonymous].each do |role|
+ context "with #{role}" do
+ let(:current_user) { public_send(role) }
+
+ it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ end
end
end
+ end
- %i[developer reporter guest non_member anonymous].each do |role|
- context "with #{role}" do
- let(:current_user) { public_send(role) }
+ context 'with registry disabled and' do
+ before do
+ stub_config(registry: { enabled: false })
+ end
- it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ context 'with packages enabled' do
+ before do
+ stub_config(packages: { enabled: true })
+ end
+
+ context 'with an admin user' do
+ let(:current_user) { admin }
+
+ context 'when admin mode enabled', :enable_admin_mode do
+ it { is_expected.to be_allowed(:view_package_registry_project_settings) }
+ end
+
+ context 'when admin mode disabled' do
+ it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ end
+ end
+
+ %i[owner maintainer].each do |role|
+ context "with #{role}" do
+ let(:current_user) { public_send(role) }
+
+ it { is_expected.to be_allowed(:view_package_registry_project_settings) }
+ end
+ end
+
+ %i[developer reporter guest non_member anonymous].each do |role|
+ context "with #{role}" do
+ let(:current_user) { public_send(role) }
+
+ it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ end
+ end
+ end
+
+ context 'with packages disabled' do
+ before do
+ stub_config(packages: { enabled: false })
+ end
+
+ context 'with admin user' do
+ let(:current_user) { admin }
+
+ context 'when admin mode enabled', :enable_admin_mode do
+ it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ end
+
+ context 'when admin mode disabled' do
+ it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ end
+ end
+
+ %i[owner maintainer developer reporter guest non_member anonymous].each do |role|
+ context "with #{role}" do
+ let(:current_user) { public_send(role) }
+
+ it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ end
end
end
end
- context 'with registry disabled' do
+ context 'with registry & packages both disabled' do
before do
stub_config(registry: { enabled: false })
+ stub_config(packages: { enabled: false })
end
context 'with admin user' do
@@ -1718,7 +1835,7 @@ RSpec.describe ProjectPolicy do
%w(guest reporter developer maintainer).each do |role|
context role do
before do
- project.add_user(current_user, role.to_sym)
+ project.add_member(current_user, role.to_sym)
end
if role == 'guest'
@@ -1752,7 +1869,7 @@ RSpec.describe ProjectPolicy do
%w(guest reporter developer maintainer).each do |role|
context role do
before do
- project.add_user(current_user, role.to_sym)
+ project.add_member(current_user, role.to_sym)
end
it { is_expected.to be_allowed(:read_ci_cd_analytics) }
@@ -1782,7 +1899,7 @@ RSpec.describe ProjectPolicy do
%w(guest reporter developer maintainer).each do |role|
context role do
before do
- project.add_user(current_user, role.to_sym)
+ project.add_member(current_user, role.to_sym)
end
if role == 'guest'
diff --git a/spec/policies/project_statistics_policy_spec.rb b/spec/policies/project_statistics_policy_spec.rb
index 74630dc38ad..56e6161a264 100644
--- a/spec/policies/project_statistics_policy_spec.rb
+++ b/spec/policies/project_statistics_policy_spec.rb
@@ -72,7 +72,7 @@ RSpec.describe ProjectStatisticsPolicy do
before do
unless [:unauthenticated, :non_member].include?(user_type)
- project.add_user(external, user_type)
+ project.add_member(external, user_type)
end
end
diff --git a/spec/policies/work_item_policy_spec.rb b/spec/policies/work_item_policy_spec.rb
index 9cfc4455979..f8ec7d9f9bc 100644
--- a/spec/policies/work_item_policy_spec.rb
+++ b/spec/policies/work_item_policy_spec.rb
@@ -131,4 +131,33 @@ RSpec.describe WorkItemPolicy do
end
end
end
+
+ describe 'admin_parent_link' do
+ context 'when user is reporter' do
+ let(:current_user) { reporter }
+
+ it { is_expected.to be_allowed(:admin_parent_link) }
+ end
+
+ context 'when user is guest' do
+ let(:current_user) { guest }
+
+ it { is_expected.to be_disallowed(:admin_parent_link) }
+
+ context 'when guest authored the work item' do
+ let(:work_item_subject) { authored_work_item }
+ let(:current_user) { guest_author }
+
+ it { is_expected.to be_disallowed(:admin_parent_link) }
+ end
+
+ context 'when guest is assigned to the work item' do
+ before do
+ work_item.assignees = [guest]
+ end
+
+ it { is_expected.to be_disallowed(:admin_parent_link) }
+ end
+ end
+ end
end
diff --git a/spec/presenters/blob_presenter_spec.rb b/spec/presenters/blob_presenter_spec.rb
index 7b7463e6abc..498b2a32a0e 100644
--- a/spec/presenters/blob_presenter_spec.rb
+++ b/spec/presenters/blob_presenter_spec.rb
@@ -106,7 +106,7 @@ RSpec.describe BlobPresenter do
end
describe '#find_file_path' do
- it { expect(presenter.find_file_path).to eq("/#{project.full_path}/-/find_file/HEAD/files/ruby/regex.rb") }
+ it { expect(presenter.find_file_path).to eq("/#{project.full_path}/-/find_file/HEAD") }
end
describe '#blame_path' do
diff --git a/spec/presenters/ci/build_presenter_spec.rb b/spec/presenters/ci/build_presenter_spec.rb
index 1ff2ea3d225..6bf36a52419 100644
--- a/spec/presenters/ci/build_presenter_spec.rb
+++ b/spec/presenters/ci/build_presenter_spec.rb
@@ -29,36 +29,6 @@ RSpec.describe Ci::BuildPresenter do
end
end
- describe '#erased_by_user?' do
- it 'takes a build and optional params' do
- expect(presenter).not_to be_erased_by_user
- end
- end
-
- describe '#erased_by_name' do
- context 'when build is not erased' do
- before do
- expect(presenter).to receive(:erased_by_user?).and_return(false)
- end
-
- it 'returns nil' do
- expect(presenter.erased_by_name).to be_nil
- end
- end
-
- context 'when build is erased' do
- before do
- expect(presenter).to receive(:erased_by_user?).and_return(true)
- expect(build).to receive(:erased_by)
- .and_return(double(:user, name: 'John Doe'))
- end
-
- it 'returns the name of the eraser' do
- expect(presenter.erased_by_name).to eq('John Doe')
- end
- end
- end
-
describe '#status_title' do
context 'when build is auto-canceled' do
before do
@@ -168,58 +138,6 @@ RSpec.describe Ci::BuildPresenter do
end
end
- describe '#tooltip_message' do
- context 'When build has failed' do
- let(:build) { create(:ci_build, :script_failure, pipeline: pipeline) }
-
- it 'returns the reason of failure' do
- tooltip = subject.tooltip_message
-
- expect(tooltip).to eq("#{build.name} - failed - (script failure)")
- end
- end
-
- context 'When build has failed and retried' do
- let(:build) { create(:ci_build, :script_failure, :retried, pipeline: pipeline) }
-
- it 'includes the reason of failure and the retried title' do
- tooltip = subject.tooltip_message
-
- expect(tooltip).to eq("#{build.name} - failed - (script failure) (retried)")
- end
- end
-
- context 'When build has failed and is allowed to' do
- let(:build) { create(:ci_build, :script_failure, :allowed_to_fail, pipeline: pipeline) }
-
- it 'includes the reason of failure' do
- tooltip = subject.tooltip_message
-
- expect(tooltip).to eq("#{build.name} - failed - (script failure) (allowed to fail)")
- end
- end
-
- context 'For any other build (no retried)' do
- let(:build) { create(:ci_build, :success, pipeline: pipeline) }
-
- it 'includes build name and status' do
- tooltip = subject.tooltip_message
-
- expect(tooltip).to eq("#{build.name} - passed")
- end
- end
-
- context 'For any other build (retried)' do
- let(:build) { create(:ci_build, :success, :retried, pipeline: pipeline) }
-
- it 'includes build name and status' do
- tooltip = subject.tooltip_message
-
- expect(tooltip).to eq("#{build.name} - passed (retried)")
- end
- end
- end
-
describe '#execute_in' do
subject { presenter.execute_in }
diff --git a/spec/presenters/ci/legacy_stage_presenter_spec.rb b/spec/presenters/ci/legacy_stage_presenter_spec.rb
deleted file mode 100644
index 5268ef0f246..00000000000
--- a/spec/presenters/ci/legacy_stage_presenter_spec.rb
+++ /dev/null
@@ -1,47 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Ci::LegacyStagePresenter do
- let(:legacy_stage) { create(:ci_stage) }
- let(:presenter) { described_class.new(legacy_stage) }
-
- let!(:build) { create(:ci_build, :tags, :artifacts, pipeline: legacy_stage.pipeline, stage: legacy_stage.name) }
- let!(:retried_build) { create(:ci_build, :tags, :artifacts, :retried, pipeline: legacy_stage.pipeline, stage: legacy_stage.name) }
-
- before do
- create(:generic_commit_status, pipeline: legacy_stage.pipeline, stage: legacy_stage.name)
- end
-
- describe '#latest_ordered_statuses' do
- subject(:latest_ordered_statuses) { presenter.latest_ordered_statuses }
-
- it 'preloads build tags' do
- expect(latest_ordered_statuses.second.association(:tags)).to be_loaded
- end
-
- it 'preloads build artifacts archive' do
- expect(latest_ordered_statuses.second.association(:job_artifacts_archive)).to be_loaded
- end
-
- it 'preloads build artifacts metadata' do
- expect(latest_ordered_statuses.second.association(:metadata)).to be_loaded
- end
- end
-
- describe '#retried_ordered_statuses' do
- subject(:retried_ordered_statuses) { presenter.retried_ordered_statuses }
-
- it 'preloads build tags' do
- expect(retried_ordered_statuses.first.association(:tags)).to be_loaded
- end
-
- it 'preloads build artifacts archive' do
- expect(retried_ordered_statuses.first.association(:job_artifacts_archive)).to be_loaded
- end
-
- it 'preloads build artifacts metadata' do
- expect(retried_ordered_statuses.first.association(:metadata)).to be_loaded
- end
- end
-end
diff --git a/spec/presenters/clusters/cluster_presenter_spec.rb b/spec/presenters/clusters/cluster_presenter_spec.rb
index 6570ab56ed0..7349f444fac 100644
--- a/spec/presenters/clusters/cluster_presenter_spec.rb
+++ b/spec/presenters/clusters/cluster_presenter_spec.rb
@@ -148,84 +148,4 @@ RSpec.describe Clusters::ClusterPresenter do
it_behaves_like 'cluster health data'
end
end
-
- describe '#gitlab_managed_apps_logs_path' do
- context 'user can read logs' do
- let(:project) { cluster.project }
-
- before do
- project.add_maintainer(user)
- end
-
- it 'returns path to logs' do
- expect(presenter.gitlab_managed_apps_logs_path).to eq k8s_project_logs_path(project, cluster_id: cluster.id, format: :json)
- end
-
- context 'cluster has elastic stack integration enabled' do
- before do
- create(:clusters_integrations_elastic_stack, cluster: cluster)
- end
-
- it 'returns path to logs' do
- expect(presenter.gitlab_managed_apps_logs_path).to eq elasticsearch_project_logs_path(project, cluster_id: cluster.id, format: :json)
- end
- end
- end
-
- context 'group cluster' do
- let(:cluster) { create(:cluster, cluster_type: :group_type, groups: [group]) }
- let(:group) { create(:group, name: 'Foo') }
-
- context 'user can read logs' do
- before do
- group.add_maintainer(user)
- end
-
- context 'there are projects within group' do
- let!(:project) { create(:project, namespace: group) }
-
- it 'returns path to logs' do
- expect(presenter.gitlab_managed_apps_logs_path).to eq k8s_project_logs_path(project, cluster_id: cluster.id, format: :json)
- end
- end
-
- context 'there are no projects within group' do
- it 'returns nil' do
- expect(presenter.gitlab_managed_apps_logs_path).to be_nil
- end
- end
- end
- end
-
- context 'instance cluster' do
- let(:cluster) { create(:cluster, cluster_type: :instance_type) }
- let!(:project) { create(:project) }
- let(:user) { create(:admin) }
-
- before do
- project.add_maintainer(user)
- stub_application_setting(admin_mode: false)
- end
-
- context 'user can read logs' do
- it 'returns path to logs' do
- expect(presenter.gitlab_managed_apps_logs_path).to eq k8s_project_logs_path(project, cluster_id: cluster.id, format: :json)
- end
- end
- end
-
- context 'user can NOT read logs' do
- let(:cluster) { create(:cluster, cluster_type: :instance_type) }
- let!(:project) { create(:project) }
-
- before do
- project.add_developer(user)
- stub_application_setting(admin_mode: false)
- end
-
- it 'returns nil' do
- expect(presenter.gitlab_managed_apps_logs_path).to be_nil
- end
- end
- end
end
diff --git a/spec/presenters/gitlab/blame_presenter_spec.rb b/spec/presenters/gitlab/blame_presenter_spec.rb
index ff128416692..b3b9e133a73 100644
--- a/spec/presenters/gitlab/blame_presenter_spec.rb
+++ b/spec/presenters/gitlab/blame_presenter_spec.rb
@@ -8,8 +8,9 @@ RSpec.describe Gitlab::BlamePresenter do
let(:commit) { project.commit('master') }
let(:blob) { project.repository.blob_at(commit.id, path) }
let(:blame) { Gitlab::Blame.new(blob, commit) }
+ let(:page) { 1 }
- subject { described_class.new(blame, project: project, path: path) }
+ subject { described_class.new(blame, project: project, path: path, page: page) }
it 'precalculates necessary data on init' do
expect_any_instance_of(described_class)
diff --git a/spec/requests/admin/background_migrations_controller_spec.rb b/spec/requests/admin/background_migrations_controller_spec.rb
index 884448fdd95..fe2a2470511 100644
--- a/spec/requests/admin/background_migrations_controller_spec.rb
+++ b/spec/requests/admin/background_migrations_controller_spec.rb
@@ -97,6 +97,7 @@ RSpec.describe Admin::BackgroundMigrationsController, :enable_admin_mode do
describe 'POST #retry' do
let(:migration) { create(:batched_background_migration, :failed) }
+ let(:job_class) { Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob }
before do
create(:batched_background_migration_job, :failed, batched_migration: migration, batch_size: 10, min_value: 6, max_value: 15, attempts: 3)
@@ -107,7 +108,8 @@ RSpec.describe Admin::BackgroundMigrationsController, :enable_admin_mode do
anything,
batch_min_value: 6,
batch_size: 5,
- job_arguments: migration.job_arguments
+ job_arguments: migration.job_arguments,
+ job_class: job_class
).and_return([6, 10])
end
end
diff --git a/spec/requests/api/api_spec.rb b/spec/requests/api/api_spec.rb
index df9be2616c5..b6cb790bb71 100644
--- a/spec/requests/api/api_spec.rb
+++ b/spec/requests/api/api_spec.rb
@@ -133,7 +133,7 @@ RSpec.describe API::API do
'meta.caller_id' => 'GET /api/:version/broadcast_messages',
'meta.remote_ip' => an_instance_of(String),
'meta.client_id' => a_string_matching(%r{\Aip/.+}),
- 'meta.feature_category' => 'navigation',
+ 'meta.feature_category' => 'onboarding',
'route' => '/api/:version/broadcast_messages')
expect(data.stringify_keys).not_to include('meta.project', 'meta.root_namespace', 'meta.user')
@@ -209,7 +209,7 @@ RSpec.describe API::API do
'meta.caller_id' => 'GET /api/:version/broadcast_messages',
'meta.remote_ip' => an_instance_of(String),
'meta.client_id' => a_string_matching(%r{\Aip/.+}),
- 'meta.feature_category' => 'navigation',
+ 'meta.feature_category' => 'onboarding',
'route' => '/api/:version/broadcast_messages')
expect(data.stringify_keys).not_to include('meta.project', 'meta.root_namespace', 'meta.user')
diff --git a/spec/requests/api/award_emoji_spec.rb b/spec/requests/api/award_emoji_spec.rb
index 782e14593f7..67ddaf2fda5 100644
--- a/spec/requests/api/award_emoji_spec.rb
+++ b/spec/requests/api/award_emoji_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe API::AwardEmoji do
+ let_it_be_with_reload(:project) { create(:project, :private) }
let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:award_emoji) { create(:award_emoji, awardable: issue, user: user) }
let_it_be(:note) { create(:note, project: project, noteable: issue) }
@@ -16,10 +16,46 @@ RSpec.describe API::AwardEmoji do
project.add_maintainer(user)
end
+ shared_examples 'request with insufficient permissions' do |request_method|
+ let(:request_params) { {} }
+
+ context 'when user is not signed in' do
+ it 'returns 404' do
+ process request_method, api(request_path), params: request_params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when user does not have access' do
+ it 'returns 404' do
+ other_user = create(:user)
+
+ process request_method, api(request_path, other_user), params: request_params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ shared_examples 'unauthenticated request to public awardable' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ end
+
+ it 'returns the awarded emoji' do
+ get api(request_path)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
describe "GET /projects/:id/awardable/:awardable_id/award_emoji" do
context 'on an issue' do
+ let(:request_path) { "/projects/#{project.id}/issues/#{issue.iid}/award_emoji" }
+
it "returns an array of award_emoji" do
- get api("/projects/#{project.id}/issues/#{issue.iid}/award_emoji", user)
+ get api(request_path, user)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
@@ -48,6 +84,9 @@ RSpec.describe API::AwardEmoji do
expect(response).to have_gitlab_http_status(:not_found)
end
+
+ it_behaves_like 'unauthenticated request to public awardable'
+ it_behaves_like 'request with insufficient permissions', :get
end
context 'on a merge request' do
@@ -73,34 +112,30 @@ RSpec.describe API::AwardEmoji do
expect(json_response.first['name']).to eq(award.name)
end
end
-
- context 'when the user has no access' do
- it 'returns a status code 404' do
- user1 = create(:user)
-
- get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/award_emoji", user1)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
end
describe 'GET /projects/:id/awardable/:awardable_id/notes/:note_id/award_emoji' do
- let!(:rocket) { create(:award_emoji, awardable: note, name: 'rocket') }
+ let!(:rocket) { create(:award_emoji, awardable: note, name: 'rocket') }
+ let(:request_path) { "/projects/#{project.id}/issues/#{issue.iid}/notes/#{note.id}/award_emoji" }
it 'returns an array of award emoji' do
- get api("/projects/#{project.id}/issues/#{issue.iid}/notes/#{note.id}/award_emoji", user)
+ get api(request_path, user)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.first['name']).to eq(rocket.name)
end
+
+ it_behaves_like 'unauthenticated request to public awardable'
+ it_behaves_like 'request with insufficient permissions', :get
end
describe "GET /projects/:id/awardable/:awardable_id/award_emoji/:award_id" do
context 'on an issue' do
+ let(:request_path) { "/projects/#{project.id}/issues/#{issue.iid}/award_emoji/#{award_emoji.id}" }
+
it "returns the award emoji" do
- get api("/projects/#{project.id}/issues/#{issue.iid}/award_emoji/#{award_emoji.id}", user)
+ get api(request_path, user)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(award_emoji.name)
@@ -113,6 +148,9 @@ RSpec.describe API::AwardEmoji do
expect(response).to have_gitlab_http_status(:not_found)
end
+
+ it_behaves_like 'unauthenticated request to public awardable'
+ it_behaves_like 'request with insufficient permissions', :get
end
context 'on a merge request' do
@@ -139,28 +177,22 @@ RSpec.describe API::AwardEmoji do
expect(json_response['awardable_type']).to eq("Snippet")
end
end
-
- context 'when the user has no access' do
- it 'returns a status code 404' do
- user1 = create(:user)
-
- get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/award_emoji/#{downvote.id}", user1)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
end
describe 'GET /projects/:id/awardable/:awardable_id/notes/:note_id/award_emoji/:award_id' do
- let!(:rocket) { create(:award_emoji, awardable: note, name: 'rocket') }
+ let!(:rocket) { create(:award_emoji, awardable: note, name: 'rocket') }
+ let(:request_path) { "/projects/#{project.id}/issues/#{issue.iid}/notes/#{note.id}/award_emoji/#{rocket.id}" }
it 'returns an award emoji' do
- get api("/projects/#{project.id}/issues/#{issue.iid}/notes/#{note.id}/award_emoji/#{rocket.id}", user)
+ get api(request_path, user)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).not_to be_an Array
expect(json_response['name']).to eq(rocket.name)
end
+
+ it_behaves_like 'unauthenticated request to public awardable'
+ it_behaves_like 'request with insufficient permissions', :get
end
describe "POST /projects/:id/awardable/:awardable_id/award_emoji" do
@@ -189,12 +221,6 @@ RSpec.describe API::AwardEmoji do
expect(response).to have_gitlab_http_status(:bad_request)
end
- it "returns a 401 unauthorized error if the user is not authenticated" do
- post api("/projects/#{project.id}/issues/#{issue.iid}/award_emoji"), params: { name: 'thumbsup' }
-
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
-
it "normalizes +1 as thumbsup award" do
post api("/projects/#{project.id}/issues/#{issue.iid}/award_emoji", user), params: { name: '+1' }
@@ -223,6 +249,11 @@ RSpec.describe API::AwardEmoji do
expect(json_response['user']['username']).to eq(user.username)
end
end
+
+ it_behaves_like 'request with insufficient permissions', :post do
+ let(:request_path) { "/projects/#{project.id}/issues/#{issue.iid}/award_emoji" }
+ let(:request_params) { { name: 'blowfish' } }
+ end
end
describe "POST /projects/:id/awardable/:awardable_id/notes/:note_id/award_emoji" do
@@ -260,6 +291,11 @@ RSpec.describe API::AwardEmoji do
expect(json_response["message"]).to match("has already been taken")
end
end
+
+ it_behaves_like 'request with insufficient permissions', :post do
+ let(:request_path) { "/projects/#{project.id}/issues/#{issue.iid}/notes/#{note.id}/award_emoji" }
+ let(:request_params) { { name: 'rocket' } }
+ end
end
describe 'DELETE /projects/:id/awardable/:awardable_id/award_emoji/:award_id' do
@@ -319,9 +355,13 @@ RSpec.describe API::AwardEmoji do
let(:request) { api("/projects/#{project.id}/snippets/#{snippet.id}/award_emoji/#{award.id}", user) }
end
end
+
+ it_behaves_like 'request with insufficient permissions', :delete do
+ let(:request_path) { "/projects/#{project.id}/issues/#{issue.iid}/award_emoji/#{award_emoji.id}" }
+ end
end
- describe 'DELETE /projects/:id/awardable/:awardable_id/award_emoji/:award_emoji_id' do
+ describe 'DELETE /projects/:id/awardable/:awardable_id/notes/:note_id/award_emoji/:award_id' do
let!(:rocket) { create(:award_emoji, awardable: note, name: 'rocket', user: user) }
it 'deletes the award' do
@@ -335,5 +375,9 @@ RSpec.describe API::AwardEmoji do
it_behaves_like '412 response' do
let(:request) { api("/projects/#{project.id}/issues/#{issue.iid}/notes/#{note.id}/award_emoji/#{rocket.id}", user) }
end
+
+ it_behaves_like 'request with insufficient permissions', :delete do
+ let(:request_path) { "/projects/#{project.id}/issues/#{issue.iid}/notes/#{note.id}/award_emoji/#{rocket.id}" }
+ end
end
end
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index 3c6f9ac2816..746be1ccc44 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -216,13 +216,17 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(json_response['token']).to eq(job.token)
expect(json_response['job_info']).to eq(expected_job_info)
expect(json_response['git_info']).to eq(expected_git_info)
- expect(json_response['image']).to eq({ 'name' => 'image:1.0', 'entrypoint' => '/bin/sh', 'ports' => [], 'pull_policy' => nil })
- expect(json_response['services']).to eq([{ 'name' => 'postgres', 'entrypoint' => nil,
- 'alias' => nil, 'command' => nil, 'ports' => [], 'variables' => nil },
- { 'name' => 'docker:stable-dind', 'entrypoint' => '/bin/sh',
- 'alias' => 'docker', 'command' => 'sleep 30', 'ports' => [], 'variables' => [] },
- { 'name' => 'mysql:latest', 'entrypoint' => nil,
- 'alias' => nil, 'command' => nil, 'ports' => [], 'variables' => [{ 'key' => 'MYSQL_ROOT_PASSWORD', 'value' => 'root123.' }] }])
+ expect(json_response['image']).to eq(
+ { 'name' => 'image:1.0', 'entrypoint' => '/bin/sh', 'ports' => [], 'pull_policy' => nil }
+ )
+ expect(json_response['services']).to eq([
+ { 'name' => 'postgres', 'entrypoint' => nil, 'alias' => nil, 'command' => nil, 'ports' => [],
+ 'variables' => nil, 'pull_policy' => nil },
+ { 'name' => 'docker:stable-dind', 'entrypoint' => '/bin/sh', 'alias' => 'docker', 'command' => 'sleep 30',
+ 'ports' => [], 'variables' => [], 'pull_policy' => nil },
+ { 'name' => 'mysql:latest', 'entrypoint' => nil, 'alias' => nil, 'command' => nil, 'ports' => [],
+ 'variables' => [{ 'key' => 'MYSQL_ROOT_PASSWORD', 'value' => 'root123.' }], 'pull_policy' => nil }
+ ])
expect(json_response['steps']).to eq(expected_steps)
expect(json_response['artifacts']).to eq(expected_artifacts)
expect(json_response['cache']).to match(expected_cache)
@@ -542,7 +546,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
let!(:job) { create(:ci_build, :pending, :queued, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let!(:job2) { create(:ci_build, :pending, :queued, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
let!(:test_job) do
- create(:ci_build, :pending, :queued, pipeline: pipeline, token: 'test-job-token', name: 'deploy',
+ create(:ci_build, :pending, :queued, pipeline: pipeline, name: 'deploy',
stage: 'deploy', stage_idx: 1,
options: { script: ['bash'], dependencies: [job2.name] })
end
@@ -566,7 +570,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
let!(:job) { create(:ci_build, :pending, :queued, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let!(:job2) { create(:ci_build, :pending, :queued, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
let!(:empty_dependencies_job) do
- create(:ci_build, :pending, :queued, pipeline: pipeline, token: 'test-job-token', name: 'empty_dependencies_job',
+ create(:ci_build, :pending, :queued, pipeline: pipeline, name: 'empty_dependencies_job',
stage: 'deploy', stage_idx: 1,
options: { script: ['bash'], dependencies: [] })
end
@@ -671,14 +675,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
end
- context 'when variables are stored in trigger_request' do
- before do
- trigger_request.update_attribute(:variables, { TRIGGER_KEY_1: 'TRIGGER_VALUE_1' } )
- end
-
- it_behaves_like 'expected variables behavior'
- end
-
context 'when variables are stored in pipeline_variables' do
before do
create(:ci_pipeline_variable, pipeline: pipeline, key: :TRIGGER_KEY_1, value: 'TRIGGER_VALUE_1')
@@ -849,10 +845,51 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
end
+ context 'when service has pull_policy' do
+ let(:job) { create(:ci_build, :pending, :queued, pipeline: pipeline, options: options) }
+
+ let(:options) do
+ {
+ services: [{
+ name: 'postgres:11.9',
+ pull_policy: ['if-not-present']
+ }]
+ }
+ end
+
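+ # With the ci_docker_image_pull_policy feature flag enabled, the job payload
+ # exposes pull_policy for services; the nested context below checks the key is
+ # omitted when the flag is disabled.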
+ it 'returns the service with pull policy' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to include(
+ 'id' => job.id,
+ 'services' => [{ 'alias' => nil, 'command' => nil, 'entrypoint' => nil, 'name' => 'postgres:11.9',
+ 'ports' => [], 'pull_policy' => ['if-not-present'], 'variables' => [] }]
+ )
+ end
+
+ context 'when the FF ci_docker_image_pull_policy is disabled' do
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: false)
+ end
+
+ it 'returns the service without pull policy' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to include(
+ 'id' => job.id,
+ 'services' => [{ 'alias' => nil, 'command' => nil, 'entrypoint' => nil, 'name' => 'postgres:11.9',
+ 'ports' => [], 'variables' => [] }]
+ )
+ end
+ end
+ end
+
describe 'a job with excluded artifacts' do
context 'when excluded paths are defined' do
let(:job) do
- create(:ci_build, :pending, :queued, pipeline: pipeline, token: 'test-job-token', name: 'test',
+ create(:ci_build, :pending, :queued, pipeline: pipeline, name: 'test',
stage: 'deploy', stage_idx: 1,
options: { artifacts: { paths: ['abc'], exclude: ['cde'] } })
end
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 67c2ec91540..9ef845f06bf 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -392,14 +392,25 @@ RSpec.describe API::Commits do
end
end
- context 'when using warden' do
- it 'increments usage counters', :clean_gitlab_redis_sessions do
- stub_session('warden.user.user.key' => [[user.id], user.encrypted_password[0, 29]])
+ context 'when using warden', :snowplow, :clean_gitlab_redis_sessions do
+ before do
+ stub_session('warden.user.user.key' => [[user.id], user.authenticatable_salt])
+ end
+
+ subject { post api(url), params: valid_c_params }
+ it 'increments usage counters' do
expect(::Gitlab::UsageDataCounters::WebIdeCounter).to receive(:increment_commits_count)
expect(::Gitlab::UsageDataCounters::EditorUniqueCounter).to receive(:track_web_ide_edit_action)
- post api(url), params: valid_c_params
+ subject
+ end
+
+ it_behaves_like 'Snowplow event tracking' do
+ let(:namespace) { project.namespace }
+ let(:category) { 'ide_edit' }
+ let(:action) { 'g_edit_by_web_ide' }
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
end
end
diff --git a/spec/requests/api/conan_instance_packages_spec.rb b/spec/requests/api/conan_instance_packages_spec.rb
index ff3b332c620..54cad3093d7 100644
--- a/spec/requests/api/conan_instance_packages_spec.rb
+++ b/spec/requests/api/conan_instance_packages_spec.rb
@@ -17,6 +17,12 @@ RSpec.describe API::ConanInstancePackages do
let_it_be(:url) { '/packages/conan/v1/conans/search' }
it_behaves_like 'conan search endpoint'
+
+ it_behaves_like 'conan FIPS mode' do
+ let(:params) { { q: package.conan_recipe } }
+
+ subject { get api(url), params: params }
+ end
end
describe 'GET /api/v4/packages/conan/v1/users/authenticate' do
diff --git a/spec/requests/api/conan_project_packages_spec.rb b/spec/requests/api/conan_project_packages_spec.rb
index c108f2efaaf..e28105eb8eb 100644
--- a/spec/requests/api/conan_project_packages_spec.rb
+++ b/spec/requests/api/conan_project_packages_spec.rb
@@ -17,6 +17,12 @@ RSpec.describe API::ConanProjectPackages do
let(:url) { "/projects/#{project.id}/packages/conan/v1/conans/search" }
it_behaves_like 'conan search endpoint'
+
+ it_behaves_like 'conan FIPS mode' do
+ let(:params) { { q: package.conan_recipe } }
+
+ subject { get api(url), params: params }
+ end
end
describe 'GET /api/v4/projects/:id/packages/conan/v1/users/authenticate' do
diff --git a/spec/requests/api/environments_spec.rb b/spec/requests/api/environments_spec.rb
index 93f21c880a4..a35c1630caa 100644
--- a/spec/requests/api/environments_spec.rb
+++ b/spec/requests/api/environments_spec.rb
@@ -26,90 +26,7 @@ RSpec.describe API::Environments do
expect(json_response.first['tier']).to eq(environment.tier)
expect(json_response.first['external_url']).to eq(environment.external_url)
expect(json_response.first['project']).to match_schema('public_api/v4/project')
- expect(json_response.first['enable_advanced_logs_querying']).to eq(false)
expect(json_response.first).not_to have_key('last_deployment')
- expect(json_response.first).not_to have_key('gitlab_managed_apps_logs_path')
- end
-
- context 'when the user can read pod logs' do
- context 'with successful deployment on cluster' do
- let_it_be(:deployment) { create(:deployment, :on_cluster, :success, environment: environment, project: project) }
-
- it 'returns environment with enable_advanced_logs_querying and logs_api_path' do
- get api("/projects/#{project.id}/environments", user)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.size).to eq(1)
- expect(json_response.first['gitlab_managed_apps_logs_path']).to eq(
- "/#{project.full_path}/-/logs/k8s.json?cluster_id=#{deployment.cluster_id}"
- )
- end
- end
-
- context 'when elastic stack is available' do
- before do
- allow_next_found_instance_of(Environment) do |env|
- allow(env).to receive(:elastic_stack_available?).and_return(true)
- end
- end
-
- it 'returns environment with enable_advanced_logs_querying and logs_api_path' do
- get api("/projects/#{project.id}/environments", user)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.size).to eq(1)
- expect(json_response.first['enable_advanced_logs_querying']).to eq(true)
- expect(json_response.first['logs_api_path']).to eq(
- "/#{project.full_path}/-/logs/elasticsearch.json?environment_name=#{environment.name}"
- )
- end
- end
-
- context 'when elastic stack is not available' do
- before do
- allow_next_found_instance_of(Environment) do |env|
- allow(env).to receive(:elastic_stack_available?).and_return(false)
- end
- end
-
- it 'returns environment with enable_advanced_logs_querying logs_api_path' do
- get api("/projects/#{project.id}/environments", user)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.size).to eq(1)
- expect(json_response.first['enable_advanced_logs_querying']).to eq(false)
- expect(json_response.first['logs_api_path']).to eq(
- "/#{project.full_path}/-/logs/k8s.json?environment_name=#{environment.name}"
- )
- end
- end
- end
-
- context 'when the user cannot read pod logs' do
- before do
- allow_next_found_instance_of(User) do |user|
- allow(user).to receive(:can?).and_call_original
- allow(user).to receive(:can?).with(:read_pod_logs, project).and_return(false)
- end
- end
-
- it 'does not contain enable_advanced_logs_querying' do
- get api("/projects/#{project.id}/environments", user)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.size).to eq(1)
- expect(json_response.first).not_to have_key('enable_advanced_logs_querying')
- expect(json_response.first).not_to have_key('logs_api_path')
- expect(json_response.first).not_to have_key('gitlab_managed_apps_logs_path')
- end
end
context 'when filtering' do
diff --git a/spec/requests/api/events_spec.rb b/spec/requests/api/events_spec.rb
index 110d6e2f99e..d6c3999f22f 100644
--- a/spec/requests/api/events_spec.rb
+++ b/spec/requests/api/events_spec.rb
@@ -173,7 +173,7 @@ RSpec.describe API::Events do
let(:second_note) { create(:note_on_issue, project: create(:project)) }
before do
- second_note.project.add_user(user, :developer)
+ second_note.project.add_member(user, :developer)
[second_note].each do |note|
EventCreateService.new.leave_note(note, user)
diff --git a/spec/requests/api/feature_flags_user_lists_spec.rb b/spec/requests/api/feature_flags_user_lists_spec.rb
index e2a3f92df10..bfc57042ff4 100644
--- a/spec/requests/api/feature_flags_user_lists_spec.rb
+++ b/spec/requests/api/feature_flags_user_lists_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::FeatureFlagsUserLists do
let_it_be(:project, refind: true) { create(:project) }
+ let_it_be(:client, refind: true) { create(:operations_feature_flags_client, project: project) }
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
@@ -215,6 +216,7 @@ RSpec.describe API::FeatureFlagsUserLists do
}
expect(response).to have_gitlab_http_status(:forbidden)
+ expect(client.reload.last_feature_flag_updated_at).to be_nil
end
it 'creates the flag' do
@@ -231,6 +233,7 @@ RSpec.describe API::FeatureFlagsUserLists do
})
expect(project.operations_feature_flags_user_lists.count).to eq(1)
expect(project.operations_feature_flags_user_lists.last.name).to eq('mylist')
+ expect(client.reload.last_feature_flag_updated_at).not_to be_nil
end
it 'requires name' do
@@ -298,6 +301,7 @@ RSpec.describe API::FeatureFlagsUserLists do
}
expect(response).to have_gitlab_http_status(:forbidden)
+ expect(client.reload.last_feature_flag_updated_at).to be_nil
end
it 'updates the list' do
@@ -313,6 +317,7 @@ RSpec.describe API::FeatureFlagsUserLists do
'user_xids' => '456,789'
})
expect(list.reload.name).to eq('mylist')
+ expect(client.reload.last_feature_flag_updated_at).not_to be_nil
end
it 'preserves attributes not listed in the request' do
@@ -377,6 +382,7 @@ RSpec.describe API::FeatureFlagsUserLists do
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response).to eq({ 'message' => '404 Not found' })
+ expect(client.reload.last_feature_flag_updated_at).to be_nil
end
it 'deletes the list' do
@@ -387,6 +393,7 @@ RSpec.describe API::FeatureFlagsUserLists do
expect(response).to have_gitlab_http_status(:no_content)
expect(response.body).to be_blank
expect(project.operations_feature_flags_user_lists.count).to eq(0)
+ expect(client.reload.last_feature_flag_updated_at).not_to be_nil
end
it 'does not delete the list if it is associated with a strategy' do
diff --git a/spec/requests/api/geo_spec.rb b/spec/requests/api/geo_spec.rb
index edbca5eb1c6..4e77fa9405c 100644
--- a/spec/requests/api/geo_spec.rb
+++ b/spec/requests/api/geo_spec.rb
@@ -10,12 +10,24 @@ RSpec.describe API::Geo do
include_context 'workhorse headers'
+ let(:non_proxy_response_schema) do
+ {
+ 'type' => 'object',
+ 'additionalProperties' => false,
+ 'required' => %w(geo_enabled),
+ 'properties' => {
+ 'geo_enabled' => { 'type' => 'boolean' }
+ }
+ }
+ end
+
context 'with valid auth' do
it 'returns empty data' do
subject
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to be_empty
+ expect(json_response).to match_schema(non_proxy_response_schema)
+ expect(json_response['geo_enabled']).to be_falsey
end
end
diff --git a/spec/requests/api/graphql/boards/board_lists_query_spec.rb b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
index eb206465bce..39ff108a9e1 100644
--- a/spec/requests/api/graphql/boards/board_lists_query_spec.rb
+++ b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
@@ -96,7 +96,8 @@ RSpec.describe 'get board lists' do
context 'when ascending' do
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { }
+ include_context 'no sort argument'
+
let(:first_param) { 2 }
let(:all_records) { lists.map { |list| a_graphql_entity_for(list) } }
end
diff --git a/spec/requests/api/graphql/ci/group_variables_spec.rb b/spec/requests/api/graphql/ci/group_variables_spec.rb
new file mode 100644
index 00000000000..5ea6646ec2c
--- /dev/null
+++ b/spec/requests/api/graphql/ci/group_variables_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Query.group(fullPath).ciVariables' do
+ include GraphqlHelpers
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+
+ let(:query) do
+ %(
+ query {
+ group(fullPath: "#{group.full_path}") {
+ ciVariables {
+ nodes {
+ id
+ key
+ value
+ variableType
+ protected
+ masked
+ raw
+ environmentScope
+ }
+ }
+ }
+ }
+ )
+ end
+
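+ # Group CI variables are only exposed to users who can administer the group;
+ # other members receive a nil ciVariables field.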
+ context 'when the user can administer the group' do
+ before do
+ group.add_owner(user)
+ end
+
+ it "returns the group's CI variables" do
+ variable = create(:ci_group_variable, group: group, key: 'TEST_VAR', value: 'test',
+ masked: false, protected: true, raw: true, environment_scope: 'staging')
+
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data.dig('group', 'ciVariables', 'nodes')).to contain_exactly({
+ 'id' => variable.to_global_id.to_s,
+ 'key' => 'TEST_VAR',
+ 'value' => 'test',
+ 'variableType' => 'ENV_VAR',
+ 'masked' => false,
+ 'protected' => true,
+ 'raw' => true,
+ 'environmentScope' => 'staging'
+ })
+ end
+ end
+
+ context 'when the user cannot administer the group' do
+ it 'returns nothing' do
+ create(:ci_group_variable, group: group, value: 'verysecret', masked: true)
+
+ group.add_developer(user)
+
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data.dig('group', 'ciVariables')).to be_nil
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/ci/instance_variables_spec.rb b/spec/requests/api/graphql/ci/instance_variables_spec.rb
new file mode 100644
index 00000000000..7acf73a4e7a
--- /dev/null
+++ b/spec/requests/api/graphql/ci/instance_variables_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Query.ciVariables' do
+ include GraphqlHelpers
+
+ let(:query) do
+ %(
+ query {
+ ciVariables {
+ nodes {
+ id
+ key
+ value
+ variableType
+ protected
+ masked
+ raw
+ environmentScope
+ }
+ }
+ }
+ )
+ end
+
+ context 'when the user is an admin' do
+ let_it_be(:user) { create(:admin) }
+
+ it "returns the instance's CI variables" do
+ variable = create(:ci_instance_variable, key: 'TEST_VAR', value: 'test',
+ masked: false, protected: true, raw: true)
+
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data.dig('ciVariables', 'nodes')).to contain_exactly({
+ 'id' => variable.to_global_id.to_s,
+ 'key' => 'TEST_VAR',
+ 'value' => 'test',
+ 'variableType' => 'ENV_VAR',
+ 'masked' => false,
+ 'protected' => true,
+ 'raw' => true,
+ 'environmentScope' => nil
+ })
+ end
+ end
+
+ context 'when the user is not an admin' do
+ let_it_be(:user) { create(:user) }
+
+ it 'returns nothing' do
+ create(:ci_instance_variable, value: 'verysecret', masked: true)
+
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data.dig('ciVariables')).to be_nil
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/ci/job_spec.rb b/spec/requests/api/graphql/ci/job_spec.rb
index 2fb90dcd92b..3721155c71b 100644
--- a/spec/requests/api/graphql/ci/job_spec.rb
+++ b/spec/requests/api/graphql/ci/job_spec.rb
@@ -13,8 +13,8 @@ RSpec.describe 'Query.project(fullPath).pipelines.job(id)' do
let_it_be(:project) { create(:project, :repository, :public) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
- let_it_be(:prepare_stage) { create(:ci_stage_entity, pipeline: pipeline, project: project, name: 'prepare') }
- let_it_be(:test_stage) { create(:ci_stage_entity, pipeline: pipeline, project: project, name: 'test') }
+ let_it_be(:prepare_stage) { create(:ci_stage, pipeline: pipeline, project: project, name: 'prepare') }
+ let_it_be(:test_stage) { create(:ci_stage, pipeline: pipeline, project: project, name: 'test') }
let_it_be(:job_1) { create(:ci_build, pipeline: pipeline, stage: 'prepare', name: 'Job 1') }
let_it_be(:job_2) { create(:ci_build, pipeline: pipeline, stage: 'test', name: 'Job 2') }
diff --git a/spec/requests/api/graphql/ci/jobs_spec.rb b/spec/requests/api/graphql/ci/jobs_spec.rb
index d1737fc22ae..8c4ab13fc35 100644
--- a/spec/requests/api/graphql/ci/jobs_spec.rb
+++ b/spec/requests/api/graphql/ci/jobs_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'Query.project.pipeline' do
describe '.stages.groups.jobs' do
let(:pipeline) do
pipeline = create(:ci_pipeline, project: project, user: user)
- stage = create(:ci_stage_entity, project: project, pipeline: pipeline, name: 'first', position: 1)
+ stage = create(:ci_stage, project: project, pipeline: pipeline, name: 'first', position: 1)
create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'my test job', scheduling_type: :stage)
pipeline
@@ -84,8 +84,8 @@ RSpec.describe 'Query.project.pipeline' do
context 'when there is more than one stage and job needs' do
before do
- build_stage = create(:ci_stage_entity, position: 2, name: 'build', project: project, pipeline: pipeline)
- test_stage = create(:ci_stage_entity, position: 3, name: 'test', project: project, pipeline: pipeline)
+ build_stage = create(:ci_stage, position: 2, name: 'build', project: project, pipeline: pipeline)
+ test_stage = create(:ci_stage, position: 3, name: 'test', project: project, pipeline: pipeline)
create(:ci_build, pipeline: pipeline, name: 'docker 1 2', scheduling_type: :stage, stage: build_stage, stage_idx: build_stage.position)
create(:ci_build, pipeline: pipeline, name: 'docker 2 2', stage: build_stage, stage_idx: build_stage.position, scheduling_type: :dag)
diff --git a/spec/requests/api/graphql/ci/manual_variables_spec.rb b/spec/requests/api/graphql/ci/manual_variables_spec.rb
new file mode 100644
index 00000000000..b7aa76511a3
--- /dev/null
+++ b/spec/requests/api/graphql/ci/manual_variables_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Query.project(fullPath).pipelines.jobs.manualVariables' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ pipelines {
+ nodes {
+ jobs {
+ nodes {
+ manualVariables {
+ nodes {
+ key
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'returns the manual variables for the jobs' do
+ job = create(:ci_build, :manual, pipeline: pipeline)
+ create(:ci_job_variable, key: 'MANUAL_TEST_VAR', job: job)
+
+ post_graphql(query, current_user: user)
+
+ variables_data = graphql_data.dig('project', 'pipelines', 'nodes').first
+ .dig('jobs', 'nodes').flat_map { |job| job.dig('manualVariables', 'nodes') }
+ expect(variables_data.map { |var| var['key'] }).to match_array(['MANUAL_TEST_VAR'])
+ end
+
+ it 'does not fetch job variables for jobs that are not manual' do
+ job = create(:ci_build, pipeline: pipeline)
+ create(:ci_job_variable, key: 'THIS_VAR_SHOULD_NEVER_EXIST', job: job)
+
+ post_graphql(query, current_user: user)
+
+ variables_data = graphql_data.dig('project', 'pipelines', 'nodes').first
+ .dig('jobs', 'nodes').flat_map { |job| job.dig('manualVariables', 'nodes') }
+ expect(variables_data).to be_empty
+ end
+
+ it 'does not fetch job variables for bridges' do
+ create(:ci_bridge, :manual, pipeline: pipeline)
+
+ post_graphql(query, current_user: user)
+
+ variables_data = graphql_data.dig('project', 'pipelines', 'nodes').first
+ .dig('jobs', 'nodes').flat_map { |job| job.dig('manualVariables', 'nodes') }
+ expect(variables_data).to be_empty
+ end
+
+ it 'does not produce N+1 queries', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/367991' do
+ second_user = create(:user)
+ project.add_maintainer(second_user)
+ job = create(:ci_build, :manual, pipeline: pipeline)
+ create(:ci_job_variable, key: 'MANUAL_TEST_VAR_1', job: job)
+
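+ # Record the baseline query count for the first request, then assert that a
+ # request by another user, after an extra manual variable exists, does not
+ # exceed it (guards against N+1 queries).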
+ control_count = ActiveRecord::QueryRecorder.new do
+ post_graphql(query, current_user: user)
+ end
+
+ variables_data = graphql_data.dig('project', 'pipelines', 'nodes').first
+ .dig('jobs', 'nodes').flat_map { |job| job.dig('manualVariables', 'nodes') }
+ expect(variables_data.map { |var| var['key'] }).to match_array(['MANUAL_TEST_VAR_1'])
+
+ job = create(:ci_build, :manual, pipeline: pipeline)
+ create(:ci_job_variable, key: 'MANUAL_TEST_VAR_2', job: job)
+
+ expect do
+ post_graphql(query, current_user: second_user)
+ end.not_to exceed_query_limit(control_count)
+
+ variables_data = graphql_data.dig('project', 'pipelines', 'nodes').first
+ .dig('jobs', 'nodes').flat_map { |job| job.dig('manualVariables', 'nodes') }
+ expect(variables_data.map { |var| var['key'] }).to match_array(%w(MANUAL_TEST_VAR_1 MANUAL_TEST_VAR_2))
+ end
+end
diff --git a/spec/requests/api/graphql/ci/pipelines_spec.rb b/spec/requests/api/graphql/ci/pipelines_spec.rb
index 741af676b6d..a968e5508cb 100644
--- a/spec/requests/api/graphql/ci/pipelines_spec.rb
+++ b/spec/requests/api/graphql/ci/pipelines_spec.rb
@@ -86,8 +86,8 @@ RSpec.describe 'Query.project(fullPath).pipelines' do
describe '.stages' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:pipeline) { create(:ci_empty_pipeline, project: project) }
- let_it_be(:stage) { create(:ci_stage_entity, pipeline: pipeline, project: project) }
- let_it_be(:other_stage) { create(:ci_stage_entity, pipeline: pipeline, project: project, name: 'other') }
+ let_it_be(:stage) { create(:ci_stage, pipeline: pipeline, project: project) }
+ let_it_be(:other_stage) { create(:ci_stage, pipeline: pipeline, project: project, name: 'other') }
let(:first_n) { var('Int') }
let(:query_path) do
diff --git a/spec/requests/api/graphql/ci/project_variables_spec.rb b/spec/requests/api/graphql/ci/project_variables_spec.rb
new file mode 100644
index 00000000000..e61f146b24c
--- /dev/null
+++ b/spec/requests/api/graphql/ci/project_variables_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Query.project(fullPath).ciVariables' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ ciVariables {
+ nodes {
+ id
+ key
+ value
+ variableType
+ protected
+ masked
+ raw
+ environmentScope
+ }
+ }
+ }
+ }
+ )
+ end
+
+ context 'when the user can administer builds' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it "returns the project's CI variables" do
+ variable = create(:ci_variable, project: project, key: 'TEST_VAR', value: 'test',
+ masked: false, protected: true, raw: true, environment_scope: 'production')
+
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data.dig('project', 'ciVariables', 'nodes')).to contain_exactly({
+ 'id' => variable.to_global_id.to_s,
+ 'key' => 'TEST_VAR',
+ 'value' => 'test',
+ 'variableType' => 'ENV_VAR',
+ 'masked' => false,
+ 'protected' => true,
+ 'raw' => true,
+ 'environmentScope' => 'production'
+ })
+ end
+ end
+
+ context 'when the user cannot administer builds' do
+ it 'returns nothing' do
+ create(:ci_variable, project: project, value: 'verysecret', masked: true)
+
+ project.add_developer(user)
+
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data.dig('project', 'ciVariables')).to be_nil
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
index 446d1fb1bdb..e17a83d8e47 100644
--- a/spec/requests/api/graphql/ci/runner_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -424,7 +424,7 @@ RSpec.describe 'Query.runner(id)' do
let(:user) { create(:user) }
before do
- group.add_user(user, Gitlab::Access::OWNER)
+ group.add_member(user, Gitlab::Access::OWNER)
end
it_behaves_like 'retrieval with no admin url' do
diff --git a/spec/requests/api/graphql/ci/stages_spec.rb b/spec/requests/api/graphql/ci/stages_spec.rb
index 50d2cf75097..1edd6e58486 100644
--- a/spec/requests/api/graphql/ci/stages_spec.rb
+++ b/spec/requests/api/graphql/ci/stages_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe 'Query.project.pipeline.stages' do
end
before_all do
- create(:ci_stage_entity, pipeline: pipeline, name: 'deploy')
+ create(:ci_stage, pipeline: pipeline, name: 'deploy')
create_list(:ci_build, 2, pipeline: pipeline, stage: 'deploy')
end
diff --git a/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb b/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb
index 847fa72522e..14c55e61a65 100644
--- a/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb
+++ b/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe 'container repository details' do
with_them do
before do
project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility.to_s.upcase, false))
- project.add_user(user, role) unless role == :anonymous
+ project.add_member(user, role) unless role == :anonymous
end
it 'return the proper response' do
diff --git a/spec/requests/api/graphql/crm/contacts_spec.rb b/spec/requests/api/graphql/crm/contacts_spec.rb
new file mode 100644
index 00000000000..7e824140894
--- /dev/null
+++ b/spec/requests/api/graphql/crm/contacts_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting CRM contacts' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group, :crm_enabled) }
+
+ let_it_be(:contact_a) do
+ create(
+ :contact,
+ group: group,
+ first_name: "ABC",
+ last_name: "DEF",
+ email: "ghi@test.com",
+ description: "LMNO",
+ state: "inactive"
+ )
+ end
+
+ let_it_be(:contact_b) do
+ create(
+ :contact,
+ group: group,
+ first_name: "ABC",
+ last_name: "DEF",
+ email: "vwx@test.com",
+ description: "YZ",
+ state: "active"
+ )
+ end
+
+ let_it_be(:contact_c) do
+ create(
+ :contact,
+ group: group,
+ first_name: "PQR",
+ last_name: "STU",
+ email: "aaa@test.com",
+ description: "YZ",
+ state: "active"
+ )
+ end
+
+ before do
+ group.add_reporter(current_user)
+ end
+
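+ # Reuses the 'sorted paginated query' shared example: pagination_query builds
+ # the group contacts query and pagination_results_data maps the returned node
+ # IDs back to contact records.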
+ it_behaves_like 'sorted paginated query' do
+ let(:sort_argument) { {} }
+ let(:first_param) { 2 }
+ let(:all_records) { [contact_a, contact_b, contact_c] }
+ let(:data_path) { [:group, :contacts] }
+
+ def pagination_query(params)
+ graphql_query_for(
+ :group,
+ { full_path: group.full_path },
+ query_graphql_field(:contacts, params, "#{page_info} nodes { id }")
+ )
+ end
+
+ def pagination_results_data(nodes)
+ nodes.map { |item| GlobalID::Locator.locate(item['id']) }
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/current_user/groups_query_spec.rb b/spec/requests/api/graphql/current_user/groups_query_spec.rb
index 39f323b21a3..ef0f32bacf0 100644
--- a/spec/requests/api/graphql/current_user/groups_query_spec.rb
+++ b/spec/requests/api/graphql/current_user/groups_query_spec.rb
@@ -8,8 +8,9 @@ RSpec.describe 'Query current user groups' do
let_it_be(:user) { create(:user) }
let_it_be(:guest_group) { create(:group, name: 'public guest', path: 'public-guest') }
let_it_be(:private_maintainer_group) { create(:group, :private, name: 'b private maintainer', path: 'b-private-maintainer') }
- let_it_be(:public_developer_group) { create(:group, :private, project_creation_level: nil, name: 'c public developer', path: 'c-public-developer') }
- let_it_be(:public_maintainer_group) { create(:group, :private, name: 'a public maintainer', path: 'a-public-maintainer') }
+ let_it_be(:public_developer_group) { create(:group, project_creation_level: nil, name: 'c public developer', path: 'c-public-developer') }
+ let_it_be(:public_maintainer_group) { create(:group, name: 'a public maintainer', path: 'a-public-maintainer') }
+ let_it_be(:public_owner_group) { create(:group, name: 'a public owner', path: 'a-public-owner') }
let(:group_arguments) { {} }
let(:current_user) { user }
@@ -29,6 +30,7 @@ RSpec.describe 'Query current user groups' do
private_maintainer_group.add_maintainer(user)
public_developer_group.add_developer(user)
public_maintainer_group.add_maintainer(user)
+ public_owner_group.add_owner(user)
end
subject { graphql_data.dig('currentUser', 'groups', 'nodes') }
@@ -52,6 +54,7 @@ RSpec.describe 'Query current user groups' do
is_expected.to match(
expected_group_hash(
public_maintainer_group,
+ public_owner_group,
private_maintainer_group,
public_developer_group,
guest_group
@@ -66,6 +69,7 @@ RSpec.describe 'Query current user groups' do
is_expected.to match(
expected_group_hash(
public_maintainer_group,
+ public_owner_group,
private_maintainer_group,
public_developer_group
)
@@ -86,6 +90,32 @@ RSpec.describe 'Query current user groups' do
end
end
+ context 'when permission_scope is TRANSFER_PROJECTS' do
+ let(:group_arguments) { { permission_scope: :TRANSFER_PROJECTS } }
+
+ specify do
+ is_expected.to match(
+ expected_group_hash(
+ public_maintainer_group,
+ public_owner_group,
+ private_maintainer_group
+ )
+ )
+ end
+
+ context 'when search is provided' do
+ let(:group_arguments) { { permission_scope: :TRANSFER_PROJECTS, search: 'owner' } }
+
+ specify do
+ is_expected.to match(
+ expected_group_hash(
+ public_owner_group
+ )
+ )
+ end
+ end
+ end
+
context 'when search is provided' do
let(:group_arguments) { { search: 'maintainer' } }
diff --git a/spec/requests/api/graphql/group/container_repositories_spec.rb b/spec/requests/api/graphql/group/container_repositories_spec.rb
index be0b866af4a..8ec321c8d7c 100644
--- a/spec/requests/api/graphql/group/container_repositories_spec.rb
+++ b/spec/requests/api/graphql/group/container_repositories_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe 'getting container repositories in a group' do
group.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility.to_s.upcase, false))
project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility.to_s.upcase, false))
- group.add_user(user, role) unless role == :anonymous
+ group.add_member(user, role) unless role == :anonymous
end
it 'return the proper response' do
diff --git a/spec/requests/api/graphql/group/dependency_proxy_blobs_spec.rb b/spec/requests/api/graphql/group/dependency_proxy_blobs_spec.rb
index cdb21512894..daa1483e956 100644
--- a/spec/requests/api/graphql/group/dependency_proxy_blobs_spec.rb
+++ b/spec/requests/api/graphql/group/dependency_proxy_blobs_spec.rb
@@ -75,7 +75,7 @@ RSpec.describe 'getting dependency proxy blobs in a group' do
with_them do
before do
group.update_column(:visibility_level, Gitlab::VisibilityLevel.const_get(group_visibility.to_s.upcase, false))
- group.add_user(user, role) unless role == :anonymous
+ group.add_member(user, role) unless role == :anonymous
end
it 'return the proper response' do
diff --git a/spec/requests/api/graphql/group/dependency_proxy_group_setting_spec.rb b/spec/requests/api/graphql/group/dependency_proxy_group_setting_spec.rb
index d21c3046c1a..cc706c3051f 100644
--- a/spec/requests/api/graphql/group/dependency_proxy_group_setting_spec.rb
+++ b/spec/requests/api/graphql/group/dependency_proxy_group_setting_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe 'getting dependency proxy settings for a group' do
with_them do
before do
group.update_column(:visibility_level, Gitlab::VisibilityLevel.const_get(group_visibility.to_s.upcase, false))
- group.add_user(user, role) unless role == :anonymous
+ group.add_member(user, role) unless role == :anonymous
end
it 'return the proper response' do
diff --git a/spec/requests/api/graphql/group/dependency_proxy_image_ttl_policy_spec.rb b/spec/requests/api/graphql/group/dependency_proxy_image_ttl_policy_spec.rb
index 40f4b082072..3b2b04b1322 100644
--- a/spec/requests/api/graphql/group/dependency_proxy_image_ttl_policy_spec.rb
+++ b/spec/requests/api/graphql/group/dependency_proxy_image_ttl_policy_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe 'getting dependency proxy image ttl policy for a group' do
with_them do
before do
group.update_column(:visibility_level, Gitlab::VisibilityLevel.const_get(group_visibility.to_s.upcase, false))
- group.add_user(user, role) unless role == :anonymous
+ group.add_member(user, role) unless role == :anonymous
end
it 'return the proper response' do
diff --git a/spec/requests/api/graphql/group/dependency_proxy_manifests_spec.rb b/spec/requests/api/graphql/group/dependency_proxy_manifests_spec.rb
index c7149c100b2..37ef7089c2f 100644
--- a/spec/requests/api/graphql/group/dependency_proxy_manifests_spec.rb
+++ b/spec/requests/api/graphql/group/dependency_proxy_manifests_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe 'getting dependency proxy manifests in a group' do
with_them do
before do
group.update_column(:visibility_level, Gitlab::VisibilityLevel.const_get(group_visibility.to_s.upcase, false))
- group.add_user(user, role) unless role == :anonymous
+ group.add_member(user, role) unless role == :anonymous
end
it 'return the proper response' do
@@ -125,7 +125,8 @@ RSpec.describe 'getting dependency proxy manifests in a group' do
let_it_be(:descending_manifests) { manifests.reverse.map { |manifest| global_id_of(manifest) } }
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { '' }
+ include_context 'no sort argument'
+
let(:first_param) { 2 }
let(:all_records) { descending_manifests.map(&:to_s) }
end
@@ -134,7 +135,7 @@ RSpec.describe 'getting dependency proxy manifests in a group' do
def pagination_query(params)
# remove sort since the type does not accept sorting, but be future proof
graphql_query_for('group', { 'fullPath' => group.full_path },
- query_nodes(:dependencyProxyManifests, :id, include_pagination_info: true, args: params.merge(sort: nil))
+ query_nodes(:dependencyProxyManifests, :id, include_pagination_info: true, args: params)
)
end
end
diff --git a/spec/requests/api/graphql/group/group_members_spec.rb b/spec/requests/api/graphql/group/group_members_spec.rb
index fec866486ae..1ff5b134e92 100644
--- a/spec/requests/api/graphql/group/group_members_spec.rb
+++ b/spec/requests/api/graphql/group/group_members_spec.rb
@@ -7,8 +7,8 @@ RSpec.describe 'getting group members information' do
let_it_be(:parent_group) { create(:group, :public) }
let_it_be(:user) { create(:user) }
- let_it_be(:user_1) { create(:user, username: 'user') }
- let_it_be(:user_2) { create(:user, username: 'test') }
+ let_it_be(:user_1) { create(:user, username: 'user', name: 'Same Name') }
+ let_it_be(:user_2) { create(:user, username: 'test', name: 'Same Name') }
before_all do
[user_1, user_2].each { |user| parent_group.add_guest(user) }
@@ -45,11 +45,44 @@ RSpec.describe 'getting group members information' do
expect_array_response(user_1, user_2)
end
- it 'returns members that match the search query' do
- fetch_members(args: { search: 'test' })
+ describe 'search argument' do
+ it 'returns members that match the search query' do
+ fetch_members(args: { search: 'test' })
- expect(graphql_errors).to be_nil
- expect_array_response(user_2)
+ expect(graphql_errors).to be_nil
+ expect_array_response(user_2)
+ end
+
+ context 'when paginating' do
+ it 'returns correct results' do
+ fetch_members(args: { search: 'Same Name', first: 1 })
+
+ expect_array_response(user_1)
+
+ next_cursor = graphql_data_at(:group, :groupMembers, :pageInfo, :endCursor)
+ fetch_members(args: { search: 'Same Name', first: 1, after: next_cursor })
+
+ expect_array_response(user_2)
+ end
+
+ context 'when the use_keyset_aware_user_search_query FF is off' do
+ before do
+ stub_feature_flags(use_keyset_aware_user_search_query: false)
+ end
+
+ it 'raises error on the 2nd page due to missing cursor data' do
+ fetch_members(args: { search: 'Same Name', first: 1 })
+
+ # user_2 because the "old" order was non-deterministic (insert order), with no tie-breaker column
+ expect_array_response(user_2)
+
+ next_cursor = graphql_data_at(:group, :groupMembers, :pageInfo, :endCursor)
+ fetch_members(args: { search: 'Same Name', first: 1, after: next_cursor })
+
+ expect(graphql_errors.first['message']).to include('PG::UndefinedColumn')
+ end
+ end
+ end
end
end
@@ -196,6 +229,9 @@ RSpec.describe 'getting group members information' do
}
}
}
+ pageInfo {
+ endCursor
+ }
NODE
graphql_query_for("group",
diff --git a/spec/requests/api/graphql/mutations/issues/create_spec.rb b/spec/requests/api/graphql/mutations/issues/create_spec.rb
index 3d81b456c9c..9345735afe4 100644
--- a/spec/requests/api/graphql/mutations/issues/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/create_spec.rb
@@ -53,6 +53,42 @@ RSpec.describe 'Create an issue' do
let(:mutation_class) { ::Mutations::Issues::Create }
end
+ context 'when creating an issue of type TASK' do
+ before do
+ input['type'] = 'TASK'
+ end
+
+ context 'when work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(work_items: false)
+ end
+
+ it 'creates an issue with the default ISSUE type' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change(Issue, :count).by(1)
+
+ created_issue = Issue.last
+
+ expect(created_issue.work_item_type.base_type).to eq('issue')
+ expect(created_issue.issue_type).to eq('issue')
+ end
+ end
+
+ context 'when work_items feature flag is enabled' do
+ it 'creates an issue with TASK type' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change(Issue, :count).by(1)
+
+ created_issue = Issue.last
+
+ expect(created_issue.work_item_type.base_type).to eq('task')
+ expect(created_issue.issue_type).to eq('task')
+ end
+ end
+ end
+
context 'when position params are provided' do
let(:existing_issue) { create(:issue, project: project, relative_position: 50) }
diff --git a/spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb b/spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb
index 8f3ae9f26f6..a432fb17a70 100644
--- a/spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb
@@ -10,11 +10,12 @@ RSpec.describe 'Adding a DiffNote' do
let(:noteable) { create(:merge_request, source_project: project, target_project: project) }
let(:project) { create(:project, :repository) }
let(:diff_refs) { noteable.diff_refs }
+ let(:body) { 'Body text' }
let(:base_variables) do
{
noteable_id: GitlabSchema.id_from_object(noteable).to_s,
- body: 'Body text',
+ body: body,
position: {
paths: {
old_path: 'files/ruby/popen.rb',
@@ -65,6 +66,17 @@ RSpec.describe 'Adding a DiffNote' do
it_behaves_like 'a Note mutation when the given resource id is not for a Noteable'
end
+ context 'with /merge quick action' do
+ let(:body) { "Body text \n/merge" }
+
+ it 'merges the merge request', :sidekiq_inline do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(noteable.reload.state).to eq('merged')
+ expect(mutation_response['note']['body']).to eq('Body text')
+ end
+ end
+
it 'returns the note with the correct position' do
post_graphql_mutation(mutation, current_user: current_user)
diff --git a/spec/requests/api/graphql/mutations/snippets/update_spec.rb b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
index eb7e6f840fe..1a5d3620f22 100644
--- a/spec/requests/api/graphql/mutations/snippets/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'Updating a Snippet' do
include GraphqlHelpers
+ include SessionHelpers
let_it_be(:original_content) { 'Initial content' }
let_it_be(:original_description) { 'Initial description' }
@@ -162,7 +163,7 @@ RSpec.describe 'Updating a Snippet' do
end
end
- context 'when the author is a member of the project' do
+ context 'when the author is a member of the project', :snowplow do
before do
project.add_developer(current_user)
end
@@ -185,6 +186,20 @@ RSpec.describe 'Updating a Snippet' do
it_behaves_like 'has spam protection' do
let(:mutation_class) { ::Mutations::Snippets::Update }
end
+
+ context 'when not sessionless', :clean_gitlab_redis_sessions do
+ before do
+ stub_session('warden.user.user.key' => [[current_user.id], current_user.authenticatable_salt])
+ end
+
+ it_behaves_like 'Snowplow event tracking' do
+ let(:user) { current_user }
+ let(:namespace) { project.namespace }
+ let(:category) { 'ide_edit' }
+ let(:action) { 'g_edit_by_snippet_ide' }
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ end
+ end
end
it_behaves_like 'when the snippet is not found'
diff --git a/spec/requests/api/graphql/mutations/work_items/create_from_task_spec.rb b/spec/requests/api/graphql/mutations/work_items/create_from_task_spec.rb
index 8d33f8e1806..b1356bbe6fd 100644
--- a/spec/requests/api/graphql/mutations/work_items/create_from_task_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/create_from_task_spec.rb
@@ -47,6 +47,7 @@ RSpec.describe "Create a work item from a task in a work item's description" do
expect(work_item.description).to eq("- [ ] #{created_work_item.to_reference}+")
expect(created_work_item.issue_type).to eq('task')
expect(created_work_item.work_item_type.base_type).to eq('task')
+ expect(created_work_item.work_item_parent).to eq(work_item)
expect(mutation_response['workItem']).to include('id' => work_item.to_global_id.to_s)
expect(mutation_response['newWorkItem']).to include('id' => created_work_item.to_global_id.to_s)
end
diff --git a/spec/requests/api/graphql/mutations/work_items/create_spec.rb b/spec/requests/api/graphql/mutations/work_items/create_spec.rb
index 6abdaa2c850..911568bc39f 100644
--- a/spec/requests/api/graphql/mutations/work_items/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/create_spec.rb
@@ -63,6 +63,95 @@ RSpec.describe 'Create a work item' do
let(:mutation_class) { ::Mutations::WorkItems::Create }
end
+ context 'with hierarchy widget input' do
+ let(:widgets_response) { mutation_response['workItem']['widgets'] }
+ let(:fields) do
+ <<~FIELDS
+ workItem {
+ widgets {
+ type
+ ... on WorkItemWidgetHierarchy {
+ parent {
+ id
+ }
+ children {
+ edges {
+ node {
+ id
+ }
+ }
+ }
+ }
+ }
+ }
+ errors
+ FIELDS
+ end
+
+ let(:mutation) { graphql_mutation(:workItemCreate, input.merge('projectPath' => project.full_path), fields) }
+
+ context 'when setting parent' do
+ let_it_be(:parent) { create(:work_item, project: project) }
+
+ let(:input) do
+ {
+ title: 'item1',
+ workItemTypeId: WorkItems::Type.default_by_type(:task).to_global_id.to_s,
+ hierarchyWidget: { 'parentId' => parent.to_global_id.to_s }
+ }
+ end
+
+ it 'updates the work item parent' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(widgets_response).to include(
+ {
+ 'children' => { 'edges' => [] },
+ 'parent' => { 'id' => parent.to_global_id.to_s },
+ 'type' => 'HIERARCHY'
+ }
+ )
+ end
+
+ context 'when parent work item type is invalid' do
+ let_it_be(:parent) { create(:work_item, :task, project: project) }
+
+ it 'returns error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['errors'])
+ .to contain_exactly(/cannot be added: only Issue and Incident can be parent of Task./)
+ expect(mutation_response['workItem']).to be_nil
+ end
+ end
+
+ context 'when parent work item is not found' do
+ let_it_be(:parent) { build_stubbed(:work_item, id: non_existing_record_id) }
+
+ it 'returns a top level error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_errors.first['message']).to include('No object found for `parentId')
+ end
+ end
+ end
+
+ context 'when unsupported widget input is sent' do
+ let(:input) do
+ {
+ 'title' => 'new title',
+ 'description' => 'new description',
+ 'workItemTypeId' => WorkItems::Type.default_by_type(:test_case).to_global_id.to_s,
+ 'hierarchyWidget' => {}
+ }
+ end
+
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ['Following widget keys are not supported by Test Case type: [:hierarchy_widget]']
+ end
+ end
+
context 'when the work_items feature flag is disabled' do
before do
stub_feature_flags(work_items: false)
diff --git a/spec/requests/api/graphql/mutations/work_items/delete_task_spec.rb b/spec/requests/api/graphql/mutations/work_items/delete_task_spec.rb
index 05d3587d342..e576d0ee7ef 100644
--- a/spec/requests/api/graphql/mutations/work_items/delete_task_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/delete_task_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe "Delete a task in a work item's description" do
end.to change(WorkItem, :count).by(-1).and(
change(IssueLink, :count).by(-1)
).and(
- change(work_item, :description).from("- [ ] #{task.to_reference}+").to('')
+ change(work_item, :description).from("- [ ] #{task.to_reference}+").to("- [ ] #{task.title}")
)
expect(response).to have_gitlab_http_status(:success)
diff --git a/spec/requests/api/graphql/mutations/work_items/update_spec.rb b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
index 71b03103115..77f7b9bacef 100644
--- a/spec/requests/api/graphql/mutations/work_items/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
@@ -11,8 +11,17 @@ RSpec.describe 'Update a work item' do
let(:work_item_event) { 'CLOSE' }
let(:input) { { 'stateEvent' => work_item_event, 'title' => 'updated title' } }
+ let(:fields) do
+ <<~FIELDS
+ workItem {
+ state
+ title
+ }
+ errors
+ FIELDS
+ end
- let(:mutation) { graphql_mutation(:workItemUpdate, input.merge('id' => work_item.to_global_id.to_s)) }
+ let(:mutation) { graphql_mutation(:workItemUpdate, input.merge('id' => work_item.to_global_id.to_s), fields) }
let(:mutation_response) { graphql_mutation_response(:work_item_update) }
@@ -62,6 +71,20 @@ RSpec.describe 'Update a work item' do
end
end
+ context 'when unsupported widget input is sent' do
+ let_it_be(:test_case) { create(:work_item_type, :default, :test_case, name: 'some_test_case_name') }
+ let_it_be(:work_item) { create(:work_item, work_item_type: test_case, project: project) }
+
+ let(:input) do
+ {
+ 'hierarchyWidget' => {}
+ }
+ end
+
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ["Following widget keys are not supported by some_test_case_name type: [:hierarchy_widget]"]
+ end
+
it_behaves_like 'has spam protection' do
let(:mutation_class) { ::Mutations::WorkItems::Update }
end
@@ -80,5 +103,248 @@ RSpec.describe 'Update a work item' do
expect(mutation_response['errors']).to contain_exactly('`work_items` feature flag disabled for this project')
end
end
+
+ context 'with description widget input' do
+ let(:fields) do
+ <<~FIELDS
+ workItem {
+ description
+ widgets {
+ type
+ ... on WorkItemWidgetDescription {
+ description
+ }
+ }
+ }
+ errors
+ FIELDS
+ end
+
+ it_behaves_like 'update work item description widget' do
+ let(:new_description) { 'updated description' }
+ let(:input) do
+ { 'descriptionWidget' => { 'description' => new_description } }
+ end
+ end
+ end
+
+ context 'with weight widget input' do
+ let(:fields) do
+ <<~FIELDS
+ workItem {
+ widgets {
+ type
+ ... on WorkItemWidgetWeight {
+ weight
+ }
+ }
+ }
+ errors
+ FIELDS
+ end
+
+ it_behaves_like 'update work item weight widget' do
+ let(:new_weight) { 2 }
+
+ let(:input) do
+ { 'weightWidget' => { 'weight' => new_weight } }
+ end
+ end
+ end
+
+ context 'with hierarchy widget input' do
+ let(:widgets_response) { mutation_response['workItem']['widgets'] }
+ let(:fields) do
+ <<~FIELDS
+ workItem {
+ description
+ widgets {
+ type
+ ... on WorkItemWidgetHierarchy {
+ parent {
+ id
+ }
+ children {
+ edges {
+ node {
+ id
+ }
+ }
+ }
+ }
+ }
+ }
+ errors
+ FIELDS
+ end
+
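+ # The hierarchy widget exposes a work item's parent and children; the contexts
+ # below cover setting, replacing, and clearing the parent as well as assigning
+ # children.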
+ context 'when updating parent' do
+ let_it_be(:work_item) { create(:work_item, :task, project: project) }
+ let_it_be(:valid_parent) { create(:work_item, project: project) }
+ let_it_be(:invalid_parent) { create(:work_item, :task, project: project) }
+
+ context 'when parent work item type is invalid' do
+ let(:error) { "#{work_item.to_reference} cannot be added: only Issue and Incident can be parent of Task." }
+ let(:input) do
+ { 'hierarchyWidget' => { 'parentId' => invalid_parent.to_global_id.to_s }, 'title' => 'new title' }
+ end
+
+ it 'returns response with errors' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to not_change(work_item, :work_item_parent).and(not_change(work_item, :title))
+
+ expect(mutation_response['workItem']).to be_nil
+ expect(mutation_response['errors']).to match_array([error])
+ end
+ end
+
+ context 'when parent work item has a valid type' do
+ let(:input) { { 'hierarchyWidget' => { 'parentId' => valid_parent.to_global_id.to_s } } }
+
+ it 'sets the parent for the work item' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to change(work_item, :work_item_parent).from(nil).to(valid_parent)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(widgets_response).to include(
+ {
+ 'children' => { 'edges' => [] },
+ 'parent' => { 'id' => valid_parent.to_global_id.to_s },
+ 'type' => 'HIERARCHY'
+ }
+ )
+ end
+
+ context 'when a parent is already present' do
+ let_it_be(:existing_parent) { create(:work_item, project: project) }
+
+ before do
+ work_item.update!(work_item_parent: existing_parent)
+ end
+
+ it 'is replaced with new parent' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to change(work_item, :work_item_parent).from(existing_parent).to(valid_parent)
+ end
+ end
+ end
+
+ context 'when parentId is null' do
+ let(:input) { { 'hierarchyWidget' => { 'parentId' => nil } } }
+
+ context 'when parent is present' do
+ before do
+ work_item.update!(work_item_parent: valid_parent)
+ end
+
+ it 'removes parent and returns success message' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to change(work_item, :work_item_parent).from(valid_parent).to(nil)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(widgets_response)
+ .to include(
+ {
+ 'children' => { 'edges' => [] },
+ 'parent' => nil,
+ 'type' => 'HIERARCHY'
+ }
+ )
+ end
+ end
+
+ context 'when parent is not present' do
+ before do
+ work_item.update!(work_item_parent: nil)
+ end
+
+ it 'does not change work item and returns success message' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.not_to change(work_item, :work_item_parent)
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
+ end
+
+ context 'when parent work item is not found' do
+ let(:input) { { 'hierarchyWidget' => { 'parentId' => "gid://gitlab/WorkItem/#{non_existing_record_id}" } } }
+
+ it 'returns a top level error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_errors.first['message']).to include('No object found for `parentId')
+ end
+ end
+ end
+
+ context 'when updating children' do
+ let_it_be(:valid_child1) { create(:work_item, :task, project: project) }
+ let_it_be(:valid_child2) { create(:work_item, :task, project: project) }
+ let_it_be(:invalid_child) { create(:work_item, project: project) }
+
+ let(:input) { { 'hierarchyWidget' => { 'childrenIds' => children_ids } } }
+ let(:error) do
+ "#{invalid_child.to_reference} cannot be added: only Task can be assigned as a child in hierarchy."
+ end
+
+ context 'when child work item type is invalid' do
+ let(:children_ids) { [invalid_child.to_global_id.to_s] }
+
+ it 'returns response with errors' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['workItem']).to be_nil
+ expect(mutation_response['errors']).to match_array([error])
+ end
+ end
+
+ context 'when there is a mix of existing and non existing work items' do
+ let(:children_ids) { [valid_child1.to_global_id.to_s, "gid://gitlab/WorkItem/#{non_existing_record_id}"] }
+
+ it 'returns a top level error and does not add valid work item' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.not_to change(work_item.work_item_children, :count)
+
+ expect(graphql_errors.first['message']).to include('No object found for `childrenIds')
+ end
+ end
+
+ context 'when child work item type is valid' do
+ let(:children_ids) { [valid_child1.to_global_id.to_s, valid_child2.to_global_id.to_s] }
+
+ it 'updates the work item children' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to change(work_item.work_item_children, :count).by(2)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(widgets_response).to include(
+ {
+ 'children' => { 'edges' => [
+ { 'node' => { 'id' => valid_child2.to_global_id.to_s } },
+ { 'node' => { 'id' => valid_child1.to_global_id.to_s } }
+ ] },
+ 'parent' => nil,
+ 'type' => 'HIERARCHY'
+ }
+ )
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/requests/api/graphql/mutations/work_items/update_widgets_spec.rb b/spec/requests/api/graphql/mutations/work_items/update_widgets_spec.rb
index 595d8fe97ed..2a5cb937a2f 100644
--- a/spec/requests/api/graphql/mutations/work_items/update_widgets_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/update_widgets_spec.rb
@@ -9,16 +9,23 @@ RSpec.describe 'Update work item widgets' do
let_it_be(:developer) { create(:user).tap { |user| project.add_developer(user) } }
let_it_be(:work_item, refind: true) { create(:work_item, project: project) }
- let(:input) do
- {
- 'descriptionWidget' => { 'description' => 'updated description' }
+ let(:input) { { 'descriptionWidget' => { 'description' => 'updated description' } } }
+ let(:mutation_response) { graphql_mutation_response(:work_item_update_widgets) }
+ let(:mutation) do
+ graphql_mutation(:workItemUpdateWidgets, input.merge('id' => work_item.to_global_id.to_s), <<~FIELDS)
+ errors
+ workItem {
+ description
+ widgets {
+ type
+ ... on WorkItemWidgetDescription {
+ description
+ }
+ }
}
+ FIELDS
end
- let(:mutation) { graphql_mutation(:workItemUpdateWidgets, input.merge('id' => work_item.to_global_id.to_s)) }
-
- let(:mutation_response) { graphql_mutation_response(:work_item_update_widgets) }
-
context 'the user is not allowed to update a work item' do
let(:current_user) { create(:user) }
@@ -28,32 +35,8 @@ RSpec.describe 'Update work item widgets' do
context 'when user has permissions to update a work item', :aggregate_failures do
let(:current_user) { developer }
- context 'when the updated work item is not valid' do
- it 'returns validation errors without the work item' do
- errors = ActiveModel::Errors.new(work_item).tap { |e| e.add(:description, 'error message') }
-
- allow_next_found_instance_of(::WorkItem) do |instance|
- allow(instance).to receive(:valid?).and_return(false)
- allow(instance).to receive(:errors).and_return(errors)
- end
-
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(mutation_response['workItem']).to be_nil
- expect(mutation_response['errors']).to match_array(['Description error message'])
- end
- end
-
- it 'updates the work item widgets' do
- expect do
- post_graphql_mutation(mutation, current_user: current_user)
- work_item.reload
- end.to change(work_item, :description).from(nil).to('updated description')
-
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response['workItem']).to include(
- 'title' => work_item.title
- )
+ it_behaves_like 'update work item description widget' do
+ let(:new_description) { 'updated description' }
end
it_behaves_like 'has spam protection' do
@@ -69,7 +52,7 @@ RSpec.describe 'Update work item widgets' do
expect do
post_graphql_mutation(mutation, current_user: current_user)
work_item.reload
- end.to not_change(work_item, :title)
+ end.to not_change(work_item, :description)
expect(mutation_response['errors']).to contain_exactly('`work_items` feature flag disabled for this project')
end
diff --git a/spec/requests/api/graphql/project/container_repositories_spec.rb b/spec/requests/api/graphql/project/container_repositories_spec.rb
index bbab6012f3f..01b117a89d8 100644
--- a/spec/requests/api/graphql/project/container_repositories_spec.rb
+++ b/spec/requests/api/graphql/project/container_repositories_spec.rb
@@ -81,7 +81,7 @@ RSpec.describe 'getting container repositories in a project' do
with_them do
before do
project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility.to_s.upcase, false))
- project.add_user(user, role) unless role == :anonymous
+ project.add_member(user, role) unless role == :anonymous
end
it 'return the proper response' do
diff --git a/spec/requests/api/graphql/project/issues_spec.rb b/spec/requests/api/graphql/project/issues_spec.rb
index 69e14eace66..596e023a027 100644
--- a/spec/requests/api/graphql/project/issues_spec.rb
+++ b/spec/requests/api/graphql/project/issues_spec.rb
@@ -223,6 +223,7 @@ RSpec.describe 'getting an issue list for a project' do
end
describe 'sorting and pagination' do
+ let_it_be(:sort_project) { create(:project, :public) }
let_it_be(:data_path) { [:project, :issues] }
def pagination_query(params)
@@ -237,8 +238,38 @@ RSpec.describe 'getting an issue list for a project' do
data.map { |issue| issue['iid'].to_i }
end
+ context 'when sorting by severity' do
+ let_it_be(:severity_issue1) { create(:issue, project: sort_project) }
+ let_it_be(:severity_issue2) { create(:issue, project: sort_project) }
+ let_it_be(:severity_issue3) { create(:issue, project: sort_project) }
+ let_it_be(:severity_issue4) { create(:issue, project: sort_project) }
+ let_it_be(:severity_issue5) { create(:issue, project: sort_project) }
+
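+ # severity_issue3 is given no issuable_severity record, so it sorts ahead of UNKNOWN ascending and last descending.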
+ before(:all) do
+ create(:issuable_severity, issue: severity_issue1, severity: :unknown)
+ create(:issuable_severity, issue: severity_issue2, severity: :low)
+ create(:issuable_severity, issue: severity_issue4, severity: :critical)
+ create(:issuable_severity, issue: severity_issue5, severity: :high)
+ end
+
+ context 'when ascending' do
+ it_behaves_like 'sorted paginated query' do
+ let(:sort_param) { :SEVERITY_ASC }
+ let(:first_param) { 2 }
+ let(:all_records) { [severity_issue3.iid, severity_issue1.iid, severity_issue2.iid, severity_issue5.iid, severity_issue4.iid] }
+ end
+ end
+
+ context 'when descending' do
+ it_behaves_like 'sorted paginated query' do
+ let(:sort_param) { :SEVERITY_DESC }
+ let(:first_param) { 2 }
+ let(:all_records) { [severity_issue4.iid, severity_issue5.iid, severity_issue2.iid, severity_issue1.iid, severity_issue3.iid] }
+ end
+ end
+ end
+
context 'when sorting by due date' do
- let_it_be(:sort_project) { create(:project, :public) }
let_it_be(:due_issue1) { create(:issue, project: sort_project, due_date: 3.days.from_now) }
let_it_be(:due_issue2) { create(:issue, project: sort_project, due_date: nil) }
let_it_be(:due_issue3) { create(:issue, project: sort_project, due_date: 2.days.ago) }
@@ -263,7 +294,6 @@ RSpec.describe 'getting an issue list for a project' do
end
context 'when sorting by relative position' do
- let_it_be(:sort_project) { create(:project, :public) }
let_it_be(:relative_issue1) { create(:issue, project: sort_project, relative_position: 2000) }
let_it_be(:relative_issue2) { create(:issue, project: sort_project, relative_position: nil) }
let_it_be(:relative_issue3) { create(:issue, project: sort_project, relative_position: 1000) }
@@ -285,7 +315,6 @@ RSpec.describe 'getting an issue list for a project' do
end
context 'when sorting by priority' do
- let_it_be(:sort_project) { create(:project, :public) }
let_it_be(:on_project) { { project: sort_project } }
let_it_be(:early_milestone) { create(:milestone, **on_project, due_date: 10.days.from_now) }
let_it_be(:late_milestone) { create(:milestone, **on_project, due_date: 30.days.from_now) }
@@ -321,7 +350,6 @@ RSpec.describe 'getting an issue list for a project' do
end
context 'when sorting by label priority' do
- let_it_be(:sort_project) { create(:project, :public) }
let_it_be(:label1) { create(:label, project: sort_project, priority: 1) }
let_it_be(:label2) { create(:label, project: sort_project, priority: 5) }
let_it_be(:label3) { create(:label, project: sort_project, priority: 10) }
@@ -348,7 +376,6 @@ RSpec.describe 'getting an issue list for a project' do
end
context 'when sorting by milestone due date' do
- let_it_be(:sort_project) { create(:project, :public) }
let_it_be(:early_milestone) { create(:milestone, project: sort_project, due_date: 10.days.from_now) }
let_it_be(:late_milestone) { create(:milestone, project: sort_project, due_date: 30.days.from_now) }
let_it_be(:milestone_issue1) { create(:issue, project: sort_project) }
diff --git a/spec/requests/api/graphql/project/jobs_spec.rb b/spec/requests/api/graphql/project/jobs_spec.rb
index 1a823ede9ac..7d0eb203d60 100644
--- a/spec/requests/api/graphql/project/jobs_spec.rb
+++ b/spec/requests/api/graphql/project/jobs_spec.rb
@@ -31,8 +31,8 @@ RSpec.describe 'Query.project.jobs' do
end
it 'does not generate N+1 queries', :request_store, :use_sql_query_cache do
- build_stage = create(:ci_stage_entity, position: 1, name: 'build', project: project, pipeline: pipeline)
- test_stage = create(:ci_stage_entity, position: 2, name: 'test', project: project, pipeline: pipeline)
+ build_stage = create(:ci_stage, position: 1, name: 'build', project: project, pipeline: pipeline)
+ test_stage = create(:ci_stage, position: 2, name: 'test', project: project, pipeline: pipeline)
create(:ci_build, pipeline: pipeline, stage_idx: build_stage.position, name: 'docker 1 2', stage: build_stage)
create(:ci_build, pipeline: pipeline, stage_idx: build_stage.position, name: 'docker 2 2', stage: build_stage)
create(:ci_build, pipeline: pipeline, stage_idx: test_stage.position, name: 'rspec 1 2', stage: test_stage)
diff --git a/spec/requests/api/graphql/project/packages_cleanup_policy_spec.rb b/spec/requests/api/graphql/project/packages_cleanup_policy_spec.rb
index a025c57d4b8..33e1dbcba27 100644
--- a/spec/requests/api/graphql/project/packages_cleanup_policy_spec.rb
+++ b/spec/requests/api/graphql/project/packages_cleanup_policy_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe 'getting the packages cleanup policy linked to a project' do
with_them do
before do
project.update!(visibility: visibility.to_s)
- project.add_user(current_user, role) unless role == :anonymous
+ project.add_member(current_user, role) unless role == :anonymous
end
it 'return the proper response' do
diff --git a/spec/requests/api/graphql/project/pipeline_spec.rb b/spec/requests/api/graphql/project/pipeline_spec.rb
index ccf97918021..08c6a2d9927 100644
--- a/spec/requests/api/graphql/project/pipeline_spec.rb
+++ b/spec/requests/api/graphql/project/pipeline_spec.rb
@@ -105,6 +105,62 @@ RSpec.describe 'getting pipeline information nested in a project' do
end
end
+ context 'when a job has been retried' do
+ let_it_be(:retried) do
+ create(:ci_build, :retried,
+ name: build_job.name,
+ pipeline: pipeline,
+ stage_idx: 0,
+ stage: build_job.stage)
+ end
+
+ let(:fields) do
+ query_graphql_field(:jobs, { retried: retried_argument },
+ query_graphql_field(:nodes, {}, all_graphql_fields_for('CiJob', max_depth: 3)))
+ end
+
+ context 'when we filter out retried jobs' do
+ let(:retried_argument) { false }
+
+ it 'contains latest jobs' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(*path, :jobs, :nodes)).to include(
+ a_graphql_entity_for(build_job, :name, :duration, :retried)
+ )
+
+ expect(graphql_data_at(*path, :jobs, :nodes)).not_to include(
+ a_graphql_entity_for(retried)
+ )
+ end
+ end
+
+ context 'when we filter to only retried jobs' do
+ let(:retried_argument) { true }
+
+ it 'contains only retried jobs' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(*path, :jobs, :nodes)).to contain_exactly(
+ a_graphql_entity_for(retried)
+ )
+ end
+ end
+
+ context 'when we pass null explicitly' do
+ let(:retried_argument) { nil }
+
+ it 'contains all jobs' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(*path, :jobs, :nodes)).to include(
+ a_graphql_entity_for(build_job),
+ a_graphql_entity_for(retried)
+ )
+ end
+ end
+ end
+
context 'when requesting only builds with certain statuses' do
let(:variables) do
{
@@ -290,8 +346,8 @@ RSpec.describe 'getting pipeline information nested in a project' do
end
it 'does not generate N+1 queries', :request_store, :use_sql_query_cache do
- build_stage = create(:ci_stage_entity, position: 1, name: 'build', project: project, pipeline: pipeline)
- test_stage = create(:ci_stage_entity, position: 2, name: 'test', project: project, pipeline: pipeline)
+ build_stage = create(:ci_stage, position: 1, name: 'build', project: project, pipeline: pipeline)
+ test_stage = create(:ci_stage, position: 2, name: 'test', project: project, pipeline: pipeline)
create(:ci_build, pipeline: pipeline, stage_idx: build_stage.position, name: 'docker 1 2', stage: build_stage)
create(:ci_build, pipeline: pipeline, stage_idx: build_stage.position, name: 'docker 2 2', stage: build_stage)
create(:ci_build, pipeline: pipeline, stage_idx: test_stage.position, name: 'rspec 1 2', stage: test_stage)
diff --git a/spec/requests/api/graphql/project/project_members_spec.rb b/spec/requests/api/graphql/project/project_members_spec.rb
index c3281b44954..4225c3ad3e8 100644
--- a/spec/requests/api/graphql/project/project_members_spec.rb
+++ b/spec/requests/api/graphql/project/project_members_spec.rb
@@ -8,8 +8,8 @@ RSpec.describe 'getting project members information' do
let_it_be(:parent_group) { create(:group, :public) }
let_it_be(:parent_project) { create(:project, :public, group: parent_group) }
let_it_be(:user) { create(:user) }
- let_it_be(:user_1) { create(:user, username: 'user') }
- let_it_be(:user_2) { create(:user, username: 'test') }
+ let_it_be(:user_1) { create(:user, username: 'user', name: 'Same Name') }
+ let_it_be(:user_2) { create(:user, username: 'test', name: 'Same Name') }
before_all do
[user_1, user_2].each { |user| parent_group.add_guest(user) }
@@ -29,11 +29,44 @@ RSpec.describe 'getting project members information' do
expect_array_response(user_1, user_2)
end
- it 'returns members that match the search query' do
- fetch_members(project: parent_project, args: { search: 'test' })
+ describe 'search argument' do
+ it 'returns members that match the search query' do
+ fetch_members(project: parent_project, args: { search: 'test' })
- expect(graphql_errors).to be_nil
- expect_array_response(user_2)
+ expect(graphql_errors).to be_nil
+ expect_array_response(user_2)
+ end
+
+ context 'when paginating' do
+ it 'returns correct results' do
+ fetch_members(project: parent_project, args: { search: 'Same Name', first: 1 })
+
+ expect_array_response(user_1)
+
+ next_cursor = graphql_data_at(:project, :projectMembers, :pageInfo, :endCursor)
+ fetch_members(project: parent_project, args: { search: 'Same Name', first: 1, after: next_cursor })
+
+ expect_array_response(user_2)
+ end
+
+ context 'when the use_keyset_aware_user_search_query FF is off' do
+ before do
+ stub_feature_flags(use_keyset_aware_user_search_query: false)
+ end
+
+ it 'raises error on the 2nd page due to missing cursor data' do
+ fetch_members(project: parent_project, args: { search: 'Same Name', first: 1 })
+
+ # user_2 because the "old" ordering was non-deterministic (insertion order, no tie-breaker column)
+ expect_array_response(user_2)
+
+ next_cursor = graphql_data_at(:project, :projectMembers, :pageInfo, :endCursor)
+ fetch_members(project: parent_project, args: { search: 'Same Name', first: 1, after: next_cursor })
+
+ expect(graphql_errors.first['message']).to include('PG::UndefinedColumn')
+ end
+ end
+ end
end
end
@@ -231,6 +264,9 @@ RSpec.describe 'getting project members information' do
}
}
}
+ pageInfo {
+ endCursor
+ }
NODE
graphql_query_for('project',
diff --git a/spec/requests/api/graphql/todo_query_spec.rb b/spec/requests/api/graphql/todo_query_spec.rb
new file mode 100644
index 00000000000..3f743f4402a
--- /dev/null
+++ b/spec/requests/api/graphql/todo_query_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Todo Query' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { nil }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ let_it_be(:todo_owner) { create(:user) }
+
+ let_it_be(:todo) { create(:todo, user: todo_owner, target: project) }
+
+ before do
+ project.add_developer(todo_owner)
+ end
+
+ let(:fields) do
+ <<~GRAPHQL
+ id
+ GRAPHQL
+ end
+
+ let(:query) do
+ graphql_query_for(:todo, { id: todo.to_global_id.to_s }, fields)
+ end
+
+ subject do
+ result = GitlabSchema.execute(query, context: { current_user: current_user }).to_h
+ graphql_dig_at(result, :data, :todo)
+ end
+
+ context 'when requesting user is todo owner' do
+ let(:current_user) { todo_owner }
+
+ it { is_expected.to include('id' => todo.to_global_id.to_s) }
+ end
+
+ context 'when requesting user is not todo owner' do
+ let(:current_user) { create(:user) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when unauthenticated' do
+ it { is_expected.to be_nil }
+ end
+end
diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb
index 09bda8ee0d5..f17d2ebbb7e 100644
--- a/spec/requests/api/graphql/work_item_spec.rb
+++ b/spec/requests/api/graphql/work_item_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'Query.work_item(id)' do
let_it_be(:developer) { create(:user) }
let_it_be(:guest) { create(:user) }
let_it_be(:project) { create(:project, :private) }
- let_it_be(:work_item) { create(:work_item, project: project, description: '- List item') }
+ let_it_be(:work_item) { create(:work_item, project: project, description: '- List item', weight: 1) }
let_it_be(:child_item1) { create(:work_item, :task, project: project) }
let_it_be(:child_item2) { create(:work_item, :task, confidential: true, project: project) }
let_it_be(:child_link1) { create(:parent_link, work_item_parent: work_item, work_item: child_item1) }
@@ -64,16 +64,13 @@ RSpec.describe 'Query.work_item(id)' do
it 'returns widget information' do
expect(work_item_data).to include(
'id' => work_item.to_gid.to_s,
- 'widgets' => match_array([
+ 'widgets' => include(
hash_including(
'type' => 'DESCRIPTION',
'description' => work_item.description,
'descriptionHtml' => ::MarkupHelper.markdown_field(work_item, :description, {})
- ),
- hash_including(
- 'type' => 'HIERARCHY'
)
- ])
+ )
)
end
end
@@ -101,10 +98,7 @@ RSpec.describe 'Query.work_item(id)' do
it 'returns widget information' do
expect(work_item_data).to include(
'id' => work_item.to_gid.to_s,
- 'widgets' => match_array([
- hash_including(
- 'type' => 'DESCRIPTION'
- ),
+ 'widgets' => include(
hash_including(
'type' => 'HIERARCHY',
'parent' => nil,
@@ -113,7 +107,7 @@ RSpec.describe 'Query.work_item(id)' do
hash_including('id' => child_link2.work_item.to_gid.to_s)
]) }
)
- ])
+ )
)
end
@@ -137,10 +131,7 @@ RSpec.describe 'Query.work_item(id)' do
it 'filters out not accessible children or parent' do
expect(work_item_data).to include(
'id' => work_item.to_gid.to_s,
- 'widgets' => match_array([
- hash_including(
- 'type' => 'DESCRIPTION'
- ),
+ 'widgets' => include(
hash_including(
'type' => 'HIERARCHY',
'parent' => nil,
@@ -148,7 +139,7 @@ RSpec.describe 'Query.work_item(id)' do
hash_including('id' => child_link1.work_item.to_gid.to_s)
]) }
)
- ])
+ )
)
end
end
@@ -160,20 +151,85 @@ RSpec.describe 'Query.work_item(id)' do
it 'returns parent information' do
expect(work_item_data).to include(
'id' => work_item.to_gid.to_s,
- 'widgets' => match_array([
- hash_including(
- 'type' => 'DESCRIPTION'
- ),
+ 'widgets' => include(
hash_including(
'type' => 'HIERARCHY',
'parent' => hash_including('id' => parent_link.work_item_parent.to_gid.to_s),
'children' => { 'nodes' => match_array([]) }
)
- ])
+ )
)
end
end
end
+
+ describe 'weight widget' do
+ let(:work_item_fields) do
+ <<~GRAPHQL
+ id
+ widgets {
+ type
+ ... on WorkItemWidgetWeight {
+ weight
+ }
+ }
+ GRAPHQL
+ end
+
+ it 'returns widget information' do
+ expect(work_item_data).to include(
+ 'id' => work_item.to_gid.to_s,
+ 'widgets' => include(
+ hash_including(
+ 'type' => 'WEIGHT',
+ 'weight' => work_item.weight
+ )
+ )
+ )
+ end
+ end
+
+ describe 'assignees widget' do
+ let(:assignees) { create_list(:user, 2) }
+ let(:work_item) { create(:work_item, project: project, assignees: assignees) }
+
+ let(:work_item_fields) do
+ <<~GRAPHQL
+ id
+ widgets {
+ type
+ ... on WorkItemWidgetAssignees {
+ allowsMultipleAssignees
+ canInviteMembers
+ assignees {
+ nodes {
+ id
+ username
+ }
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ it 'returns widget information' do
+ expect(work_item_data).to include(
+ 'id' => work_item.to_gid.to_s,
+ 'widgets' => include(
+ hash_including(
+ 'type' => 'ASSIGNEES',
+ 'allowsMultipleAssignees' => boolean,
+ 'canInviteMembers' => boolean,
+ 'assignees' => {
+ 'nodes' => match_array(
+ assignees.map { |a| { 'id' => a.to_gid.to_s, 'username' => a.username } }
+ )
+ }
+ )
+ )
+ )
+ end
+ end
end
context 'when an Issue Global ID is provided' do
diff --git a/spec/requests/api/group_export_spec.rb b/spec/requests/api/group_export_spec.rb
index ffa313d4464..bda46f85140 100644
--- a/spec/requests/api/group_export_spec.rb
+++ b/spec/requests/api/group_export_spec.rb
@@ -32,9 +32,9 @@ RSpec.describe API::GroupExport do
context 'when export file exists' do
before do
- allow(Gitlab::ApplicationRateLimiter)
- .to receive(:increment)
- .and_return(0)
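+ # Stub increment on the next rate limiter strategy instance rather than on ApplicationRateLimiter itself.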
+ allow_next_instance_of(Gitlab::ApplicationRateLimiter::BaseStrategy) do |strategy|
+ allow(strategy).to receive(:increment).and_return(0)
+ end
upload.export_file = fixture_file_upload('spec/fixtures/group_export.tar.gz', "`/tar.gz")
upload.save!
@@ -149,9 +149,11 @@ RSpec.describe API::GroupExport do
before do
group.add_owner(user)
- allow(Gitlab::ApplicationRateLimiter)
- .to receive(:increment)
- .and_return(Gitlab::ApplicationRateLimiter.rate_limits[:group_export][:threshold].call + 1)
+ allow_next_instance_of(Gitlab::ApplicationRateLimiter::BaseStrategy) do |strategy|
+ allow(strategy)
+ .to receive(:increment)
+ .and_return(Gitlab::ApplicationRateLimiter.rate_limits[:group_export][:threshold].call + 1)
+ end
end
it 'throttles the endpoint' do
diff --git a/spec/requests/api/group_variables_spec.rb b/spec/requests/api/group_variables_spec.rb
index 6d5676bbe35..a7b4bea362f 100644
--- a/spec/requests/api/group_variables_spec.rb
+++ b/spec/requests/api/group_variables_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe API::GroupVariables do
let(:access_level) {}
before do
- group.add_user(user, access_level) if access_level
+ group.add_member(user, access_level) if access_level
end
describe 'GET /groups/:id/variables' do
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 56f08249bdd..3bc3cce5310 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -645,7 +645,7 @@ RSpec.describe API::Groups do
project = create(:project, namespace: group2, path: 'Foo')
create(:project_group_link, project: project, group: group1)
- get api("/groups/#{group1.id}", user1), params: { with_projects: false }
+ get api("/groups/#{group2.id}", user1), params: { with_projects: false }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['projects']).to be_nil
@@ -748,6 +748,18 @@ RSpec.describe API::Groups do
expect(json_response).to include('runners_token')
end
+ it "returns runners_token and no projects when with_projects option is set to false" do
+ project = create(:project, namespace: group2, path: 'Foo')
+ create(:project_group_link, project: project, group: group1)
+
+ get api("/groups/#{group2.id}", admin), params: { with_projects: false }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['projects']).to be_nil
+ expect(json_response['shared_projects']).to be_nil
+ expect(json_response).to include('runners_token')
+ end
+
it "does not return a non existing group" do
get api("/groups/#{non_existing_record_id}", admin)
diff --git a/spec/requests/api/integrations_spec.rb b/spec/requests/api/integrations_spec.rb
index cd9a0746581..b2db7f7caef 100644
--- a/spec/requests/api/integrations_spec.rb
+++ b/spec/requests/api/integrations_spec.rb
@@ -55,25 +55,20 @@ RSpec.describe API::Integrations do
describe "PUT /projects/:id/#{endpoint}/#{integration.dasherize}" do
include_context integration
- # NOTE: Some attributes are not supported for PUT requests, even though in most cases they should be.
- # For some of them the problem is somewhere else, i.e. most chat integrations don't support the `*_channel`
- # fields but they're incorrectly included in `#fields`.
- #
+ # NOTE: Some attributes are not supported for PUT requests, even though they probably should be.
# We can fix these manually, or with a generic approach like https://gitlab.com/gitlab-org/gitlab/-/issues/348208
- let(:missing_channel_attributes) { %i[push_channel issue_channel confidential_issue_channel merge_request_channel note_channel confidential_note_channel tag_push_channel pipeline_channel wiki_page_channel] }
let(:missing_attributes) do
{
datadog: %i[archive_trace_events],
- discord: missing_channel_attributes + %i[branches_to_be_notified notify_only_broken_pipelines],
- hangouts_chat: missing_channel_attributes + %i[notify_only_broken_pipelines],
+ discord: %i[branches_to_be_notified notify_only_broken_pipelines],
+ hangouts_chat: %i[notify_only_broken_pipelines],
jira: %i[issues_enabled project_key vulnerabilities_enabled vulnerabilities_issuetype],
mattermost: %i[deployment_channel labels_to_be_notified],
- microsoft_teams: missing_channel_attributes,
mock_ci: %i[enable_ssl_verification],
prometheus: %i[manual_configuration],
slack: %i[alert_events alert_channel deployment_channel labels_to_be_notified],
- unify_circuit: missing_channel_attributes + %i[branches_to_be_notified notify_only_broken_pipelines],
- webex_teams: missing_channel_attributes + %i[branches_to_be_notified notify_only_broken_pipelines]
+ unify_circuit: %i[branches_to_be_notified notify_only_broken_pipelines],
+ webex_teams: %i[branches_to_be_notified notify_only_broken_pipelines]
}
end
@@ -368,6 +363,31 @@ RSpec.describe API::Integrations do
end
end
+ describe 'Jira integration' do
+ let(:integration_name) { 'jira' }
+ let(:params) do
+ { url: 'https://jira.example.com', username: 'username', password: 'password' }
+ end
+
+ before do
+ project.create_jira_integration(active: true, properties: params)
+ end
+
+ it 'returns the jira_issue_transition_id for get request' do
+ get api("/projects/#{project.id}/#{endpoint}/#{integration_name}", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['properties']).to include('jira_issue_transition_id' => nil)
+ end
+
+ it 'returns the jira_issue_transition_id for put request' do
+ put api("/projects/#{project.id}/#{endpoint}/#{integration_name}", user), params: params.merge(jira_issue_transition_id: '1')
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['properties']['jira_issue_transition_id']).to eq('1')
+ end
+ end
+
describe 'Pipelines Email Integration' do
let(:integration_name) { 'pipelines-email' }
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index 93e4e72f78f..acfe476a864 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -51,64 +51,6 @@ RSpec.describe API::Internal::Base do
end
end
- describe 'GET /internal/error_tracking_allowed' do
- let_it_be(:project) { create(:project) }
-
- let(:params) { { project_id: project.id, public_key: 'key' } }
-
- context 'when the secret header is missing' do
- it 'responds with unauthorized entity' do
- post api("/internal/error_tracking_allowed"), params: params
-
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
- end
-
- context 'when some params are missing' do
- it 'responds with unprocessable entity' do
- post api("/internal/error_tracking_allowed"), params: params.except(:public_key),
- headers: { API::Helpers::GITLAB_SHARED_SECRET_HEADER => Base64.encode64(secret_token) }
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- end
- end
-
- context 'when the error tracking is disabled' do
- it 'returns enabled: false' do
- create(:error_tracking_client_key, project: project, active: false)
-
- post api("/internal/error_tracking_allowed"), params: params,
- headers: { API::Helpers::GITLAB_SHARED_SECRET_HEADER => Base64.encode64(secret_token) }
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to eq({ 'enabled' => false })
- end
-
- context 'when the error tracking record does not exist' do
- it 'returns enabled: false' do
- post api("/internal/error_tracking_allowed"), params: params,
- headers: { API::Helpers::GITLAB_SHARED_SECRET_HEADER => Base64.encode64(secret_token) }
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to eq({ 'enabled' => false })
- end
- end
- end
-
- context 'when the error tracking is enabled' do
- it 'returns enabled: true' do
- client_key = create(:error_tracking_client_key, project: project, active: true)
- params[:public_key] = client_key.public_key
-
- post api("/internal/error_tracking_allowed"), params: params,
- headers: { API::Helpers::GITLAB_SHARED_SECRET_HEADER => Base64.encode64(secret_token) }
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to eq({ 'enabled' => true })
- end
- end
- end
-
describe 'GET /internal/two_factor_recovery_codes' do
let(:key_id) { key.id }
diff --git a/spec/requests/api/internal/error_tracking_spec.rb b/spec/requests/api/internal/error_tracking_spec.rb
new file mode 100644
index 00000000000..69eb54d5ed2
--- /dev/null
+++ b/spec/requests/api/internal/error_tracking_spec.rb
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Internal::ErrorTracking do
+ let(:secret_token) { Gitlab::CurrentSettings.error_tracking_access_token }
+ let(:headers) do
+ { ::API::Internal::ErrorTracking::GITLAB_ERROR_TRACKING_TOKEN_HEADER => Base64.encode64(secret_token) }
+ end
+
+ describe 'GET /internal/error_tracking/allowed' do
+ let_it_be(:project) { create(:project) }
+
+ let(:params) { { project_id: project.id, public_key: 'key' } }
+
+ subject(:send_request) do
+ post api('/internal/error_tracking/allowed'), params: params, headers: headers
+ end
+
+ before do
+ # Because these feature flags are disabled in specs, we have to enable them explicitly.
+ stub_feature_flags(use_click_house_database_for_error_tracking: true)
+ stub_feature_flags(gitlab_error_tracking: true)
+ end
+
+ context 'when the secret header is missing' do
+ let(:headers) { {} }
+
+ it 'responds with unauthorized entity' do
+ send_request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'when some params are missing' do
+ let(:params) { { project_id: project.id } }
+
+ it 'responds with unprocessable entity' do
+ send_request
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+
+ context 'when public_key is unknown' do
+ it 'returns enabled: false' do
+ send_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq('enabled' => false)
+ end
+ end
+
+ context 'when project_id is unknown' do
+ it 'responds with 404 not found' do
+ params[:project_id] = non_existing_record_id
+
+ send_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when the error tracking is disabled' do
+ it 'returns enabled: false' do
+ create(:error_tracking_client_key, :disabled, project: project)
+
+ send_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq('enabled' => false)
+ end
+ end
+
+ context 'when the error tracking is enabled' do
+ let_it_be(:client_key) { create(:error_tracking_client_key, project: project) }
+
+ before do
+ params[:public_key] = client_key.public_key
+
+ stub_application_setting(error_tracking_enabled: true)
+ stub_application_setting(error_tracking_api_url: 'https://localhost/error_tracking')
+ end
+
+ it 'returns enabled: true' do
+ send_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq('enabled' => true)
+ end
+
+ context 'when the use_click_house_database_for_error_tracking and gitlab_error_tracking feature flags are disabled' do
+ before do
+ stub_feature_flags(use_click_house_database_for_error_tracking: false)
+ stub_feature_flags(gitlab_error_tracking: false)
+ end
+
+ it 'returns enabled: false' do
+ send_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq('enabled' => false)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
index 0e566dd8c0e..c0a979995c9 100644
--- a/spec/requests/api/internal/kubernetes_spec.rb
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -169,12 +169,12 @@ RSpec.describe API::Internal::Kubernetes do
'features' => {}
),
'gitaly_repository' => a_hash_including(
- 'default_branch' => project.default_branch_or_main,
'storage_name' => project.repository_storage,
'relative_path' => project.disk_path + '.git',
'gl_repository' => "project-#{project.id}",
'gl_project_path' => project.full_path
- )
+ ),
+ 'default_branch' => project.default_branch_or_main
)
)
end
diff --git a/spec/requests/api/invitations_spec.rb b/spec/requests/api/invitations_spec.rb
index 64ad5733c1b..53154aef21e 100644
--- a/spec/requests/api/invitations_spec.rb
+++ b/spec/requests/api/invitations_spec.rb
@@ -69,6 +69,20 @@ RSpec.describe API::Invitations do
end
end
+ context 'when invitee is already an invited member' do
+ it 'updates the member for that email' do
+ member = source.add_developer(email)
+
+ expect do
+ post invitations_url(source, maintainer),
+ params: { email: email, access_level: Member::MAINTAINER }
+
+ expect(response).to have_gitlab_http_status(:created)
+ end.to change { member.reset.access_level }.from(Member::DEVELOPER).to(Member::MAINTAINER)
+ .and not_change { source.members.invite.count }
+ end
+ end
+
it 'adds a new member by email' do
expect do
post invitations_url(source, maintainer),
@@ -320,7 +334,7 @@ RSpec.describe API::Invitations do
let(:source) { project }
end
- it 'records queries', :request_store, :use_sql_query_cache do
+ it 'does not exceed expected queries count for emails', :request_store, :use_sql_query_cache do
post invitations_url(project, maintainer), params: { email: email, access_level: Member::DEVELOPER }
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
@@ -336,7 +350,25 @@ RSpec.describe API::Invitations do
end.not_to exceed_all_query_limit(control.count).with_threshold(unresolved_n_plus_ones)
end
- it 'records queries with secondary emails', :request_store, :use_sql_query_cache do
+ it 'does not exceed expected queries count for user_ids', :request_store, :use_sql_query_cache do
+ stranger2 = create(:user)
+
+ post invitations_url(project, maintainer), params: { user_id: stranger.id, access_level: Member::DEVELOPER }
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ post invitations_url(project, maintainer), params: { user_id: stranger2.id, access_level: Member::DEVELOPER }
+ end
+
+ users = create_list(:user, 5)
+
+ unresolved_n_plus_ones = 136 # 54 for 1 vs 190 for 5 - currently there are 34 queries added per user
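+ # i.e. 190 - 54 = 136 extra queries allowed over the 1-user control, roughly 34 per additional user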
+
+ expect do
+ post invitations_url(project, maintainer), params: { user_id: users.map(&:id).join(','), access_level: Member::DEVELOPER }
+ end.not_to exceed_all_query_limit(control.count).with_threshold(unresolved_n_plus_ones)
+ end
+
+ it 'does not exceed expected queries count with secondary emails', :request_store, :use_sql_query_cache do
create(:email, email: email, user: create(:user))
post invitations_url(project, maintainer), params: { email: email, access_level: Member::DEVELOPER }
@@ -365,7 +397,7 @@ RSpec.describe API::Invitations do
let(:source) { group }
end
- it 'records queries', :request_store, :use_sql_query_cache do
+ it 'does not exceed expected queries count for emails', :request_store, :use_sql_query_cache do
post invitations_url(group, maintainer), params: { email: email, access_level: Member::DEVELOPER }
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
@@ -381,7 +413,7 @@ RSpec.describe API::Invitations do
end.not_to exceed_all_query_limit(control.count).with_threshold(unresolved_n_plus_ones)
end
- it 'records queries with secondary emails', :request_store, :use_sql_query_cache do
+ it 'does not exceed expected queries count for secondary emails', :request_store, :use_sql_query_cache do
create(:email, email: email, user: create(:user))
post invitations_url(group, maintainer), params: { email: email, access_level: Member::DEVELOPER }
diff --git a/spec/requests/api/issues/issues_spec.rb b/spec/requests/api/issues/issues_spec.rb
index 480baff6eed..dd7d32f3565 100644
--- a/spec/requests/api/issues/issues_spec.rb
+++ b/spec/requests/api/issues/issues_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe API::Issues do
let_it_be(:milestone) { create(:milestone, title: '1.0.0', project: project) }
let_it_be(:empty_milestone) { create(:milestone, title: '2.0.0', project: project) }
+ let_it_be(:task) { create(:issue, :task, author: user, project: project) }
let_it_be(:closed_issue) do
create :closed_issue,
@@ -1151,19 +1152,6 @@ RSpec.describe API::Issues do
expected_url = expose_url(api_v4_project_issue_path(id: new_issue.project_id, issue_iid: new_issue.iid))
expect(json_response.dig('_links', 'closed_as_duplicate_of')).to eq(expected_url)
end
-
- context 'feature flag is disabled' do
- before do
- stub_feature_flags(closed_as_duplicate_of_issues_api: false)
- end
-
- it 'does not return the issue as closed_as_duplicate_of' do
- get api("/projects/#{project.id}/issues/#{issue_closed_as_dup.iid}", user)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.dig('_links', 'closed_as_duplicate_of')).to eq(nil)
- end
- end
end
end
end
diff --git a/spec/requests/api/markdown_snapshot_spec.rb b/spec/requests/api/markdown_snapshot_spec.rb
index 37607a4e866..1270efdfd6f 100644
--- a/spec/requests/api/markdown_snapshot_spec.rb
+++ b/spec/requests/api/markdown_snapshot_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
# See https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/#markdown-snapshot-testing
# for documentation on this spec.
RSpec.describe API::Markdown, 'Snapshot' do
+ # noinspection RubyMismatchedArgumentType (ignore RBS type warning: __dir__ can be nil, but 2nd argument can't be nil)
glfm_specification_dir = File.expand_path('../../../glfm_specification', __dir__)
- glfm_example_snapshots_dir = File.expand_path('../../fixtures/glfm/example_snapshots', __dir__)
- include_context 'with API::Markdown Snapshot shared context', glfm_specification_dir, glfm_example_snapshots_dir
+ include_context 'with API::Markdown Snapshot shared context', glfm_specification_dir
end
diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb
index bc325aad823..ba82d2facc6 100644
--- a/spec/requests/api/maven_packages_spec.rb
+++ b/spec/requests/api/maven_packages_spec.rb
@@ -226,14 +226,26 @@ RSpec.describe API::MavenPackages do
end
end
+ shared_examples 'file download in FIPS mode' do
+ context 'in FIPS mode', :fips_mode do
+ it_behaves_like 'successfully returning the file'
+
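+ # MD5 is not a FIPS-approved digest, so requests for .md5 checksum files are rejected when FIPS mode is enabled.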
+ it 'rejects the request for an md5 file' do
+ download_file(file_name: package_file.file_name + '.md5')
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+ end
+
describe 'GET /api/v4/packages/maven/*path/:file_name' do
context 'a public project' do
subject { download_file(file_name: package_file.file_name) }
shared_examples 'getting a file' do
it_behaves_like 'tracking the file download event'
-
it_behaves_like 'successfully returning the file'
+ it_behaves_like 'file download in FIPS mode'
it 'returns sha1 of the file' do
download_file(file_name: package_file.file_name + '.sha1')
@@ -402,8 +414,8 @@ RSpec.describe API::MavenPackages do
shared_examples 'getting a file for a group' do
it_behaves_like 'tracking the file download event'
-
it_behaves_like 'successfully returning the file'
+ it_behaves_like 'file download in FIPS mode'
it 'returns sha1 of the file' do
download_file(file_name: package_file.file_name + '.sha1')
@@ -625,8 +637,8 @@ RSpec.describe API::MavenPackages do
subject { download_file(file_name: package_file.file_name) }
it_behaves_like 'tracking the file download event'
-
it_behaves_like 'successfully returning the file'
+ it_behaves_like 'file download in FIPS mode'
it 'returns sha1 of the file' do
download_file(file_name: package_file.file_name + '.sha1')
@@ -833,6 +845,16 @@ RSpec.describe API::MavenPackages do
subject { upload_file_with_token(params: params) }
+ context 'FIPS mode', :fips_mode do
+ it_behaves_like 'package workhorse uploads'
+
+ it 'rejects the request for md5 file' do
+ upload_file_with_token(params: params, file_extension: 'jar.md5')
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+
context 'file size is too large' do
it 'rejects the request' do
allow_next_instance_of(UploadedFile) do |uploaded_file|
@@ -995,12 +1017,22 @@ RSpec.describe API::MavenPackages do
end
context 'for md5 file' do
+ subject { upload_file_with_token(params: params, file_extension: 'jar.md5') }
+
it 'returns an empty body' do
- upload_file_with_token(params: params, file_extension: 'jar.md5')
+ subject
expect(response.body).to eq('')
expect(response).to have_gitlab_http_status(:ok)
end
+
+ context 'with FIPS mode enabled', :fips_mode do
+ it 'rejects the request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
end
end
diff --git a/spec/requests/api/metadata_spec.rb b/spec/requests/api/metadata_spec.rb
new file mode 100644
index 00000000000..dbca06b7f3e
--- /dev/null
+++ b/spec/requests/api/metadata_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Metadata do
+ shared_examples_for 'GET /metadata' do
+ context 'when unauthenticated' do
+ it 'returns authentication error' do
+ get api('/metadata')
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'when authenticated as user' do
+ let(:user) { create(:user) }
+
+ it 'returns the metadata information' do
+ get api('/metadata', user)
+
+ expect_metadata
+ end
+ end
+
+ context 'when authenticated with token' do
+ let(:personal_access_token) { create(:personal_access_token, scopes: scopes) }
+
+ context 'with api scope' do
+ let(:scopes) { %i(api) }
+
+ it 'returns the metadata information' do
+ get api('/metadata', personal_access_token: personal_access_token)
+
+ expect_metadata
+ end
+
+ it 'returns "200" response on head requests' do
+ head api('/metadata', personal_access_token: personal_access_token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'with read_user scope' do
+ let(:scopes) { %i(read_user) }
+
+ it 'returns the metadata information' do
+ get api('/metadata', personal_access_token: personal_access_token)
+
+ expect_metadata
+ end
+
+ it 'returns "200" response on head requests' do
+ head api('/metadata', personal_access_token: personal_access_token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'with neither api nor read_user scope' do
+ let(:scopes) { %i(read_repository) }
+
+ it 'returns authorization error' do
+ get api('/metadata', personal_access_token: personal_access_token)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ def expect_metadata
+ aggregate_failures("testing response") do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/metadata')
+ end
+ end
+ end
+
+ context 'with graphql enabled' do
+ before do
+ stub_feature_flags(graphql: true)
+ end
+
+ include_examples 'GET /metadata'
+ end
+
+ context 'with graphql disabled' do
+ before do
+ stub_feature_flags(graphql: false)
+ end
+
+ include_examples 'GET /metadata'
+ end
+end
diff --git a/spec/requests/api/npm_project_packages_spec.rb b/spec/requests/api/npm_project_packages_spec.rb
index 7c3f1890095..62809b432af 100644
--- a/spec/requests/api/npm_project_packages_spec.rb
+++ b/spec/requests/api/npm_project_packages_spec.rb
@@ -30,6 +30,7 @@ RSpec.describe API::NpmProjectPackages do
end
describe 'GET /api/v4/projects/:id/packages/npm/*package_name/-/*file_name' do
+ let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace } }
let(:package_file) { package.package_files.first }
let(:headers) { {} }
@@ -61,18 +62,18 @@ RSpec.describe API::NpmProjectPackages do
let(:headers) { build_token_auth_header(token.token) }
it_behaves_like 'successfully downloads the file'
+ it_behaves_like 'a package tracking event', 'API::NpmPackages', 'pull_package'
end
context 'with job token' do
let(:headers) { build_token_auth_header(job.token) }
it_behaves_like 'successfully downloads the file'
+ it_behaves_like 'a package tracking event', 'API::NpmPackages', 'pull_package'
end
end
context 'a public project' do
- let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace } }
-
it_behaves_like 'successfully downloads the file'
it_behaves_like 'a package tracking event', 'API::NpmPackages', 'pull_package'
@@ -112,6 +113,15 @@ RSpec.describe API::NpmProjectPackages do
end
it_behaves_like 'a package file that requires auth'
+
+ context 'with a job token for a different user' do
+ let_it_be(:other_user) { create(:user) }
+ let_it_be_with_reload(:other_job) { create(:ci_build, :running, user: other_user) }
+
+ let(:headers) { build_token_auth_header(other_job.token) }
+
+ it_behaves_like 'successfully downloads the file'
+ end
end
end
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index 35844631287..8d3622ca17d 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -100,6 +100,7 @@ ci_cd_settings:
forward_deployment_enabled: ci_forward_deployment_enabled
job_token_scope_enabled: ci_job_token_scope_enabled
separated_caches: ci_separated_caches
+ opt_in_jwt: ci_opt_in_jwt
build_import_state: # import_state
unexposed_attributes:
@@ -123,6 +124,11 @@ project_feature:
- created_at
- metrics_dashboard_access_level
- package_registry_access_level
+ - monitor_access_level
+ - infrastructure_access_level
+ - feature_flags_access_level
+ - environments_access_level
+ - releases_access_level
- project_id
- updated_at
computed_attributes:
diff --git a/spec/requests/api/project_export_spec.rb b/spec/requests/api/project_export_spec.rb
index 8a8cd8512f8..d74fd82ca09 100644
--- a/spec/requests/api/project_export_spec.rb
+++ b/spec/requests/api/project_export_spec.rb
@@ -248,9 +248,10 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache do
let(:request) { get api(download_path, admin) }
before do
- allow(Gitlab::ApplicationRateLimiter)
- .to receive(:increment)
- .and_return(Gitlab::ApplicationRateLimiter.rate_limits[:project_download_export][:threshold].call + 1)
+ allow_next_instance_of(Gitlab::ApplicationRateLimiter::BaseStrategy) do |strategy|
+ threshold = Gitlab::ApplicationRateLimiter.rate_limits[:project_download_export][:threshold].call
+ allow(strategy).to receive(:increment).and_return(threshold + 1)
+ end
end
it 'prevents requesting project export' do
@@ -433,9 +434,10 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache do
context 'when rate limit is exceeded across projects' do
before do
- allow(Gitlab::ApplicationRateLimiter)
- .to receive(:increment)
- .and_return(Gitlab::ApplicationRateLimiter.rate_limits[:project_export][:threshold].call + 1)
+ allow_next_instance_of(Gitlab::ApplicationRateLimiter::BaseStrategy) do |strategy|
+ threshold = Gitlab::ApplicationRateLimiter.rate_limits[:project_export][:threshold].call
+ allow(strategy).to receive(:increment).and_return(threshold + 1)
+ end
end
it 'prevents requesting project export' do
diff --git a/spec/requests/api/project_hooks_spec.rb b/spec/requests/api/project_hooks_spec.rb
index 26e0adc11b3..2d925620a91 100644
--- a/spec/requests/api/project_hooks_spec.rb
+++ b/spec/requests/api/project_hooks_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
RSpec.describe API::ProjectHooks, 'ProjectHooks' do
- let(:user) { create(:user) }
- let(:user3) { create(:user) }
- let!(:project) { create(:project, creator_id: user.id, namespace: user.namespace) }
- let!(:hook) do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user3) { create(:user) }
+ let_it_be(:project) { create(:project, creator_id: user.id, namespace: user.namespace) }
+ let_it_be_with_refind(:hook) do
create(:project_hook,
:all_events_enabled,
project: project,
@@ -15,232 +15,55 @@ RSpec.describe API::ProjectHooks, 'ProjectHooks' do
push_events_branch_filter: 'master')
end
- before do
+ before_all do
project.add_maintainer(user)
project.add_developer(user3)
end
- describe "GET /projects/:id/hooks" do
- context "authorized user" do
- it "returns project hooks" do
- get api("/projects/#{project.id}/hooks", user)
+ it_behaves_like 'web-hook API endpoints', '/projects/:id' do
+ let(:unauthorized_user) { user3 }
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to be_an Array
- expect(response).to include_pagination_headers
- expect(json_response.count).to eq(1)
- expect(json_response.first['url']).to eq("http://example.com")
- expect(json_response.first['issues_events']).to eq(true)
- expect(json_response.first['confidential_issues_events']).to eq(true)
- expect(json_response.first['push_events']).to eq(true)
- expect(json_response.first['merge_requests_events']).to eq(true)
- expect(json_response.first['tag_push_events']).to eq(true)
- expect(json_response.first['note_events']).to eq(true)
- expect(json_response.first['confidential_note_events']).to eq(true)
- expect(json_response.first['job_events']).to eq(true)
- expect(json_response.first['pipeline_events']).to eq(true)
- expect(json_response.first['wiki_page_events']).to eq(true)
- expect(json_response.first['deployment_events']).to eq(true)
- expect(json_response.first['releases_events']).to eq(true)
- expect(json_response.first['enable_ssl_verification']).to eq(true)
- expect(json_response.first['push_events_branch_filter']).to eq('master')
- expect(json_response.first['alert_status']).to eq('executable')
- expect(json_response.first['disabled_until']).to be_nil
- end
+ def scope
+ project.hooks
end
- context "unauthorized user" do
- it "does not access project hooks" do
- get api("/projects/#{project.id}/hooks", user3)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
- end
-
- describe "GET /projects/:id/hooks/:hook_id" do
- context "authorized user" do
- it "returns a project hook" do
- get api("/projects/#{project.id}/hooks/#{hook.id}", user)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['url']).to eq(hook.url)
- expect(json_response['issues_events']).to eq(hook.issues_events)
- expect(json_response['confidential_issues_events']).to eq(hook.confidential_issues_events)
- expect(json_response['push_events']).to eq(hook.push_events)
- expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events)
- expect(json_response['tag_push_events']).to eq(hook.tag_push_events)
- expect(json_response['note_events']).to eq(hook.note_events)
- expect(json_response['confidential_note_events']).to eq(hook.confidential_note_events)
- expect(json_response['job_events']).to eq(hook.job_events)
- expect(json_response['pipeline_events']).to eq(hook.pipeline_events)
- expect(json_response['wiki_page_events']).to eq(hook.wiki_page_events)
- expect(json_response['releases_events']).to eq(hook.releases_events)
- expect(json_response['deployment_events']).to eq(true)
- expect(json_response['enable_ssl_verification']).to eq(hook.enable_ssl_verification)
- expect(json_response['alert_status']).to eq(hook.alert_status.to_s)
- expect(json_response['disabled_until']).to be_nil
- end
-
- it "returns a 404 error if hook id is not available" do
- get api("/projects/#{project.id}/hooks/#{non_existing_record_id}", user)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context "unauthorized user" do
- it "does not access an existing hook" do
- get api("/projects/#{project.id}/hooks/#{hook.id}", user3)
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
- end
-
- describe "POST /projects/:id/hooks" do
- it "adds hook to project" do
- expect do
- post(api("/projects/#{project.id}/hooks", user),
- params: { url: "http://example.com", issues_events: true,
- confidential_issues_events: true, wiki_page_events: true,
- job_events: true, deployment_events: true, releases_events: true,
- push_events_branch_filter: 'some-feature-branch' })
- end.to change {project.hooks.count}.by(1)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['url']).to eq('http://example.com')
- expect(json_response['issues_events']).to eq(true)
- expect(json_response['confidential_issues_events']).to eq(true)
- expect(json_response['push_events']).to eq(true)
- expect(json_response['merge_requests_events']).to eq(false)
- expect(json_response['tag_push_events']).to eq(false)
- expect(json_response['note_events']).to eq(false)
- expect(json_response['confidential_note_events']).to eq(nil)
- expect(json_response['job_events']).to eq(true)
- expect(json_response['pipeline_events']).to eq(false)
- expect(json_response['wiki_page_events']).to eq(true)
- expect(json_response['deployment_events']).to eq(true)
- expect(json_response['releases_events']).to eq(true)
- expect(json_response['enable_ssl_verification']).to eq(true)
- expect(json_response['push_events_branch_filter']).to eq('some-feature-branch')
- expect(json_response).not_to include('token')
+ def collection_uri
+ "/projects/#{project.id}/hooks"
end
- it "adds the token without including it in the response" do
- token = "secret token"
-
- expect do
- post api("/projects/#{project.id}/hooks", user), params: { url: "http://example.com", token: token }
- end.to change {project.hooks.count}.by(1)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response["url"]).to eq("http://example.com")
- expect(json_response).not_to include("token")
-
- hook = project.hooks.find(json_response["id"])
-
- expect(hook.url).to eq("http://example.com")
- expect(hook.token).to eq(token)
+ def match_collection_schema
+ match_response_schema('public_api/v4/project_hooks')
end
- it "returns a 400 error if url not given" do
- post api("/projects/#{project.id}/hooks", user)
- expect(response).to have_gitlab_http_status(:bad_request)
+ def hook_uri(hook_id = hook.id)
+ "/projects/#{project.id}/hooks/#{hook_id}"
end
- it "returns a 422 error if url not valid" do
- post api("/projects/#{project.id}/hooks", user), params: { url: "ftp://example.com" }
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ def match_hook_schema
+ match_response_schema('public_api/v4/project_hook')
end
- it "returns a 422 error if branch filter is not valid" do
- post api("/projects/#{project.id}/hooks", user), params: { url: "http://example.com", push_events_branch_filter: '~badbranchname/' }
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ def event_names
+ %i[
+ push_events
+ tag_push_events
+ merge_requests_events
+ issues_events
+ confidential_issues_events
+ note_events
+ confidential_note_events
+ pipeline_events
+ wiki_page_events
+ job_events
+ deployment_events
+ releases_events
+ ]
end
- end
-
- describe "PUT /projects/:id/hooks/:hook_id" do
- it "updates an existing project hook" do
- put api("/projects/#{project.id}/hooks/#{hook.id}", user),
- params: { url: 'http://example.org', push_events: false, job_events: true }
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['url']).to eq('http://example.org')
- expect(json_response['issues_events']).to eq(hook.issues_events)
- expect(json_response['confidential_issues_events']).to eq(hook.confidential_issues_events)
- expect(json_response['push_events']).to eq(false)
- expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events)
- expect(json_response['tag_push_events']).to eq(hook.tag_push_events)
- expect(json_response['note_events']).to eq(hook.note_events)
- expect(json_response['confidential_note_events']).to eq(hook.confidential_note_events)
- expect(json_response['job_events']).to eq(hook.job_events)
- expect(json_response['pipeline_events']).to eq(hook.pipeline_events)
- expect(json_response['wiki_page_events']).to eq(hook.wiki_page_events)
- expect(json_response['releases_events']).to eq(hook.releases_events)
- expect(json_response['enable_ssl_verification']).to eq(hook.enable_ssl_verification)
+ let(:default_values) do
+ { push_events: true, confidential_note_events: nil }
end
- it "adds the token without including it in the response" do
- token = "secret token"
-
- put api("/projects/#{project.id}/hooks/#{hook.id}", user), params: { url: "http://example.org", token: token }
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response["url"]).to eq("http://example.org")
- expect(json_response).not_to include("token")
-
- expect(hook.reload.url).to eq("http://example.org")
- expect(hook.reload.token).to eq(token)
- end
-
- it "returns 404 error if hook id not found" do
- put api("/projects/#{project.id}/hooks/#{non_existing_record_id}", user), params: { url: 'http://example.org' }
- expect(response).to have_gitlab_http_status(:not_found)
- end
-
- it "returns 400 error if url is not given" do
- put api("/projects/#{project.id}/hooks/#{hook.id}", user)
- expect(response).to have_gitlab_http_status(:bad_request)
- end
-
- it "returns a 422 error if url is not valid" do
- put api("/projects/#{project.id}/hooks/#{hook.id}", user), params: { url: 'ftp://example.com' }
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- end
- end
-
- describe "DELETE /projects/:id/hooks/:hook_id" do
- it "deletes hook from project" do
- expect do
- delete api("/projects/#{project.id}/hooks/#{hook.id}", user)
-
- expect(response).to have_gitlab_http_status(:no_content)
- end.to change {project.hooks.count}.by(-1)
- end
-
- it "returns a 404 error when deleting non existent hook" do
- delete api("/projects/#{project.id}/hooks/42", user)
- expect(response).to have_gitlab_http_status(:not_found)
- end
-
- it "returns a 404 error if hook id not given" do
- delete api("/projects/#{project.id}/hooks", user)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
-
- it "returns a 404 if a user attempts to delete project hooks they do not own" do
- test_user = create(:user)
- other_project = create(:project)
- other_project.add_maintainer(test_user)
-
- delete api("/projects/#{other_project.id}/hooks/#{hook.id}", test_user)
- expect(response).to have_gitlab_http_status(:not_found)
- expect(WebHook.exists?(hook.id)).to be_truthy
- end
-
- it_behaves_like '412 response' do
- let(:request) { api("/projects/#{project.id}/hooks/#{hook.id}", user) }
- end
+ it_behaves_like 'web-hook API endpoints with branch-filter', '/projects/:id'
end
end
diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb
index 7e6d80c047c..8655e5b0238 100644
--- a/spec/requests/api/project_import_spec.rb
+++ b/spec/requests/api/project_import_spec.rb
@@ -462,6 +462,16 @@ RSpec.describe API::ProjectImport, :aggregate_failures do
expect(json_response).to include('import_status' => 'failed',
'import_error' => 'error')
end
+
+ it 'returns the import status if canceled' do
+ project = create(:project, :import_canceled)
+ project.add_maintainer(user)
+
+ get api("/projects/#{project.id}/import", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to include('import_status' => 'canceled')
+ end
end
describe 'POST /projects/import/authorize' do
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 431d2e56cb5..ae689d7327b 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -328,6 +328,45 @@ RSpec.describe API::Projects do
end
end
+ context 'filter by topic_id' do
+ let_it_be(:topic1) { create(:topic) }
+ let_it_be(:topic2) { create(:topic) }
+
+ let(:current_user) { user }
+
+ before do
+ project.topics << topic1
+ end
+
+ context 'with id of assigned topic' do
+ it_behaves_like 'projects response' do
+ let(:filter) { { topic_id: topic1.id } }
+ let(:projects) { [project] }
+ end
+ end
+
+ context 'with id of unassigned topic' do
+ it_behaves_like 'projects response' do
+ let(:filter) { { topic_id: topic2.id } }
+ let(:projects) { [] }
+ end
+ end
+
+ context 'with non-existing topic id' do
+ it_behaves_like 'projects response' do
+ let(:filter) { { topic_id: non_existing_record_id } }
+ let(:projects) { [] }
+ end
+ end
+
+ context 'with empty topic id' do
+ it_behaves_like 'projects response' do
+ let(:filter) { { topic_id: '' } }
+ let(:projects) { user_projects }
+ end
+ end
+ end
+
context 'and with_issues_enabled=true' do
it 'only returns projects with issues enabled' do
project.project_feature.update_attribute(:issues_access_level, ProjectFeature::DISABLED)
@@ -2388,6 +2427,7 @@ RSpec.describe API::Projects do
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved)
expect(json_response['ci_default_git_depth']).to eq(project.ci_default_git_depth)
expect(json_response['ci_forward_deployment_enabled']).to eq(project.ci_forward_deployment_enabled)
+ expect(json_response['ci_separated_caches']).to eq(project.ci_separated_caches)
expect(json_response['merge_method']).to eq(project.merge_method.to_s)
expect(json_response['squash_option']).to eq(project.squash_option.to_s)
expect(json_response['readme_url']).to eq(project.readme_url)
@@ -3199,7 +3239,7 @@ RSpec.describe API::Projects do
measure_project.add_developer(create(:user))
measure_project.add_developer(create(:user)) # make this 2nd one to find any n+1
- unresolved_n_plus_ones = 21 # 21 queries added per member
+ unresolved_n_plus_ones = 27 # 27 queries added per member
expect do
post api("/projects/#{project.id}/import_project_members/#{measure_project.id}", user)
@@ -3652,6 +3692,7 @@ RSpec.describe API::Projects do
merge_method: 'ff',
ci_default_git_depth: 20,
ci_forward_deployment_enabled: false,
+ ci_separated_caches: false,
description: 'new description' }
put api("/projects/#{project3.id}", user4), params: project_param
diff --git a/spec/requests/api/protected_tags_spec.rb b/spec/requests/api/protected_tags_spec.rb
index cc7261dafc9..84b7df86f31 100644
--- a/spec/requests/api/protected_tags_spec.rb
+++ b/spec/requests/api/protected_tags_spec.rb
@@ -3,9 +3,10 @@
require 'spec_helper'
RSpec.describe API::ProtectedTags do
- let(:user) { create(:user) }
- let!(:project) { create(:project, :repository) }
- let(:project2) { create(:project, path: 'project2', namespace: user.namespace) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project2) { create(:project, path: 'project2', namespace: user.namespace) }
+
let(:protected_name) { 'feature' }
let(:tag_name) { protected_name }
let!(:protected_tag) do
diff --git a/spec/requests/api/pypi_packages_spec.rb b/spec/requests/api/pypi_packages_spec.rb
index a24b852cdac..9e0d3780fd8 100644
--- a/spec/requests/api/pypi_packages_spec.rb
+++ b/spec/requests/api/pypi_packages_spec.rb
@@ -197,7 +197,7 @@ RSpec.describe API::PypiPackages do
let(:url) { "/projects/#{project.id}/packages/pypi" }
let(:headers) { {} }
let(:requires_python) { '>=3.7' }
- let(:base_params) { { requires_python: requires_python, version: '1.0.0', name: 'sample-project', sha256_digest: '1' * 64 } }
+ let(:base_params) { { requires_python: requires_python, version: '1.0.0', name: 'sample-project', sha256_digest: '1' * 64, md5_digest: '1' * 32 } }
let(:params) { base_params.merge(content: temp_file(file_name)) }
let(:send_rewritten_field) { true }
let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace, user: user } }
@@ -254,6 +254,19 @@ RSpec.describe API::PypiPackages do
it_behaves_like 'PyPI package creation', :developer, :created, true
end
+
+ context 'without md5_digest' do
+ let(:token) { personal_access_token.token }
+ let(:user_headers) { basic_auth_header(user.username, token) }
+ let(:headers) { user_headers.merge(workhorse_headers) }
+ let(:params) { base_params.merge(content: temp_file(file_name)) }
+
+ before do
+ params.delete(:md5_digest)
+ end
+
+ it_behaves_like 'PyPI package creation', :developer, :created, true, false
+ end
end
context 'with required_python too big' do
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index d6d2bd5baf2..cf0165d123f 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -784,6 +784,40 @@ RSpec.describe API::Repositories do
expect(json_response['notes']).to be_present
end
+ it 'supports specified config file path' do
+ spy = instance_spy(Repositories::ChangelogService)
+
+ expect(Repositories::ChangelogService)
+ .to receive(:new)
+ .with(
+ project,
+ user,
+ version: '1.0.0',
+ from: 'foo',
+ to: 'bar',
+ date: DateTime.new(2020, 1, 1),
+ trailer: 'Foo',
+ config_file: 'specified_changelog_config.yml'
+ )
+ .and_return(spy)
+
+ expect(spy).to receive(:execute).with(commit_to_changelog: false)
+
+ get(
+ api("/projects/#{project.id}/repository/changelog", user),
+ params: {
+ version: '1.0.0',
+ from: 'foo',
+ to: 'bar',
+ date: '2020-01-01',
+ trailer: 'Foo',
+ config_file: 'specified_changelog_config.yml'
+ }
+ )
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
context 'when previous tag version does not exist' do
it_behaves_like '422 response' do
let(:request) { get api("/projects/#{project.id}/repository/changelog", user), params: { version: 'v0.0.0' } }
@@ -905,5 +939,45 @@ RSpec.describe API::Repositories do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['message']).to eq('Failed to generate the changelog: oops')
end
+
+ it "support specified config file path" do
+ spy = instance_spy(Repositories::ChangelogService)
+
+ expect(Repositories::ChangelogService)
+ .to receive(:new)
+ .with(
+ project,
+ user,
+ version: '1.0.0',
+ from: 'foo',
+ to: 'bar',
+ date: DateTime.new(2020, 1, 1),
+ branch: 'kittens',
+ trailer: 'Foo',
+ config_file: 'specified_changelog_config.yml',
+ file: 'FOO.md',
+ message: 'Commit message'
+ )
+ .and_return(spy)
+
+ allow(spy).to receive(:execute).with(commit_to_changelog: true)
+
+ post(
+ api("/projects/#{project.id}/repository/changelog", user),
+ params: {
+ version: '1.0.0',
+ from: 'foo',
+ to: 'bar',
+ date: '2020-01-01',
+ branch: 'kittens',
+ trailer: 'Foo',
+ config_file: 'specified_changelog_config.yml',
+ file: 'FOO.md',
+ message: 'Commit message'
+ }
+ )
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
end
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index cfda06da8f3..d4a8e591622 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -373,7 +373,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
end
end
- context "snowplow tracking settings" do
+ context "snowplow tracking settings", :do_not_stub_snowplow_by_default do
let(:settings) do
{
snowplow_collector_hostname: "snowplow.example.com",
diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb
index 13160519996..0ba1011684a 100644
--- a/spec/requests/api/snippets_spec.rb
+++ b/spec/requests/api/snippets_spec.rb
@@ -9,9 +9,9 @@ RSpec.describe API::Snippets, factory_default: :keep do
let_it_be(:user) { create(:user) }
let_it_be(:other_user) { create(:user) }
- let_it_be(:public_snippet) { create(:personal_snippet, :repository, :public, author: user) }
- let_it_be(:private_snippet) { create(:personal_snippet, :repository, :private, author: user) }
- let_it_be(:internal_snippet) { create(:personal_snippet, :repository, :internal, author: user) }
+ let_it_be(:public_snippet) { create(:personal_snippet, :repository, :public, author: user) }
+ let_it_be_with_refind(:private_snippet) { create(:personal_snippet, :repository, :private, author: user) }
+ let_it_be(:internal_snippet) { create(:personal_snippet, :repository, :internal, author: user) }
let_it_be(:user_token) { create(:personal_access_token, user: user) }
let_it_be(:other_user_token) { create(:personal_access_token, user: other_user) }
@@ -63,6 +63,23 @@ RSpec.describe API::Snippets, factory_default: :keep do
expect(snippet["id"]).not_to eq(public_snippet.id)
end
end
+
+ context 'filtering snippets by created_after/created_before' do
+ let_it_be(:private_snippet_before_time_range) { create(:personal_snippet, :repository, :private, author: user, created_at: Time.parse("2021-08-20T00:00:00Z")) }
+ let_it_be(:private_snippet_in_time_range1) { create(:personal_snippet, :repository, :private, author: user, created_at: Time.parse("2021-08-22T00:00:00Z")) }
+ let_it_be(:private_snippet_in_time_range2) { create(:personal_snippet, :repository, :private, author: user, created_at: Time.parse("2021-08-24T00:00:00Z")) }
+ let_it_be(:private_snippet_after_time_range) { create(:personal_snippet, :repository, :private, author: user, created_at: Time.parse("2021-08-26T00:00:00Z")) }
+
+ let(:path) { "/snippets?created_after=2021-08-21T00:00:00Z&created_before=2021-08-25T00:00:00Z" }
+
+ it 'returns snippets available for user in given time range' do
+ get api(path, personal_access_token: user_token)
+
+ expect(json_response.map { |snippet| snippet['id'] }).to contain_exactly(
+ private_snippet_in_time_range1.id,
+ private_snippet_in_time_range2.id)
+ end
+ end
end
describe 'GET /snippets/public' do
@@ -98,6 +115,21 @@ RSpec.describe API::Snippets, factory_default: :keep do
expect(response).to have_gitlab_http_status(:unauthorized)
end
+
+ context 'filtering public snippets by created_after/created_before' do
+ let_it_be(:public_snippet_before_time_range) { create(:personal_snippet, :repository, :public, author: other_user, created_at: Time.parse("2021-08-20T00:00:00Z")) }
+ let_it_be(:public_snippet_in_time_range) { create(:personal_snippet, :repository, :public, author: other_user, created_at: Time.parse("2021-08-22T00:00:00Z")) }
+ let_it_be(:public_snippet_after_time_range) { create(:personal_snippet, :repository, :public, author: other_user, created_at: Time.parse("2021-08-24T00:00:00Z")) }
+
+ let(:path) { "/snippets/public?created_after=2021-08-21T00:00:00Z&created_before=2021-08-23T00:00:00Z" }
+
+ it 'returns public snippets available to user in given time range' do
+ get api(path, personal_access_token: user_token)
+
+ expect(json_response.map { |snippet| snippet['id'] }).to contain_exactly(
+ public_snippet_in_time_range.id)
+ end
+ end
end
describe 'GET /snippets/:id/raw' do
diff --git a/spec/requests/api/system_hooks_spec.rb b/spec/requests/api/system_hooks_spec.rb
index 2460a98129f..0f1dbea2e73 100644
--- a/spec/requests/api/system_hooks_spec.rb
+++ b/spec/requests/api/system_hooks_spec.rb
@@ -3,221 +3,58 @@
require 'spec_helper'
RSpec.describe API::SystemHooks do
- include StubRequests
+ let_it_be(:non_admin) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be_with_refind(:hook) { create(:system_hook, url: "http://example.com") }
- let(:user) { create(:user) }
- let(:admin) { create(:admin) }
- let!(:hook) { create(:system_hook, url: "http://example.com") }
+ it_behaves_like 'web-hook API endpoints', '' do
+ let(:user) { admin }
+ let(:unauthorized_user) { non_admin }
- before do
- stub_full_request(hook.url, method: :post)
- end
-
- describe "GET /hooks" do
- context "when no user" do
- it "returns authentication error" do
- get api("/hooks")
-
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
+ def scope
+ SystemHook
end
- context "when not an admin" do
- it "returns forbidden error" do
- get api("/hooks", user)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
+ def collection_uri
+ "/hooks"
end
- context "when authenticated as admin" do
- it "returns an array of hooks" do
- get api("/hooks", admin)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(response).to match_response_schema('public_api/v4/system_hooks')
- expect(json_response.first).not_to have_key("token")
- expect(json_response.first['url']).to eq(hook.url)
- expect(json_response.first['push_events']).to be false
- expect(json_response.first['tag_push_events']).to be false
- expect(json_response.first['merge_requests_events']).to be false
- expect(json_response.first['repository_update_events']).to be true
- expect(json_response.first['enable_ssl_verification']).to be true
- expect(json_response.first['disabled_until']).to be nil
- expect(json_response.first['alert_status']).to eq 'executable'
- end
+ def match_collection_schema
+ match_response_schema('public_api/v4/system_hooks')
end
- end
- describe "GET /hooks/:id" do
- context "when no user" do
- it "returns authentication error" do
- get api("/hooks/#{hook.id}")
-
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
+ def hook_uri(hook_id = hook.id)
+ "/hooks/#{hook_id}"
end
- context "when not an admin" do
- it "returns forbidden error" do
- get api("/hooks/#{hook.id}", user)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context "when authenticated as admin" do
- it "gets a hook", :aggregate_failures do
- get api("/hooks/#{hook.id}", admin)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('public_api/v4/system_hook')
- expect(json_response).to match(
- 'id' => be(hook.id),
- 'url' => eq(hook.url),
- 'created_at' => eq(hook.created_at.iso8601(3)),
- 'push_events' => be(hook.push_events),
- 'tag_push_events' => be(hook.tag_push_events),
- 'merge_requests_events' => be(hook.merge_requests_events),
- 'repository_update_events' => be(hook.repository_update_events),
- 'enable_ssl_verification' => be(hook.enable_ssl_verification),
- 'alert_status' => eq(hook.alert_status.to_s),
- 'disabled_until' => eq(hook.disabled_until&.iso8601(3))
- )
- end
-
- context 'the hook is disabled' do
- before do
- hook.disable!
- end
-
- it "has the correct alert status", :aggregate_failures do
- get api("/hooks/#{hook.id}", admin)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('public_api/v4/system_hook')
- expect(json_response).to include('alert_status' => 'disabled')
- end
- end
-
- context 'the hook is backed-off' do
- before do
- hook.backoff!
- end
-
- it "has the correct alert status", :aggregate_failures do
- get api("/hooks/#{hook.id}", admin)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('public_api/v4/system_hook')
- expect(json_response).to include(
- 'alert_status' => 'temporarily_disabled',
- 'disabled_until' => hook.disabled_until.iso8601(3)
- )
- end
- end
-
- it 'returns 404 if the system hook does not exist' do
- get api("/hooks/#{non_existing_record_id}", admin)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
+ def match_hook_schema
+ match_response_schema('public_api/v4/system_hook')
end
- end
- describe "POST /hooks" do
- it "creates new hook" do
- expect do
- post api("/hooks", admin), params: { url: 'http://example.com' }
- end.to change { SystemHook.count }.by(1)
+ def event_names
+ %i[
+ push_events
+ tag_push_events
+ merge_requests_events
+ repository_update_events
+ ]
end
- it "responds with 400 if url not given" do
- post api("/hooks", admin)
-
- expect(response).to have_gitlab_http_status(:bad_request)
+ def hook_param_overrides
+ {}
end
- it "responds with 400 if url is invalid" do
- post api("/hooks", admin), params: { url: 'hp://mep.mep' }
-
- expect(response).to have_gitlab_http_status(:bad_request)
+ let(:update_params) do
+ {
+ push_events: false,
+ tag_push_events: true
+ }
end
- it "does not create new hook without url" do
- expect do
- post api("/hooks", admin)
- end.not_to change { SystemHook.count }
+ let(:default_values) do
+ { repository_update_events: true }
end
- it 'sets default values for events' do
- stub_full_request('http://mep.mep', method: :post)
-
- post api('/hooks', admin), params: { url: 'http://mep.mep' }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(response).to match_response_schema('public_api/v4/system_hook')
- expect(json_response['enable_ssl_verification']).to be true
- expect(json_response['push_events']).to be false
- expect(json_response['tag_push_events']).to be false
- expect(json_response['merge_requests_events']).to be false
- expect(json_response['repository_update_events']).to be true
- end
-
- it 'sets explicit values for events' do
- stub_full_request('http://mep.mep', method: :post)
-
- post api('/hooks', admin),
- params: {
- url: 'http://mep.mep',
- enable_ssl_verification: false,
- push_events: true,
- tag_push_events: true,
- merge_requests_events: true,
- repository_update_events: false
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(response).to match_response_schema('public_api/v4/system_hook')
- expect(json_response['enable_ssl_verification']).to be false
- expect(json_response['push_events']).to be true
- expect(json_response['tag_push_events']).to be true
- expect(json_response['merge_requests_events']).to be true
- expect(json_response['repository_update_events']).to be false
- end
- end
-
- describe 'POST /hooks/:id' do
- it "returns and trigger hook by id" do
- post api("/hooks/#{hook.id}", admin)
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['event_name']).to eq('project_create')
- end
-
- it "returns 404 on failure" do
- post api("/hooks/404", admin)
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- describe "DELETE /hooks/:id" do
- it "deletes a hook" do
- expect do
- delete api("/hooks/#{hook.id}", admin)
-
- expect(response).to have_gitlab_http_status(:no_content)
- end.to change { SystemHook.count }.by(-1)
- end
-
- it 'returns 404 if the system hook does not exist' do
- delete api("/hooks/#{non_existing_record_id}", admin)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
-
- it_behaves_like '412 response' do
- let(:request) { api("/hooks/#{hook.id}", admin) }
- end
+ it_behaves_like 'web-hook API endpoints test hook', ''
end
end
diff --git a/spec/requests/api/tags_spec.rb b/spec/requests/api/tags_spec.rb
index 3558babf2f1..e81e9e0bf2f 100644
--- a/spec/requests/api/tags_spec.rb
+++ b/spec/requests/api/tags_spec.rb
@@ -90,6 +90,13 @@ RSpec.describe API::Tags do
let(:request) { get api(route, current_user) }
end
end
+
+ context 'when repository does not exist' do
+ it_behaves_like '404 response' do
+ let(:project) { create(:project, creator: user) }
+ let(:request) { get api(route, current_user) }
+ end
+ end
end
context 'when unauthenticated', 'and project is public' do
diff --git a/spec/requests/api/terraform/modules/v1/packages_spec.rb b/spec/requests/api/terraform/modules/v1/packages_spec.rb
index 12bce4da011..dff44a45de4 100644
--- a/spec/requests/api/terraform/modules/v1/packages_spec.rb
+++ b/spec/requests/api/terraform/modules/v1/packages_spec.rb
@@ -98,6 +98,216 @@ RSpec.describe API::Terraform::Modules::V1::Packages do
end
end
+ describe 'GET /api/v4/packages/terraform/modules/v1/:module_namespace/:module_name/:module_system/download' do
+ context 'empty registry' do
+ let(:url) { api("/packages/terraform/modules/v1/#{group.path}/module-2/system/download") }
+ let(:headers) { {} }
+
+ subject { get(url, headers: headers) }
+
+ it 'returns not found when there is no module' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'with valid namespace' do
+ let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/download") }
+ let(:headers) { {} }
+
+ subject { get(url, headers: headers) }
+
+ before_all do
+ create(:terraform_module_package, project: project, name: package.name, version: '1.0.1')
+ end
+
+ where(:visibility, :user_role, :member, :token_type, :shared_examples_name, :expected_status) do
+ :public | :developer | true | :personal_access_token | 'redirects to version download' | :found
+ :public | :guest | true | :personal_access_token | 'redirects to version download' | :found
+ :public | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :personal_access_token | 'redirects to version download' | :found
+ :public | :guest | false | :personal_access_token | 'redirects to version download' | :found
+ :public | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :anonymous | false | nil | 'redirects to version download' | :found
+ :private | :developer | true | :personal_access_token | 'redirects to version download' | :found
+ :private | :guest | true | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :anonymous | false | nil | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | :job_token | 'redirects to version download' | :found
+ :public | :guest | true | :job_token | 'redirects to version download' | :found
+ :public | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :job_token | 'redirects to version download' | :found
+ :public | :guest | false | :job_token | 'redirects to version download' | :found
+ :public | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | :job_token | 'redirects to version download' | :found
+ :private | :guest | true | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:headers) { user_role == :anonymous ? {} : { 'Authorization' => "Bearer #{token}" } }
+
+ before do
+ group.update!(visibility: visibility.to_s)
+ project.update!(visibility: visibility.to_s)
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
+ end
+ end
+
+ describe 'GET /api/v4/packages/terraform/modules/v1/:module_namespace/:module_name/:module_system' do
+ context 'empty registry' do
+ let(:url) { api("/packages/terraform/modules/v1/#{group.path}/non-existent/system") }
+ let(:headers) { { 'Authorization' => "Bearer #{tokens[:personal_access_token]}" } }
+
+ subject { get(url, headers: headers) }
+
+ it 'returns not found when there is no module' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'with valid namespace' do
+ let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}") }
+
+ subject { get(url, headers: headers) }
+
+ where(:visibility, :user_role, :member, :token_type, :shared_examples_name, :expected_status) do
+ :public | :developer | true | :personal_access_token | 'returns terraform module version' | :success
+ :public | :guest | true | :personal_access_token | 'returns terraform module version' | :success
+ :public | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :personal_access_token | 'returns terraform module version' | :success
+ :public | :guest | false | :personal_access_token | 'returns terraform module version' | :success
+ :public | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :anonymous | false | nil | 'returns terraform module version' | :success
+ :private | :developer | true | :personal_access_token | 'returns terraform module version' | :success
+ :private | :guest | true | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :anonymous | false | nil | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | :job_token | 'returns terraform module version' | :success
+ :public | :guest | true | :job_token | 'returns terraform module version' | :success
+ :public | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :job_token | 'returns terraform module version' | :success
+ :public | :guest | false | :job_token | 'returns terraform module version' | :success
+ :public | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | :job_token | 'returns terraform module version' | :success
+ :private | :guest | true | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:headers) { user_role == :anonymous ? {} : { 'Authorization' => "Bearer #{token}" } }
+
+ before do
+ group.update!(visibility: visibility.to_s)
+ project.update!(visibility: visibility.to_s)
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
+ end
+ end
+
+ describe 'GET /api/v4/packages/terraform/modules/v1/:module_namespace/:module_name/:module_system/:module_version' do
+ let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/#{package.version}") }
+ let(:headers) { {} }
+
+ subject { get(url, headers: headers) }
+
+ context 'not found' do
+ let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/2.0.0") }
+ let(:headers) { { 'Authorization' => "Bearer #{tokens[:job_token]}" } }
+
+ subject { get(url, headers: headers) }
+
+ it 'returns not found when the specified version is not present in the registry' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'with valid namespace' do
+ where(:visibility, :user_role, :member, :token_type, :shared_examples_name, :expected_status) do
+ :public | :developer | true | :personal_access_token | 'returns terraform module version' | :success
+ :public | :guest | true | :personal_access_token | 'returns terraform module version' | :success
+ :public | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :personal_access_token | 'returns terraform module version' | :success
+ :public | :guest | false | :personal_access_token | 'returns terraform module version' | :success
+ :public | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :anonymous | false | nil | 'returns terraform module version' | :success
+ :private | :developer | true | :personal_access_token | 'returns terraform module version' | :success
+ :private | :guest | true | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :anonymous | false | nil | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | :job_token | 'returns terraform module version' | :success
+ :public | :guest | true | :job_token | 'returns terraform module version' | :success
+ :public | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :job_token | 'returns terraform module version' | :success
+ :public | :guest | false | :job_token | 'returns terraform module version' | :success
+ :public | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | :job_token | 'returns terraform module version' | :success
+ :private | :guest | true | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:headers) { user_role == :anonymous ? {} : { 'Authorization' => "Bearer #{token}" } }
+
+ before do
+ group.update!(visibility: visibility.to_s)
+ project.update!(visibility: visibility.to_s)
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
+ end
+ end
+
describe 'GET /api/v4/packages/terraform/modules/v1/:module_namespace/:module_name/:module_system/:module_version/download' do
let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/#{package.version}/download") }
let(:headers) { {} }
diff --git a/spec/requests/api/unleash_spec.rb b/spec/requests/api/unleash_spec.rb
index 6cb801538c6..7bdb89fb286 100644
--- a/spec/requests/api/unleash_spec.rb
+++ b/spec/requests/api/unleash_spec.rb
@@ -168,7 +168,7 @@ RSpec.describe API::Unleash do
end
%w(/feature_flags/unleash/:project_id/features /feature_flags/unleash/:project_id/client/features).each do |features_endpoint|
- describe "GET #{features_endpoint}" do
+ describe "GET #{features_endpoint}", :use_clean_rails_redis_caching do
let(:features_url) { features_endpoint.sub(':project_id', project_id.to_s) }
let(:client) { create(:operations_feature_flags_client, project: project) }
@@ -176,6 +176,46 @@ RSpec.describe API::Unleash do
it_behaves_like 'authenticated request'
+ context 'when a client fetches feature flags several times' do
+ let(:headers) { { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' } }
+
+ before do
+ create_list(:operations_feature_flag, 3, project: project)
+ end
+
+ it 'serializes feature flags on the first request and reads cached data on subsequent requests' do
+ expect(API::Entities::Unleash::ClientFeatureFlags)
+ .to receive(:represent).with(instance_of(Operations::FeatureFlagsClient), any_args)
+ .once
+
+ 5.times { get api(features_url), params: params, headers: headers }
+ end
+
+ it 'increments the cache key when feature flags are modified' do
+ expect(API::Entities::Unleash::ClientFeatureFlags)
+ .to receive(:represent).with(instance_of(Operations::FeatureFlagsClient), any_args)
+ .twice
+
+ 2.times { get api(features_url), params: params, headers: headers }
+
+ ::FeatureFlags::CreateService.new(project, project.owner, name: 'feature_flag').execute
+
+ 3.times { get api(features_url), params: params, headers: headers }
+ end
+
+ context 'when cache_unleash_client_api is disabled' do
+ before do
+ stub_feature_flags(cache_unleash_client_api: false)
+ end
+
+ it 'serializes feature flags every time' do
+ expect(::API::Entities::UnleashFeature).to receive(:represent).exactly(5).times
+
+ 5.times { get api(features_url), params: params, headers: headers }
+ end
+ end
+ end
+
context 'with version 2 feature flags' do
it 'does not return a flag without any strategies' do
create(:operations_feature_flag, project: project,
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index d4dc7375e9e..68d5fad8ff4 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -17,6 +17,8 @@ RSpec.describe API::Users do
let(:deactivated_user) { create(:user, state: 'deactivated') }
let(:banned_user) { create(:user, :banned) }
let(:internal_user) { create(:user, :bot) }
+ let(:user_with_2fa) { create(:user, :two_factor_via_otp) }
+ let(:admin_with_2fa) { create(:admin, :two_factor_via_otp) }
context 'admin notes' do
let_it_be(:admin) { create(:admin, note: '2019-10-06 | 2FA added | user requested | www.gitlab.com') }
@@ -81,6 +83,79 @@ RSpec.describe API::Users do
end
end
+ describe "PATCH /users/:id/disable_two_factor" do
+ context "when current user is an admin" do
+ it "returns a 204 when 2FA is disabled for the target user" do
+ expect do
+ patch api("/users/#{user_with_2fa.id}/disable_two_factor", admin)
+ end.to change { user_with_2fa.reload.two_factor_enabled? }
+ .from(true)
+ .to(false)
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+
+ it "uses TwoFactor Destroy Service" do
+ destroy_service = instance_double(TwoFactor::DestroyService, execute: nil)
+ expect(TwoFactor::DestroyService).to receive(:new)
+ .with(admin, user: user_with_2fa)
+ .and_return(destroy_service)
+ expect(destroy_service).to receive(:execute)
+
+ patch api("/users/#{user_with_2fa.id}/disable_two_factor", admin)
+ end
+
+ it "returns a 400 if 2FA is not enabled for the target user" do
+ expect(TwoFactor::DestroyService).to receive(:new).and_call_original
+
+ expect do
+ patch api("/users/#{user.id}/disable_two_factor", admin)
+ end.not_to change { user.reload.two_factor_enabled? }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq("400 Bad request - Two-factor authentication is not enabled for this user")
+ end
+
+ it "returns a 403 if the target user is an admin" do
+ expect(TwoFactor::DestroyService).to receive(:new).never
+
+ expect do
+ patch api("/users/#{admin_with_2fa.id}/disable_two_factor", admin)
+ end.not_to change { admin_with_2fa.reload.two_factor_enabled? }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq("403 Forbidden - Two-factor authentication for admins cannot be disabled via the API. Use the Rails console")
+ end
+
+ it "returns a 404 if the target user cannot be found" do
+ expect(TwoFactor::DestroyService).to receive(:new).never
+
+ patch api("/users/#{non_existing_record_id}/disable_two_factor", admin)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq("404 User Not Found")
+ end
+ end
+
+ context "when current user is not an admin" do
+ it "returns a 403" do
+ expect do
+ patch api("/users/#{user_with_2fa.id}/disable_two_factor", user)
+ end.not_to change { user_with_2fa.reload.two_factor_enabled? }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq("403 Forbidden")
+ end
+ end
+
+ context "when unauthenticated" do
+ it "returns a 401" do
+ patch api("/users/#{user_with_2fa.id}/disable_two_factor")
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+
describe 'GET /users/' do
context 'when unauthenticated' do
it "does not contain certain fields" do
@@ -110,6 +185,40 @@ RSpec.describe API::Users do
expect(json_response.first['note']).to eq '2018-11-05 | 2FA removed | user requested | www.gitlab.com'
end
end
+
+ context 'N+1 queries' do
+ before do
+ create_list(:user, 2)
+ end
+
+ it 'avoids N+1 queries when requested by admin' do
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ get api("/users", admin)
+ end.count
+
+ create_list(:user, 3)
+
+ # There is still a pending N+1 query related to fetching
+ # the project count for each user.
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/367080
+
+ expect do
+ get api("/users", admin)
+ end.not_to exceed_all_query_limit(control_count + 3)
+ end
+
+ it 'avoids N+1 queries when requested by a regular user' do
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ get api("/users", user)
+ end.count
+
+ create_list(:user, 3)
+
+ expect do
+ get api("/users", user)
+ end.not_to exceed_all_query_limit(control_count)
+ end
+ end
end
end
diff --git a/spec/requests/groups/harbor/artifacts_controller_spec.rb b/spec/requests/groups/harbor/artifacts_controller_spec.rb
new file mode 100644
index 00000000000..ea9529119a6
--- /dev/null
+++ b/spec/requests/groups/harbor/artifacts_controller_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::Harbor::ArtifactsController do
+ it_behaves_like 'a harbor artifacts controller', anonymous_status_code: '404' do
+ let_it_be(:container) { create(:group) }
+ let_it_be(:harbor_integration) { create(:harbor_integration, group: container, project: nil) }
+ end
+end
diff --git a/spec/requests/groups/harbor/repositories_controller_spec.rb b/spec/requests/groups/harbor/repositories_controller_spec.rb
index 3e475dc410e..b4022561f54 100644
--- a/spec/requests/groups/harbor/repositories_controller_spec.rb
+++ b/spec/requests/groups/harbor/repositories_controller_spec.rb
@@ -3,67 +3,8 @@
require 'spec_helper'
RSpec.describe Groups::Harbor::RepositoriesController do
- let_it_be(:group, reload: true) { create(:group) }
- let_it_be(:user) { create(:user) }
-
- shared_examples 'responds with 404 status' do
- it 'returns 404' do
- subject
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- shared_examples 'responds with 200 status' do
- it 'renders the index template' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template(:index)
- end
- end
-
- before do
- stub_feature_flags(harbor_registry_integration: true)
- group.add_reporter(user)
- login_as(user)
- end
-
- describe 'GET #index' do
- subject do
- get group_harbor_registries_path(group)
- response
- end
-
- context 'with harbor registry feature flag enabled' do
- it_behaves_like 'responds with 200 status'
- end
-
- context 'with harbor registry feature flag disabled' do
- before do
- stub_feature_flags(harbor_registry_integration: false)
- end
-
- it_behaves_like 'responds with 404 status'
- end
- end
-
- describe 'GET #show' do
- subject do
- get group_harbor_registry_path(group, 1)
- response
- end
-
- context 'with harbor registry feature flag enabled' do
- it_behaves_like 'responds with 200 status'
- end
-
- context 'with harbor registry feature flag disabled' do
- before do
- stub_feature_flags(harbor_registry_integration: false)
- end
-
- it_behaves_like 'responds with 404 status'
- end
+ it_behaves_like 'a harbor repositories controller', anonymous_status_code: '404' do
+ let_it_be(:container, reload: true) { create(:group) }
+ let_it_be(:harbor_integration) { create(:harbor_integration, group: container, project: nil) }
end
end
diff --git a/spec/requests/groups/harbor/tags_controller_spec.rb b/spec/requests/groups/harbor/tags_controller_spec.rb
new file mode 100644
index 00000000000..257d4366837
--- /dev/null
+++ b/spec/requests/groups/harbor/tags_controller_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::Harbor::TagsController do
+ it_behaves_like 'a harbor tags controller', anonymous_status_code: '404' do
+ let_it_be(:container) { create(:group) }
+ let_it_be(:harbor_integration) { create(:harbor_integration, group: container, project: nil) }
+ end
+end
diff --git a/spec/requests/jira_authorizations_spec.rb b/spec/requests/jira_authorizations_spec.rb
index b43d36e94f4..f67288b286b 100644
--- a/spec/requests/jira_authorizations_spec.rb
+++ b/spec/requests/jira_authorizations_spec.rb
@@ -27,14 +27,16 @@ RSpec.describe 'Jira authorization requests' do
redirect_uri: redirect_uri
})
oauth_response = json_response
+ oauth_response_access_token, scope, token_type = oauth_response.values_at('access_token', 'scope', 'token_type')
post '/login/oauth/access_token', params: post_data.merge({
code: generate_access_grant.token
})
jira_response = response.body
+ jira_response_access_token = Rack::Utils.parse_nested_query(jira_response)['access_token']
- access_token, scope, token_type = oauth_response.values_at('access_token', 'scope', 'token_type')
- expect(jira_response).to eq("access_token=#{access_token}&scope=#{scope}&token_type=#{token_type}")
+ expect(jira_response).to include("scope=#{scope}&token_type=#{token_type}")
+ expect(oauth_response_access_token).not_to eql(jira_response_access_token)
end
context 'when authorization fails' do
diff --git a/spec/requests/jira_connect/oauth_application_ids_controller_spec.rb b/spec/requests/jira_connect/oauth_application_ids_controller_spec.rb
index ffeaf1075f3..b0c2eaec4e2 100644
--- a/spec/requests/jira_connect/oauth_application_ids_controller_spec.rb
+++ b/spec/requests/jira_connect/oauth_application_ids_controller_spec.rb
@@ -3,7 +3,40 @@
require 'spec_helper'
RSpec.describe JiraConnect::OauthApplicationIdsController do
+ describe 'OPTIONS /-/jira_connect/oauth_application_id' do
+ before do
+ stub_application_setting(jira_connect_application_key: '123456')
+
+ options '/-/jira_connect/oauth_application_id', headers: { 'Origin' => 'http://notgitlab.com' }
+ end
+
+ it 'returns 200' do
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'allows cross-origin requests', :aggregate_failures do
+ expect(response.headers['Access-Control-Allow-Origin']).to eq '*'
+ expect(response.headers['Access-Control-Allow-Methods']).to eq 'GET, OPTIONS'
+ expect(response.headers['Access-Control-Allow-Credentials']).to be_nil
+ end
+
+ context 'on GitLab.com' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ it 'renders not found' do
+ options '/-/jira_connect/oauth_application_id'
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.headers['Access-Control-Allow-Origin']).not_to eq '*'
+ end
+ end
+ end
+
describe 'GET /-/jira_connect/oauth_application_id' do
+ let(:cors_request_headers) { { 'Origin' => 'http://notgitlab.com' } }
+
before do
stub_application_setting(jira_connect_application_key: '123456')
end
@@ -15,6 +48,14 @@ RSpec.describe JiraConnect::OauthApplicationIdsController do
expect(json_response).to eq({ "application_id" => "123456" })
end
+ it 'allows cross-origin requests', :aggregate_failures do
+ get '/-/jira_connect/oauth_application_id', headers: cors_request_headers
+
+ expect(response.headers['Access-Control-Allow-Origin']).to eq '*'
+ expect(response.headers['Access-Control-Allow-Methods']).to eq 'GET, OPTIONS'
+ expect(response.headers['Access-Control-Allow-Credentials']).to be_nil
+ end
+
context 'application ID is empty' do
before do
stub_application_setting(jira_connect_application_key: '')
@@ -38,5 +79,17 @@ RSpec.describe JiraConnect::OauthApplicationIdsController do
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ context 'on GitLab.com' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ it 'renders not found' do
+ get '/-/jira_connect/oauth_application_id'
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
end
diff --git a/spec/requests/jira_connect/subscriptions_controller_spec.rb b/spec/requests/jira_connect/subscriptions_controller_spec.rb
new file mode 100644
index 00000000000..b10d07b3771
--- /dev/null
+++ b/spec/requests/jira_connect/subscriptions_controller_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnect::SubscriptionsController do
+ describe 'GET /-/jira_connect/subscriptions' do
+ let_it_be(:installation) { create(:jira_connect_installation, instance_url: 'http://self-managed-gitlab.com') }
+
+ let(:qsh) do
+ Atlassian::Jwt.create_query_string_hash('https://gitlab.test/subscriptions', 'GET', 'https://gitlab.test')
+ end
+
+ let(:jwt) { Atlassian::Jwt.encode({ iss: installation.client_key, qsh: qsh }, installation.shared_secret) }
+
+ before do
+ get '/-/jira_connect/subscriptions', params: { jwt: jwt }
+ end
+
+ subject(:content_security_policy) { response.headers['Content-Security-Policy'] }
+
+ it { is_expected.to include('http://self-managed-gitlab.com/-/jira_connect/oauth_application_ids') }
+
+ context 'with no self-managed instance configured' do
+ let_it_be(:installation) { create(:jira_connect_installation, instance_url: '') }
+
+ it { is_expected.not_to include('http://self-managed-gitlab.com') }
+ end
+ end
+end
diff --git a/spec/requests/jwks_controller_spec.rb b/spec/requests/jwks_controller_spec.rb
index 6dbb5988f58..c9dcc238c29 100644
--- a/spec/requests/jwks_controller_spec.rb
+++ b/spec/requests/jwks_controller_spec.rb
@@ -18,9 +18,9 @@ RSpec.describe JwksController do
end
describe 'GET /-/jwks' do
- let(:ci_jwt_signing_key) { OpenSSL::PKey::RSA.generate(1024) }
- let(:ci_jwk) { ci_jwt_signing_key.to_jwk }
- let(:oidc_jwk) { OpenSSL::PKey::RSA.new(Rails.application.secrets.openid_connect_signing_key).to_jwk }
+ let_it_be(:ci_jwt_signing_key) { OpenSSL::PKey::RSA.generate(3072) }
+ let_it_be(:ci_jwk) { ci_jwt_signing_key.to_jwk }
+ let_it_be(:oidc_jwk) { OpenSSL::PKey::RSA.new(Rails.application.secrets.openid_connect_signing_key).to_jwk }
before do
stub_application_setting(ci_jwt_signing_key: ci_jwt_signing_key.to_s)
diff --git a/spec/requests/oauth_tokens_spec.rb b/spec/requests/oauth_tokens_spec.rb
index 30659a5b896..180341fc85d 100644
--- a/spec/requests/oauth_tokens_spec.rb
+++ b/spec/requests/oauth_tokens_spec.rb
@@ -5,44 +5,92 @@ require 'spec_helper'
RSpec.describe 'OAuth Tokens requests' do
let(:user) { create :user }
let(:application) { create :oauth_application, scopes: 'api' }
+ let(:grant_type) { 'authorization_code' }
+ let(:refresh_token) { nil }
def request_access_token(user)
post '/oauth/token',
params: {
- grant_type: 'authorization_code',
+ grant_type: grant_type,
code: generate_access_grant(user).token,
redirect_uri: application.redirect_uri,
client_id: application.uid,
- client_secret: application.secret
+ client_secret: application.secret,
+ refresh_token: refresh_token
}
end
def generate_access_grant(user)
- create :oauth_access_grant, application: application, resource_owner_id: user.id
+ create(:oauth_access_grant, application: application, resource_owner_id: user.id)
end
context 'when there is already a token for the application' do
- let!(:existing_token) { create :oauth_access_token, application: application, resource_owner_id: user.id }
+ let!(:existing_token) { create(:oauth_access_token, application: application, resource_owner_id: user.id) }
- context 'and the request is done by the resource owner' do
- it 'reuses and returns the stored token' do
+ shared_examples 'issues a new token' do
+ it 'issues a new token' do
expect do
request_access_token(user)
- end.not_to change { Doorkeeper::AccessToken.count }
+ end.to change { Doorkeeper::AccessToken.count }.from(1).to(2)
+
+ expect(json_response['access_token']).not_to eq existing_token.token
+ expect(json_response['refresh_token']).not_to eq existing_token.refresh_token
+ end
+ end
- expect(json_response['access_token']).to eq existing_token.token
+ shared_examples 'revokes previous token' do
+ it 'revokes previous token' do
+ expect { request_access_token(user) }.to(
+ change { existing_token.reload.revoked_at }.from(nil))
end
end
- context 'and the request is done by a different user' do
- let(:other_user) { create :user }
+ context 'and the request is done by the resource owner' do
+ context 'with authorization code grant type' do
+ include_examples 'issues a new token'
- it 'generates and returns a different token for a different owner' do
- expect do
- request_access_token(other_user)
- end.to change { Doorkeeper::AccessToken.count }.by(1)
+ it 'does not revoke previous token' do
+ request_access_token(user)
+
+ expect(existing_token.reload.revoked_at).to be_nil
+ end
+ end
+
+ context 'with refresh token grant type' do
+ let(:grant_type) { 'refresh_token' }
+ let(:refresh_token) { existing_token.refresh_token }
+
+ include_examples 'issues a new token'
+ include_examples 'revokes previous token'
+
+ context 'expired refresh token' do
+ let!(:existing_token) do
+ create(:oauth_access_token, application: application,
+ resource_owner_id: user.id,
+ created_at: 10.minutes.ago,
+ expires_in: 5)
+ end
+
+ include_examples 'issues a new token'
+ include_examples 'revokes previous token'
+ end
+
+ context 'revoked refresh token' do
+ let!(:existing_token) do
+ create(:oauth_access_token, application: application,
+ resource_owner_id: user.id,
+ created_at: 2.hours.ago,
+ revoked_at: 1.hour.ago,
+ expires_in: 5)
+ end
+
+ it 'does not issue a new token' do
+ request_access_token(user)
- expect(json_response['access_token']).not_to be_nil
+ expect(json_response['error']).to eq('invalid_grant')
+ end
+ end
end
end
end
diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb
index c647fee1564..afaa6168bfd 100644
--- a/spec/requests/openid_connect_spec.rb
+++ b/spec/requests/openid_connect_spec.rb
@@ -120,9 +120,9 @@ RSpec.describe 'OpenID Connect requests' do
let!(:group4) { create :group, parent: group3 }
before do
- group1.add_user(user, GroupMember::OWNER)
- group3.add_user(user, Gitlab::Access::DEVELOPER)
- group4.add_user(user, Gitlab::Access::MAINTAINER)
+ group1.add_member(user, GroupMember::OWNER)
+ group3.add_member(user, Gitlab::Access::DEVELOPER)
+ group4.add_member(user, Gitlab::Access::MAINTAINER)
request_user_info!
end
@@ -163,8 +163,8 @@ RSpec.describe 'OpenID Connect requests' do
let!(:group4) { create :group, parent: group3 }
before do
- group1.add_user(user, Gitlab::Access::OWNER)
- group3.add_user(user, Gitlab::Access::DEVELOPER)
+ group1.add_member(user, Gitlab::Access::OWNER)
+ group3.add_member(user, Gitlab::Access::DEVELOPER)
request_access_token!
@payload = JSON::JWT.decode(json_response['id_token'], :skip_verification)
@@ -358,8 +358,8 @@ RSpec.describe 'OpenID Connect requests' do
let!(:group4) { create :group, parent: group3 }
before do
- group1.add_user(user, Gitlab::Access::OWNER)
- group3.add_user(user, Gitlab::Access::DEVELOPER)
+ group1.add_member(user, Gitlab::Access::OWNER)
+ group3.add_member(user, Gitlab::Access::DEVELOPER)
request_access_token!
@payload = JSON::JWT.decode(json_response['id_token'], :skip_verification)
diff --git a/spec/requests/projects/environments_controller_spec.rb b/spec/requests/projects/environments_controller_spec.rb
index 5cdf507abef..0890b0c45da 100644
--- a/spec/requests/projects/environments_controller_spec.rb
+++ b/spec/requests/projects/environments_controller_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Projects::EnvironmentsController do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be_with_refind(:project) { create(:project, :repository) }
let(:environment) { create(:environment, name: 'production', project: project) }
diff --git a/spec/requests/projects/google_cloud/configuration_controller_spec.rb b/spec/requests/projects/google_cloud/configuration_controller_spec.rb
new file mode 100644
index 00000000000..08d4ad2f9ba
--- /dev/null
+++ b/spec/requests/projects/google_cloud/configuration_controller_spec.rb
@@ -0,0 +1,141 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Mock Types
+MockGoogleOAuth2Credentials = Struct.new(:app_id, :app_secret)
+
+RSpec.describe Projects::GoogleCloud::ConfigurationController do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:url) { project_google_cloud_configuration_path(project) }
+
+ let_it_be(:user_guest) { create(:user) }
+ let_it_be(:user_developer) { create(:user) }
+ let_it_be(:user_maintainer) { create(:user) }
+
+ let_it_be(:unauthorized_members) { [user_guest, user_developer] }
+ let_it_be(:authorized_members) { [user_maintainer] }
+
+ before do
+ project.add_guest(user_guest)
+ project.add_developer(user_developer)
+ project.add_maintainer(user_maintainer)
+ end
+
+ context 'when accessed by unauthorized members' do
+ it 'returns not found on GET request' do
+ unauthorized_members.each do |unauthorized_member|
+ sign_in(unauthorized_member)
+
+ get url
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'admin_project_google_cloud!',
+ label: 'error_access_denied',
+ property: 'invalid_user',
+ project: project,
+ user: unauthorized_member
+ )
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'when accessed by authorized members' do
+ it 'returns successful' do
+ authorized_members.each do |authorized_member|
+ sign_in(authorized_member)
+
+ get url
+
+ expect(response).to be_successful
+ expect(response).to render_template('projects/google_cloud/configuration/index')
+ end
+ end
+
+ context 'but gitlab instance is not configured for google oauth2' do
+ it 'returns forbidden' do
+ unconfigured_google_oauth2 = MockGoogleOAuth2Credentials.new('', '')
+ allow(Gitlab::Auth::OAuth::Provider).to receive(:config_for)
+ .with('google_oauth2')
+ .and_return(unconfigured_google_oauth2)
+
+ authorized_members.each do |authorized_member|
+ sign_in(authorized_member)
+
+ get url
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'google_oauth2_enabled!',
+ label: 'error_access_denied',
+ extra: { reason: 'google_oauth2_not_configured',
+ config: unconfigured_google_oauth2 },
+ project: project,
+ user: authorized_member
+ )
+ end
+ end
+ end
+
+ context 'but feature flag is disabled' do
+ before do
+ stub_feature_flags(incubation_5mp_google_cloud: false)
+ end
+
+ it 'returns not found' do
+ authorized_members.each do |authorized_member|
+ sign_in(authorized_member)
+
+ get url
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'feature_flag_enabled!',
+ label: 'error_access_denied',
+ property: 'feature_flag_not_enabled',
+ project: project,
+ user: authorized_member
+ )
+ end
+ end
+ end
+
+ context 'but google oauth2 token is not valid' do
+ it 'does not return revoke oauth url' do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
+ allow(client).to receive(:validate_token).and_return(false)
+ end
+
+ authorized_members.each do |authorized_member|
+ sign_in(authorized_member)
+
+ get url
+
+ expect(response).to be_successful
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'configuration#index',
+ label: 'success',
+ extra: {
+ configurationUrl: project_google_cloud_configuration_path(project),
+ deploymentsUrl: project_google_cloud_deployments_path(project),
+ databasesUrl: project_google_cloud_databases_path(project),
+ serviceAccounts: [],
+ createServiceAccountUrl: project_google_cloud_service_accounts_path(project),
+ emptyIllustrationUrl: ActionController::Base.helpers.image_path('illustrations/pipelines_empty.svg'),
+ configureGcpRegionsUrl: project_google_cloud_gcp_regions_path(project),
+ gcpRegions: [],
+ revokeOauthUrl: nil
+ },
+ project: project,
+ user: authorized_member
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/google_cloud/databases_controller_spec.rb b/spec/requests/projects/google_cloud/databases_controller_spec.rb
new file mode 100644
index 00000000000..c9335f8f317
--- /dev/null
+++ b/spec/requests/projects/google_cloud/databases_controller_spec.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Mock Types
+MockGoogleOAuth2Credentials = Struct.new(:app_id, :app_secret)
+
+RSpec.describe Projects::GoogleCloud::DatabasesController do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:url) { project_google_cloud_databases_path(project) }
+
+ let_it_be(:user_guest) { create(:user) }
+ let_it_be(:user_developer) { create(:user) }
+ let_it_be(:user_maintainer) { create(:user) }
+
+ let_it_be(:unauthorized_members) { [user_guest, user_developer] }
+ let_it_be(:authorized_members) { [user_maintainer] }
+
+ before do
+ project.add_guest(user_guest)
+ project.add_developer(user_developer)
+ project.add_maintainer(user_maintainer)
+ end
+
+ context 'when accessed by unauthorized members' do
+ it 'returns not found on GET request' do
+ unauthorized_members.each do |unauthorized_member|
+ sign_in(unauthorized_member)
+
+ get url
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'admin_project_google_cloud!',
+ label: 'error_access_denied',
+ property: 'invalid_user',
+ project: project,
+ user: unauthorized_member
+ )
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'when accessed by authorized members' do
+ it 'returns successful' do
+ authorized_members.each do |authorized_member|
+ sign_in(authorized_member)
+
+ get url
+
+ expect(response).to be_successful
+ expect(response).to render_template('projects/google_cloud/databases/index')
+ end
+ end
+
+ context 'but gitlab instance is not configured for google oauth2' do
+ it 'returns forbidden' do
+ unconfigured_google_oauth2 = MockGoogleOAuth2Credentials.new('', '')
+ allow(Gitlab::Auth::OAuth::Provider).to receive(:config_for)
+ .with('google_oauth2')
+ .and_return(unconfigured_google_oauth2)
+
+ authorized_members.each do |authorized_member|
+ sign_in(authorized_member)
+
+ get url
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'google_oauth2_enabled!',
+ label: 'error_access_denied',
+ extra: { reason: 'google_oauth2_not_configured',
+ config: unconfigured_google_oauth2 },
+ project: project,
+ user: authorized_member
+ )
+ end
+ end
+ end
+
+ context 'but feature flag is disabled' do
+ before do
+ stub_feature_flags(incubation_5mp_google_cloud: false)
+ end
+
+ it 'returns not found' do
+ authorized_members.each do |authorized_member|
+ sign_in(authorized_member)
+
+ get url
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'feature_flag_enabled!',
+ label: 'error_access_denied',
+ property: 'feature_flag_not_enabled',
+ project: project,
+ user: authorized_member
+ )
+ end
+ end
+ end
+
+ context 'but google oauth2 token is not valid' do
+ it 'does not return revoke oauth url' do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
+ allow(client).to receive(:validate_token).and_return(false)
+ end
+
+ authorized_members.each do |authorized_member|
+ sign_in(authorized_member)
+
+ get url
+
+ expect(response).to be_successful
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'databases#index',
+ label: 'success',
+ extra: {
+ configurationUrl: project_google_cloud_configuration_path(project),
+ deploymentsUrl: project_google_cloud_deployments_path(project),
+ databasesUrl: project_google_cloud_databases_path(project)
+ },
+ project: project,
+ user: authorized_member
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/google_cloud/deployments_controller_spec.rb b/spec/requests/projects/google_cloud/deployments_controller_spec.rb
index 7bd9609a7dc..9e854e01516 100644
--- a/spec/requests/projects/google_cloud/deployments_controller_spec.rb
+++ b/spec/requests/projects/google_cloud/deployments_controller_spec.rb
@@ -9,10 +9,9 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController do
let_it_be(:user_guest) { create(:user) }
let_it_be(:user_developer) { create(:user) }
let_it_be(:user_maintainer) { create(:user) }
- let_it_be(:user_creator) { project.creator }
let_it_be(:unauthorized_members) { [user_guest, user_developer] }
- let_it_be(:authorized_members) { [user_maintainer, user_creator] }
+ let_it_be(:authorized_members) { [user_maintainer] }
let_it_be(:urls_list) { %W[#{project_google_cloud_deployments_cloud_run_path(project)} #{project_google_cloud_deployments_cloud_storage_path(project)}] }
@@ -32,7 +31,7 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController do
expect_snowplow_event(
category: 'Projects::GoogleCloud',
action: 'admin_project_google_cloud!',
- label: 'access_denied',
+ label: 'error_access_denied',
property: 'invalid_user',
project: project,
user: nil
@@ -51,7 +50,7 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController do
expect_snowplow_event(
category: 'Projects::GoogleCloud',
action: 'admin_project_google_cloud!',
- label: 'access_denied',
+ label: 'error_access_denied',
property: 'invalid_user',
project: project,
user: nil
@@ -87,15 +86,15 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController do
end
end
- it 'redirects to google_cloud home on enable service error' do
+ it 'redirects to google cloud deployments on enable service error' do
get url
- expect(response).to redirect_to(project_google_cloud_index_path(project))
+ expect(response).to redirect_to(project_google_cloud_deployments_path(project))
# since GPC_PROJECT_ID is not set, enable cloud run service should return an error
expect_snowplow_event(
category: 'Projects::GoogleCloud',
action: 'deployments#cloud_run',
- label: 'enable_cloud_run_error',
+ label: 'error_enable_cloud_run',
extra: { message: 'No GCP projects found. Configure a service account or GCP_PROJECT_ID ci variable.',
status: :error },
project: project,
@@ -103,7 +102,7 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController do
)
end
- it 'redirects to gcp_error' do
+ it 'redirects to google cloud deployments with error' do
mock_gcp_error = Google::Apis::ClientError.new('some_error')
allow_next_instance_of(GoogleCloud::EnableCloudRunService) do |service|
@@ -112,11 +111,11 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController do
get url
- expect(response).to render_template(:gcp_error)
+ expect(response).to redirect_to(project_google_cloud_deployments_path(project))
expect_snowplow_event(
category: 'Projects::GoogleCloud',
action: 'deployments#cloud_run',
- label: 'gcp_error',
+ label: 'error_gcp',
extra: mock_gcp_error,
project: project,
user: user_maintainer
@@ -124,7 +123,7 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController do
end
context 'GCP_PROJECT_IDs are defined' do
- it 'redirects to google_cloud home on generate pipeline error' do
+ it 'redirects to google_cloud deployments on generate pipeline error' do
allow_next_instance_of(GoogleCloud::EnableCloudRunService) do |enable_cloud_run_service|
allow(enable_cloud_run_service).to receive(:execute).and_return({ status: :success })
end
@@ -135,11 +134,11 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController do
get url
- expect(response).to redirect_to(project_google_cloud_index_path(project))
+ expect(response).to redirect_to(project_google_cloud_deployments_path(project))
expect_snowplow_event(
category: 'Projects::GoogleCloud',
action: 'deployments#cloud_run',
- label: 'generate_pipeline_error',
+ label: 'error_generate_pipeline',
extra: { status: :error },
project: project,
user: user_maintainer
@@ -162,7 +161,7 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController do
expect_snowplow_event(
category: 'Projects::GoogleCloud',
action: 'deployments#cloud_run',
- label: 'cloud_run_success',
+ label: 'success',
extra: { "title": "Enable deployments to Cloud Run",
"description": "This merge request includes a Cloud Run deployment job in the pipeline definition (.gitlab-ci.yml).\n\nThe `deploy-to-cloud-run` job:\n* Requires the following environment variables\n * `GCP_PROJECT_ID`\n * `GCP_SERVICE_ACCOUNT_KEY`\n* Job definition can be found at: https://gitlab.com/gitlab-org/incubation-engineering/five-minute-production/library\n\nThis pipeline definition has been committed to the branch ``.\nYou may modify the pipeline definition further or accept the changes as-is if suitable.\n",
"source_project_id": project.id,
diff --git a/spec/requests/projects/google_cloud/gcp_regions_controller_spec.rb b/spec/requests/projects/google_cloud/gcp_regions_controller_spec.rb
index 56474b6520d..f88273080d5 100644
--- a/spec/requests/projects/google_cloud/gcp_regions_controller_spec.rb
+++ b/spec/requests/projects/google_cloud/gcp_regions_controller_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe Projects::GoogleCloud::GcpRegionsController do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:repository) { project.repository }
- let(:user_guest) { create(:user) }
- let(:user_maintainer) { create(:user) }
+ let_it_be(:user_guest) { create(:user) }
+ let_it_be(:user_maintainer) { create(:user) }
RSpec.shared_examples "should track not_found event" do
it "tracks event" do
@@ -15,7 +15,7 @@ RSpec.describe Projects::GoogleCloud::GcpRegionsController do
expect_snowplow_event(
category: 'Projects::GoogleCloud',
action: 'admin_project_google_cloud!',
- label: 'access_denied',
+ label: 'error_access_denied',
property: 'invalid_user',
project: project,
user: nil
@@ -29,7 +29,7 @@ RSpec.describe Projects::GoogleCloud::GcpRegionsController do
expect_snowplow_event(
category: 'Projects::GoogleCloud',
action: 'admin_project_google_cloud!',
- label: 'access_denied',
+ label: 'error_access_denied',
property: 'invalid_user',
project: project,
user: nil
@@ -43,7 +43,7 @@ RSpec.describe Projects::GoogleCloud::GcpRegionsController do
expect_snowplow_event(
category: 'Projects::GoogleCloud',
action: 'feature_flag_enabled!',
- label: 'access_denied',
+ label: 'error_access_denied',
property: 'feature_flag_not_enabled',
project: project,
user: user_maintainer
@@ -57,7 +57,7 @@ RSpec.describe Projects::GoogleCloud::GcpRegionsController do
expect_snowplow_event(
category: 'Projects::GoogleCloud',
action: 'google_oauth2_enabled!',
- label: 'access_denied',
+ label: 'error_access_denied',
extra: { reason: 'google_oauth2_not_configured', config: config },
project: project,
user: user_maintainer
@@ -144,8 +144,8 @@ RSpec.describe Projects::GoogleCloud::GcpRegionsController do
sign_in(user_maintainer)
end
- it 'redirects to google cloud index' do
- is_expected.to redirect_to(project_google_cloud_index_path(project))
+ it 'redirects to google cloud configurations' do
+ is_expected.to redirect_to(project_google_cloud_configuration_path(project))
end
end
end
diff --git a/spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb b/spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb
index 07590d3710e..36441a184cb 100644
--- a/spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb
+++ b/spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb
@@ -47,13 +47,13 @@ RSpec.describe Projects::GoogleCloud::RevokeOauthController do
post url
expect(request.session[GoogleApi::CloudPlatform::Client.session_key_for_token]).to be_nil
- expect(response).to redirect_to(project_google_cloud_index_path(project))
+ expect(response).to redirect_to(project_google_cloud_configuration_path(project))
expect(flash[:notice]).to eq('Google OAuth2 token revocation requested')
expect_snowplow_event(
category: 'Projects::GoogleCloud',
action: 'revoke_oauth#create',
- label: 'create',
- property: 'success',
+ label: 'success',
+ property: '{}',
project: project,
user: user
)
@@ -70,13 +70,13 @@ RSpec.describe Projects::GoogleCloud::RevokeOauthController do
post url
expect(request.session[GoogleApi::CloudPlatform::Client.session_key_for_token]).to be_nil
- expect(response).to redirect_to(project_google_cloud_index_path(project))
+ expect(response).to redirect_to(project_google_cloud_configuration_path(project))
expect(flash[:alert]).to eq('Google OAuth2 token revocation request failed')
expect_snowplow_event(
category: 'Projects::GoogleCloud',
action: 'revoke_oauth#create',
- label: 'create',
- property: 'failed',
+ label: 'error',
+ property: '{}',
project: project,
user: user
)
diff --git a/spec/requests/projects/google_cloud/service_accounts_controller_spec.rb b/spec/requests/projects/google_cloud/service_accounts_controller_spec.rb
index 4b32965e2b0..ae2519855db 100644
--- a/spec/requests/projects/google_cloud/service_accounts_controller_spec.rb
+++ b/spec/requests/projects/google_cloud/service_accounts_controller_spec.rb
@@ -8,13 +8,15 @@ RSpec.describe Projects::GoogleCloud::ServiceAccountsController do
describe 'GET index', :snowplow do
let_it_be(:url) { "#{project_google_cloud_service_accounts_path(project)}" }
- let(:user_guest) { create(:user) }
- let(:user_developer) { create(:user) }
- let(:user_maintainer) { create(:user) }
- let(:user_creator) { project.creator }
+ let_it_be(:user_guest) { create(:user) }
+ let_it_be(:user_developer) { create(:user) }
+ let_it_be(:user_maintainer) { create(:user) }
+ let_it_be(:user_creator) { project.creator }
- let(:unauthorized_members) { [user_guest, user_developer] }
- let(:authorized_members) { [user_maintainer, user_creator] }
+ let_it_be(:unauthorized_members) { [user_guest, user_developer] }
+ let_it_be(:authorized_members) { [user_maintainer, user_creator] }
+
+ let_it_be(:google_client_error) { Google::Apis::ClientError.new('client-error') }
before do
project.add_guest(user_guest)
@@ -30,7 +32,7 @@ RSpec.describe Projects::GoogleCloud::ServiceAccountsController do
expect_snowplow_event(
category: 'Projects::GoogleCloud',
action: 'admin_project_google_cloud!',
- label: 'access_denied',
+ label: 'error_access_denied',
property: 'invalid_user',
project: project,
user: nil
@@ -53,7 +55,7 @@ RSpec.describe Projects::GoogleCloud::ServiceAccountsController do
expect_snowplow_event(
category: 'Projects::GoogleCloud',
action: 'admin_project_google_cloud!',
- label: 'access_denied',
+ label: 'error_access_denied',
property: 'invalid_user',
project: project,
user: unauthorized_member
@@ -71,7 +73,7 @@ RSpec.describe Projects::GoogleCloud::ServiceAccountsController do
expect_snowplow_event(
category: 'Projects::GoogleCloud',
action: 'admin_project_google_cloud!',
- label: 'access_denied',
+ label: 'error_access_denied',
property: 'invalid_user',
project: project,
user: unauthorized_member
@@ -116,7 +118,7 @@ RSpec.describe Projects::GoogleCloud::ServiceAccountsController do
end
end
- it 'renders no_gcp_projects' do
+ it 'flashes error and redirects to google cloud configurations' do
authorized_members.each do |authorized_member|
allow_next_instance_of(BranchesFinder) do |branches_finder|
allow(branches_finder).to receive(:execute).and_return([])
@@ -130,7 +132,16 @@ RSpec.describe Projects::GoogleCloud::ServiceAccountsController do
get url
- expect(response).to render_template('projects/google_cloud/errors/no_gcp_projects')
+ expect(response).to redirect_to(project_google_cloud_configuration_path(project))
+ expect(flash[:warning]).to eq('No Google Cloud projects - You need at least one Google Cloud project')
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'service_accounts#index',
+ label: 'error_form',
+ property: 'no_gcp_projects',
+ project: project,
+ user: authorized_member
+ )
end
end
end
@@ -171,7 +182,7 @@ RSpec.describe Projects::GoogleCloud::ServiceAccountsController do
post url, params: { gcp_project: 'prj1', ref: 'env1' }
- expect(response).to redirect_to(project_google_cloud_index_path(project))
+ expect(response).to redirect_to(project_google_cloud_configuration_path(project))
end
end
end
@@ -181,29 +192,47 @@ RSpec.describe Projects::GoogleCloud::ServiceAccountsController do
before do
allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
allow(client).to receive(:validate_token).and_return(true)
- allow(client).to receive(:list_projects).and_raise(Google::Apis::ClientError.new(''))
- allow(client).to receive(:create_service_account).and_raise(Google::Apis::ClientError.new(''))
- allow(client).to receive(:create_service_account_key).and_raise(Google::Apis::ClientError.new(''))
+ allow(client).to receive(:list_projects).and_raise(google_client_error)
+ allow(client).to receive(:create_service_account).and_raise(google_client_error)
+ allow(client).to receive(:create_service_account_key).and_raise(google_client_error)
end
end
- it 'renders gcp_error template on GET' do
+ it 'GET flashes error and redirects to -/google_cloud/configurations' do
authorized_members.each do |authorized_member|
sign_in(authorized_member)
get url
- expect(response).to render_template(:gcp_error)
+ expect(response).to redirect_to(project_google_cloud_configuration_path(project))
+ expect(flash[:warning]).to eq('Google Cloud Error - client-error')
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'service_accounts#index',
+ label: 'error_gcp',
+ extra: google_client_error,
+ project: project,
+ user: authorized_member
+ )
end
end
- it 'renders gcp_error template on POST' do
+ it 'POST flashes error and redirects to -/google_cloud/configurations' do
authorized_members.each do |authorized_member|
sign_in(authorized_member)
post url, params: { gcp_project: 'prj1', environment: 'env1' }
- expect(response).to render_template(:gcp_error)
+ expect(response).to redirect_to(project_google_cloud_configuration_path(project))
+ expect(flash[:warning]).to eq('Google Cloud Error - client-error')
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'service_accounts#create',
+ label: 'error_gcp',
+ extra: google_client_error,
+ project: project,
+ user: authorized_member
+ )
end
end
end
diff --git a/spec/requests/projects/google_cloud_controller_spec.rb b/spec/requests/projects/google_cloud_controller_spec.rb
deleted file mode 100644
index d0814990989..00000000000
--- a/spec/requests/projects/google_cloud_controller_spec.rb
+++ /dev/null
@@ -1,178 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-# Mock Types
-MockGoogleOAuth2Credentials = Struct.new(:app_id, :app_secret)
-
-RSpec.describe Projects::GoogleCloudController do
- let_it_be(:project) { create(:project, :public) }
-
- describe 'GET index', :snowplow do
- let_it_be(:url) { "#{project_google_cloud_index_path(project)}" }
-
- context 'when a public request is made' do
- it 'returns not found' do
- get url
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect_snowplow_event(
- category: 'Projects::GoogleCloud',
- action: 'admin_project_google_cloud!',
- label: 'access_denied',
- property: 'invalid_user',
- project: project,
- user: nil)
- end
- end
-
- context 'when a project.guest makes request' do
- let(:user) { create(:user) }
-
- it 'returns not found' do
- project.add_guest(user)
- sign_in(user)
-
- get url
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect_snowplow_event(
- category: 'Projects::GoogleCloud',
- action: 'admin_project_google_cloud!',
- label: 'access_denied',
- property: 'invalid_user',
- project: project,
- user: user
- )
- end
- end
-
- context 'when project.developer makes request' do
- let(:user) { create(:user) }
-
- it 'returns not found' do
- project.add_developer(user)
- sign_in(user)
-
- get url
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect_snowplow_event(
- category: 'Projects::GoogleCloud',
- action: 'admin_project_google_cloud!',
- label: 'access_denied',
- property: 'invalid_user',
- project: project,
- user: user
- )
- end
- end
-
- context 'when project.maintainer makes request' do
- let(:user) { create(:user) }
-
- it 'returns successful' do
- project.add_maintainer(user)
- sign_in(user)
-
- get url
-
- expect(response).to be_successful
- end
- end
-
- context 'when project.creator makes request' do
- let(:user) { project.creator }
-
- it 'returns successful' do
- sign_in(user)
-
- get url
-
- expect(response).to be_successful
- end
- end
-
- describe 'when authorized user makes request' do
- let(:user) { project.creator }
-
- context 'but gitlab instance is not configured for google oauth2' do
- it 'returns forbidden' do
- unconfigured_google_oauth2 = MockGoogleOAuth2Credentials.new('', '')
- allow(Gitlab::Auth::OAuth::Provider).to receive(:config_for)
- .with('google_oauth2')
- .and_return(unconfigured_google_oauth2)
-
- sign_in(user)
-
- get url
-
- expect(response).to have_gitlab_http_status(:forbidden)
- expect_snowplow_event(
- category: 'Projects::GoogleCloud',
- action: 'google_oauth2_enabled!',
- label: 'access_denied',
- extra: { reason: 'google_oauth2_not_configured',
- config: unconfigured_google_oauth2 },
- project: project,
- user: user
- )
- end
- end
-
- context 'but feature flag is disabled' do
- before do
- stub_feature_flags(incubation_5mp_google_cloud: false)
- end
-
- it 'returns not found' do
- sign_in(user)
-
- get url
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect_snowplow_event(
- category: 'Projects::GoogleCloud',
- action: 'feature_flag_enabled!',
- label: 'access_denied',
- property: 'feature_flag_not_enabled',
- project: project,
- user: user
- )
- end
- end
-
- context 'but google oauth2 token is not valid' do
- it 'does not return revoke oauth url' do
- allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
- allow(client).to receive(:validate_token).and_return(false)
- end
-
- sign_in(user)
-
- get url
-
- expect(response).to be_successful
- expect_snowplow_event(
- category: 'Projects::GoogleCloud',
- action: 'google_cloud#index',
- label: 'index',
- extra: {
- screen: 'home',
- serviceAccounts: [],
- createServiceAccountUrl: project_google_cloud_service_accounts_path(project),
- enableCloudRunUrl: project_google_cloud_deployments_cloud_run_path(project),
- enableCloudStorageUrl: project_google_cloud_deployments_cloud_storage_path(project),
- emptyIllustrationUrl: ActionController::Base.helpers.image_path('illustrations/pipelines_empty.svg'),
- configureGcpRegionsUrl: project_google_cloud_gcp_regions_path(project),
- gcpRegions: [],
- revokeOauthUrl: nil
- },
- project: project,
- user: user
- )
- end
- end
- end
- end
-end
diff --git a/spec/requests/projects/harbor/artifacts_controller_spec.rb b/spec/requests/projects/harbor/artifacts_controller_spec.rb
new file mode 100644
index 00000000000..310fbcf0a0f
--- /dev/null
+++ b/spec/requests/projects/harbor/artifacts_controller_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::Harbor::ArtifactsController do
+ it_behaves_like 'a harbor artifacts controller', anonymous_status_code: '302' do
+ let_it_be(:container) { create(:project) }
+ let_it_be(:harbor_integration) { create(:harbor_integration, project: container) }
+ end
+end
diff --git a/spec/requests/projects/harbor/repositories_controller_spec.rb b/spec/requests/projects/harbor/repositories_controller_spec.rb
index cdb5a696d7e..751becaa20a 100644
--- a/spec/requests/projects/harbor/repositories_controller_spec.rb
+++ b/spec/requests/projects/harbor/repositories_controller_spec.rb
@@ -3,67 +3,8 @@
require 'spec_helper'
RSpec.describe Projects::Harbor::RepositoriesController do
- let_it_be(:project, reload: true) { create(:project) }
- let_it_be(:user) { create(:user) }
-
- shared_examples 'responds with 404 status' do
- it 'returns 404' do
- subject
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- shared_examples 'responds with 200 status' do
- it 'renders the index template' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template(:index)
- end
- end
-
- before do
- stub_feature_flags(harbor_registry_integration: true)
- project.add_developer(user)
- sign_in(user)
- end
-
- describe 'GET #index' do
- subject do
- get project_harbor_registry_index_path(project)
- response
- end
-
- context 'with harbor registry feature flag enabled' do
- it_behaves_like 'responds with 200 status'
- end
-
- context 'with harbor registry feature flag disabled' do
- before do
- stub_feature_flags(harbor_registry_integration: false)
- end
-
- it_behaves_like 'responds with 404 status'
- end
- end
-
- describe 'GET #show' do
- subject do
- get project_harbor_registry_path(project, 1)
- response
- end
-
- context 'with harbor registry feature flag enabled' do
- it_behaves_like 'responds with 200 status'
- end
-
- context 'with harbor registry feature flag disabled' do
- before do
- stub_feature_flags(harbor_registry_integration: false)
- end
-
- it_behaves_like 'responds with 404 status'
- end
+ it_behaves_like 'a harbor repositories controller', anonymous_status_code: '302' do
+ let_it_be(:container, reload: true) { create(:project) }
+ let_it_be(:harbor_integration) { create(:harbor_integration, project: container) }
end
end
diff --git a/spec/requests/projects/harbor/tags_controller_spec.rb b/spec/requests/projects/harbor/tags_controller_spec.rb
new file mode 100644
index 00000000000..119d1c746ac
--- /dev/null
+++ b/spec/requests/projects/harbor/tags_controller_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::Harbor::TagsController do
+ it_behaves_like 'a harbor tags controller', anonymous_status_code: '302' do
+ let_it_be(:container) { create(:project) }
+ let_it_be(:harbor_integration) { create(:harbor_integration, project: container) }
+ end
+end
diff --git a/spec/requests/projects/issues_controller_spec.rb b/spec/requests/projects/issues_controller_spec.rb
index 248e3e3a92b..de1d55ff5be 100644
--- a/spec/requests/projects/issues_controller_spec.rb
+++ b/spec/requests/projects/issues_controller_spec.rb
@@ -50,22 +50,6 @@ RSpec.describe Projects::IssuesController do
a_hash_including('id' => discussion_2.id.to_s)
])
end
-
- context 'when paginated_issue_discussions is disabled' do
- before do
- stub_feature_flags(paginated_issue_discussions: false)
- end
-
- it 'returns all discussions and ignores per_page param' do
- get_discussions(per_page: 2)
-
- discussions = Gitlab::Json.parse(response.body)
- notes = discussions.flat_map { |d| d['notes'] }
-
- expect(discussions.count).to eq(4)
- expect(notes.count).to eq(5)
- end
- end
end
end
diff --git a/spec/requests/projects/pipelines_controller_spec.rb b/spec/requests/projects/pipelines_controller_spec.rb
new file mode 100644
index 00000000000..1c6b1039aee
--- /dev/null
+++ b/spec/requests/projects/pipelines_controller_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::PipelinesController do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
+ before_all do
+ create(:ci_build, pipeline: pipeline, stage: 'build')
+ create(:ci_bridge, pipeline: pipeline, stage: 'build')
+ create(:generic_commit_status, pipeline: pipeline, stage: 'build')
+
+ project.add_developer(user)
+ end
+
+ before do
+ login_as(user)
+ end
+
+ describe "GET stages.json" do
+ it 'does not execute N+1 queries' do
+ request_build_stage
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ request_build_stage
+ end.count
+
+ create(:ci_build, pipeline: pipeline, stage: 'build')
+
+ expect { request_build_stage }.not_to exceed_query_limit(control_count)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ context 'with retried builds' do
+ it 'does not execute N+1 queries' do
+ create(:ci_build, :retried, :failed, pipeline: pipeline, stage: 'build')
+
+ request_build_stage(retried: true)
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ request_build_stage(retried: true)
+ end.count
+
+ create(:ci_build, :retried, :failed, pipeline: pipeline, stage: 'build')
+
+ expect { request_build_stage(retried: true) }.not_to exceed_query_limit(control_count)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ def request_build_stage(params = {})
+ get stage_namespace_project_pipeline_path(
+ params.merge(
+ namespace_id: project.namespace.to_param,
+ project_id: project.to_param,
+ id: pipeline.id,
+ stage: 'build',
+ format: :json
+ )
+ )
+ end
+ end
+end
diff --git a/spec/requests/users_controller_spec.rb b/spec/requests/users_controller_spec.rb
index d033ce15b00..42f14392117 100644
--- a/spec/requests/users_controller_spec.rb
+++ b/spec/requests/users_controller_spec.rb
@@ -236,14 +236,14 @@ RSpec.describe UsersController do
let!(:deploy_key) { create(:deploy_key, user: user) }
shared_examples_for 'renders all public keys' do
- it 'renders all non-deploy keys separated with a new line with text/plain content type without the comment key' do
+ it 'renders all non-deploy keys terminated with a new line with text/plain content type without the comment key' do
get "/#{user.username}.keys"
expect(response).to be_successful
expect(response.media_type).to eq("text/plain")
expect(response.body).not_to eq('')
- expect(response.body).to eq(user.all_ssh_keys.join("\n"))
+ expect(response.body).to eq(user.all_ssh_keys.map { |key| key + "\n" }.join)
expect(response.body).to include(key.key.sub(' dummy@gitlab.com', ''))
expect(response.body).to include(another_key.key.sub(' dummy@gitlab.com', ''))
@@ -308,7 +308,7 @@ RSpec.describe UsersController do
let!(:another_gpg_key) { create(:another_gpg_key, user: user.reload) }
shared_examples_for 'renders all verified GPG keys' do
- it 'renders all verified keys separated with a new line with text/plain content type' do
+ it 'renders all verified keys terminated with a new line with text/plain content type' do
get "/#{user.username}.gpg"
expect(response).to be_successful
@@ -316,7 +316,7 @@ RSpec.describe UsersController do
expect(response.media_type).to eq("text/plain")
expect(response.body).not_to eq('')
- expect(response.body).to eq(user.gpg_keys.select(&:verified?).map(&:key).join("\n"))
+ expect(response.body).to eq(user.gpg_keys.filter_map { |gpg_key| gpg_key.key + "\n" if gpg_key.verified? }.join)
expect(response.body).to include(gpg_key.key)
expect(response.body).to include(another_gpg_key.key)
diff --git a/spec/requests/verifies_with_email_spec.rb b/spec/requests/verifies_with_email_spec.rb
new file mode 100644
index 00000000000..2f249952455
--- /dev/null
+++ b/spec/requests/verifies_with_email_spec.rb
@@ -0,0 +1,234 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'VerifiesWithEmail', :clean_gitlab_redis_sessions, :clean_gitlab_redis_rate_limiting do
+ include SessionHelpers
+ include EmailHelpers
+
+ let(:user) { create(:user) }
+
+ shared_examples_for 'send verification instructions' do
+ it 'locks the user' do
+ user.reload
+ expect(user.unlock_token).not_to be_nil
+ expect(user.locked_at).not_to be_nil
+ end
+
+ it 'sends an email' do
+ mail = find_email_for(user)
+ expect(mail.to).to match_array([user.email])
+ expect(mail.subject).to eq('Verify your identity')
+ end
+ end
+
+ shared_examples_for 'prompt for email verification' do
+ it 'sets the verification_user_id session variable and renders the email verification template' do
+ expect(request.session[:verification_user_id]).to eq(user.id)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('devise/sessions/email_verification')
+ end
+ end
+
+ describe 'verify_with_email' do
+ context 'when user is locked and a verification_user_id session variable exists' do
+ before do
+ encrypted_token = Devise.token_generator.digest(User, :unlock_token, 'token')
+ user.update!(locked_at: Time.current, unlock_token: encrypted_token)
+ stub_session(verification_user_id: user.id)
+ end
+
+ context 'when rate limited and a verification_token param exists' do
+ before do
+ allow(Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
+
+ post(user_session_path(user: { verification_token: 'token' }))
+ end
+
+ it_behaves_like 'prompt for email verification'
+
+ it 'adds a verification error message' do
+ expect(response.body)
+ .to include("You&#39;ve reached the maximum amount of tries. "\
+ 'Wait 10 minutes or resend a new code and try again.')
+ end
+ end
+
+ context 'when an invalid verification_token param exists' do
+ before do
+ post(user_session_path(user: { verification_token: 'invalid_token' }))
+ end
+
+ it_behaves_like 'prompt for email verification'
+
+ it 'adds a verification error message' do
+ expect(response.body).to include(('The code is incorrect. Enter it again, or resend a new code.'))
+ end
+ end
+
+ context 'when an expired verification_token param exists' do
+ before do
+ user.update!(locked_at: 1.hour.ago)
+ post(user_session_path(user: { verification_token: 'token' }))
+ end
+
+ it_behaves_like 'prompt for email verification'
+
+ it 'adds a verification error message' do
+ expect(response.body).to include(('The code has expired. Resend a new code and try again.'))
+ end
+ end
+
+ context 'when a valid verification_token param exists' do
+ before do
+ post(user_session_path(user: { verification_token: 'token' }))
+ end
+
+ it 'unlocks the user' do
+ user.reload
+ expect(user.unlock_token).to be_nil
+ expect(user.locked_at).to be_nil
+ end
+
+ it 'redirects to the successful verification path' do
+ expect(response).to redirect_to(users_successful_verification_path)
+ end
+ end
+ end
+
+ context 'when signing in with a valid password' do
+ let(:sign_in) { post(user_session_path(user: { login: user.username, password: user.password })) }
+
+ context 'when the feature flag is toggled on' do
+ before do
+ stub_feature_flags(require_email_verification: user)
+ end
+
+ context 'when rate limited' do
+ before do
+ allow(Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
+ sign_in
+ end
+
+ it 'redirects to the login form and shows an alert message' do
+ expect(response).to redirect_to(new_user_session_path)
+ expect(flash[:alert]).to eq('Maximum login attempts exceeded. Wait 10 minutes and try again.')
+ end
+ end
+
+ context 'when the user already has an unlock_token set' do
+ before do
+ user.update!(unlock_token: 'token')
+ sign_in
+ end
+
+ it_behaves_like 'prompt for email verification'
+ end
+
+ context 'when the user is already locked' do
+ before do
+ user.update!(locked_at: Time.current)
+ perform_enqueued_jobs { sign_in }
+ end
+
+ it_behaves_like 'send verification instructions'
+ it_behaves_like 'prompt for email verification'
+ end
+
+ context 'when the user is signing in from an unknown ip address' do
+ before do
+ allow(AuthenticationEvent)
+ .to receive(:initial_login_or_known_ip_address?)
+ .and_return(false)
+
+ perform_enqueued_jobs { sign_in }
+ end
+
+ it_behaves_like 'send verification instructions'
+ it_behaves_like 'prompt for email verification'
+ end
+ end
+
+ context 'when the feature flag is toggled off' do
+ let(:another_user) { build(:user) }
+
+ before do
+ stub_feature_flags(require_email_verification: another_user)
+ sign_in
+ end
+
+ it 'redirects to the root path' do
+ expect(response).to redirect_to(root_path)
+ end
+ end
+ end
+ end
+
+ describe 'resend_verification_code' do
+ context 'when no verification_user_id session variable exists' do
+ before do
+ post(users_resend_verification_code_path)
+ end
+
+ it 'returns 204 No Content' do
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.body).to be_empty
+ end
+ end
+
+ context 'when a verification_user_id session variable exists' do
+ before do
+ stub_session(verification_user_id: user.id)
+
+ perform_enqueued_jobs do
+ post(users_resend_verification_code_path)
+ end
+ end
+
+ it_behaves_like 'send verification instructions'
+ it_behaves_like 'prompt for email verification'
+ end
+
+ context 'when exceeding the rate limit' do
+ before do
+ allow(Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
+
+ stub_session(verification_user_id: user.id)
+
+ perform_enqueued_jobs do
+ post(users_resend_verification_code_path)
+ end
+ end
+
+ it 'does not lock the user' do
+ user.reload
+ expect(user.unlock_token).to be_nil
+ expect(user.locked_at).to be_nil
+ end
+
+ it 'does not send an email' do
+ mail = find_email_for(user)
+ expect(mail).to be_nil
+ end
+
+ it_behaves_like 'prompt for email verification'
+ end
+ end
+
+ describe 'successful_verification' do
+ before do
+ sign_in(user)
+ end
+
+ it 'renders the template and removes the verification_user_id session variable' do
+ stub_session(verification_user_id: user.id)
+
+ get(users_successful_verification_path)
+
+ expect(request.session.has_key?(:verification_user_id)).to eq(false)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('successful_verification', layout: 'minimal')
+ expect(response.body).to include(root_path)
+ end
+ end
+end
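A note on the token handling exercised above: posting the literal string 'token' succeeds because the setup stores the Devise digest of that string as the user's unlock_token, so verification only needs to digest the submitted code and compare. A minimal illustration of that relationship (an assumed illustration for clarity, not code from this diff):

# Illustration only: how the stored digest and the submitted code relate.
raw_code = 'token'
digest   = Devise.token_generator.digest(User, :unlock_token, raw_code)

user.update!(locked_at: Time.current, unlock_token: digest)

# Verification recomputes the digest of the submitted code and compares it
# with the stored value:
Devise.token_generator.digest(User, :unlock_token, raw_code) == user.unlock_token # => true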
diff --git a/spec/routing/group_routing_spec.rb b/spec/routing/group_routing_spec.rb
index 5c2ef62683e..9f5f821cc61 100644
--- a/spec/routing/group_routing_spec.rb
+++ b/spec/routing/group_routing_spec.rb
@@ -59,6 +59,18 @@ RSpec.shared_examples 'groups routing' do
expect(get('/groups/gitlabhq/-/boards')).to route_to('groups/boards#index', group_id: 'gitlabhq')
end
+
+ it 'routes to the harbor repositories controller' do
+ expect(get("groups/#{group_path}/-/harbor/repositories")).to route_to('groups/harbor/repositories#index', group_id: group_path)
+ end
+
+ it 'routes to the harbor artifacts controller' do
+ expect(get("groups/#{group_path}/-/harbor/repositories/test/artifacts")).to route_to('groups/harbor/artifacts#index', group_id: group_path, repository_id: 'test')
+ end
+
+ it 'routes to the harbor tags controller' do
+ expect(get("groups/#{group_path}/-/harbor/repositories/test/artifacts/test/tags")).to route_to('groups/harbor/tags#index', group_id: group_path, repository_id: 'test', artifact_id: 'test')
+ end
end
RSpec.describe "Groups", "routing" do
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index 47fd1622306..1d58a31bd6e 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -788,20 +788,6 @@ RSpec.describe 'project routing' do
it 'to #test' do
expect(put('/gitlab/gitlabhq/-/settings/integrations/acme/test')).to route_to('projects/settings/integrations#test', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'acme')
end
-
- context 'legacy routes' do
- it 'to #edit' do
- expect(get('/gitlab/gitlabhq/-/integrations/acme/edit')).to route_to('projects/settings/integrations#edit', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'acme')
- end
-
- it 'to #update' do
- expect(put('/gitlab/gitlabhq/-/integrations/acme')).to route_to('projects/settings/integrations#update', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'acme')
- end
-
- it 'to #test' do
- expect(put('/gitlab/gitlabhq/-/integrations/acme/test')).to route_to('projects/settings/integrations#test', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'acme')
- end
- end
end
describe Projects::Settings::IntegrationHookLogsController do
@@ -812,16 +798,6 @@ RSpec.describe 'project routing' do
it 'to #retry' do
expect(post('/gitlab/gitlabhq/-/settings/integrations/acme/hook_logs/log/retry')).to route_to('projects/settings/integration_hook_logs#retry', namespace_id: 'gitlab', project_id: 'gitlabhq', integration_id: 'acme', id: 'log')
end
-
- context 'legacy routes' do
- it 'to #show' do
- expect(get('/gitlab/gitlabhq/-/integrations/acme/hook_logs/log')).to route_to('projects/settings/integration_hook_logs#show', namespace_id: 'gitlab', project_id: 'gitlabhq', integration_id: 'acme', id: 'log')
- end
-
- it 'to #retry' do
- expect(post('/gitlab/gitlabhq/-/integrations/acme/hook_logs/log/retry')).to route_to('projects/settings/integration_hook_logs#retry', namespace_id: 'gitlab', project_id: 'gitlabhq', integration_id: 'acme', id: 'log')
- end
- end
end
describe Projects::TemplatesController, 'routing' do
diff --git a/spec/routing/routing_spec.rb b/spec/routing/routing_spec.rb
index 79edfdd2b3f..2bd23340a88 100644
--- a/spec/routing/routing_spec.rb
+++ b/spec/routing/routing_spec.rb
@@ -310,6 +310,26 @@ RSpec.describe "Authentication", "routing" do
expect(post("/users/auth/ldapmain/callback")).not_to be_routable
end
end
+
+ context 'with multiple LDAP providers configured' do
+ let(:ldap_settings) do
+ {
+ enabled: true,
+ servers: {
+ main: { 'provider_name' => 'ldapmain' },
+ secondary: { 'provider_name' => 'ldapsecondary' }
+ }
+ }
+ end
+
+ it 'POST /users/auth/ldapmain/callback' do
+ expect(post("/users/auth/ldapmain/callback")).to route_to('ldap/omniauth_callbacks#ldapmain')
+ end
+
+ it 'POST /users/auth/ldapsecondary/callback' do
+ expect(post("/users/auth/ldapsecondary/callback")).to route_to('ldap/omniauth_callbacks#ldapsecondary')
+ end
+ end
end
end
diff --git a/spec/rubocop/cop/database/rescue_query_canceled_spec.rb b/spec/rubocop/cop/database/rescue_query_canceled_spec.rb
new file mode 100644
index 00000000000..56314a18bf5
--- /dev/null
+++ b/spec/rubocop/cop/database/rescue_query_canceled_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../../rubocop/cop/database/rescue_query_canceled'
+
+RSpec.describe RuboCop::Cop::Database::RescueQueryCanceled do
+ subject(:cop) { described_class.new }
+
+ it 'flags the use of ActiveRecord::QueryCanceled' do
+ expect_offense(<<~CODE)
+ begin
+ do_something
+ rescue ActiveRecord::QueryCanceled => e
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid rescuing the `ActiveRecord::QueryCanceled` [...]
+ try_something_else
+ end
+ CODE
+ end
+
+ it 'does not flag a different exception' do
+ expect_no_offenses(<<~CODE)
+ begin
+ do_something
+ rescue ActiveRecord::RecordNotFound => e
+ try_something_else
+ end
+ CODE
+ end
+end
diff --git a/spec/rubocop/cop/database/rescue_statement_timeout_spec.rb b/spec/rubocop/cop/database/rescue_statement_timeout_spec.rb
new file mode 100644
index 00000000000..b9b2ce1c16b
--- /dev/null
+++ b/spec/rubocop/cop/database/rescue_statement_timeout_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../../rubocop/cop/database/rescue_statement_timeout'
+
+RSpec.describe RuboCop::Cop::Database::RescueStatementTimeout do
+ subject(:cop) { described_class.new }
+
+ it 'flags the use of ActiveRecord::StatementTimeout' do
+ expect_offense(<<~CODE)
+ begin
+ do_something
+ rescue ActiveRecord::StatementTimeout => e
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid rescuing the `ActiveRecord::StatementTimeout` [...]
+ try_something_else
+ end
+ CODE
+ end
+
+ it 'does not flag a different exception' do
+ expect_no_offenses(<<~CODE)
+ begin
+ do_something
+ rescue ActiveRecord::RecordNotFound => e
+ try_something_else
+ end
+ CODE
+ end
+end
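The two Database cops exercised above are not included in this diff. As a rough sketch of the pattern such a cop typically follows (an assumed minimal implementation, not the GitLab one; method names follow the public rubocop-ast API):

require 'rubocop'

# Hypothetical minimal cop in the spirit of the ones tested above.
module RuboCop
  module Cop
    module Database
      class RescueQueryCanceled < RuboCop::Cop::Base
        MSG = 'Avoid rescuing the `ActiveRecord::QueryCanceled` class.'

        def on_resbody(node)
          # node.exceptions returns the constant nodes listed after `rescue`.
          node.exceptions.each do |exception|
            add_offense(node) if exception.const_name == 'ActiveRecord::QueryCanceled'
          end
        end
      end
    end
  end
end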
diff --git a/spec/rubocop/cop/gitlab/namespaced_class_spec.rb b/spec/rubocop/cop/gitlab/namespaced_class_spec.rb
index d9209a8672c..83d0eaf4884 100644
--- a/spec/rubocop/cop/gitlab/namespaced_class_spec.rb
+++ b/spec/rubocop/cop/gitlab/namespaced_class_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe RuboCop::Cop::Gitlab::NamespacedClass do
it 'flags a class definition without additional namespace' do
expect_offense(namespaced(<<~SOURCE))
class MyClass
- ^^^^^^^^^^^^^ #{described_class::MSG}
+ ^^^^^^^ #{described_class::MSG}
end
SOURCE
end
@@ -28,7 +28,7 @@ RSpec.describe RuboCop::Cop::Gitlab::NamespacedClass do
it 'flags a compact class definition without additional namespace' do
expect_offense(<<~SOURCE, namespace: namespace)
class %{namespace}::MyClass
- ^{namespace}^^^^^^^^^^^^^^^ #{described_class::MSG}
+ ^{namespace}^^^^^^^^^ #{described_class::MSG}
end
SOURCE
end
@@ -36,7 +36,7 @@ RSpec.describe RuboCop::Cop::Gitlab::NamespacedClass do
it 'flags a class definition with inheritance without additional namespace' do
expect_offense(namespaced(<<~SOURCE))
class MyClass < ApplicationRecord
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG}
+ ^^^^^^^ #{described_class::MSG}
def some_method
true
end
diff --git a/spec/scripts/determine-qa-tests_spec.rb b/spec/scripts/determine-qa-tests_spec.rb
new file mode 100644
index 00000000000..043eb7f2dc9
--- /dev/null
+++ b/spec/scripts/determine-qa-tests_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+require 'fast_spec_helper'
+
+load File.expand_path('../../scripts/determine-qa-tests', __dir__)
+
+RSpec.describe 'scripts/determine-qa-tests' do
+ describe DetermineQATests do
+ describe '.execute' do
+ let(:qa_spec_files) do
+ %w[qa/qa/specs/features/browser_ui/1_manage/test1.rb
+ qa/qa/specs/features/browser_ui/1_manage/user/test2.rb]
+ end
+
+ let(:qa_spec_and_non_spec_files) do
+ %w[qa/qa/specs/features/browser_ui/1_manage/test1.rb
+ qa/qa/page/admin/menu.rb]
+ end
+
+ let(:non_qa_files) do
+ %w[rubocop/code_reuse_helpers.rb
+ app/components/diffs/overflow_warning_component.rb]
+ end
+
+ let(:non_qa_and_feature_flag_files) do
+ %w[rubocop/code_reuse_helpers.rb
+ app/components/diffs/overflow_warning_component.rb
+ config/feature_flags/development/access_token_ajax.yml]
+ end
+
+ let(:qa_spec_and_non_qa_files) do
+ %w[rubocop/code_reuse_helpers.rb
+ app/components/diffs/overflow_warning_component.rb
+ qa/qa/specs/features/browser_ui/1_manage/test1.rb]
+ end
+
+ let(:qa_non_spec_and_non_qa_files) do
+ %w[rubocop/code_reuse_helpers.rb
+ app/components/diffs/overflow_warning_component.rb
+ qa/qa/page/admin/menu.rb]
+ end
+
+ shared_examples 'determine qa tests' do
+ context 'when only qa spec files have changed' do
+ it 'returns only the changed qa specs' do
+ subject = described_class.new({ changed_files: qa_spec_files }.merge(labels))
+
+ expect(subject.execute).to eql qa_spec_files.map { |path| path.delete_prefix("qa/") }.join(' ')
+ end
+ end
+
+ context 'when qa spec and non spec files have changed' do
+ it 'does not return any specs' do
+ subject = described_class.new({ changed_files: qa_spec_and_non_spec_files }.merge(labels))
+ expect(subject.execute).to be_nil
+ end
+ end
+
+ context 'when non-qa and feature flag files have changed' do
+ it 'does not return any specs' do
+ subject = described_class.new({ changed_files: non_qa_and_feature_flag_files }.merge(labels))
+ expect(subject.execute).to be_nil
+ end
+ end
+
+ context 'when qa spec and non-qa files have changed' do
+ it 'does not return any specs' do
+ subject = described_class.new({ changed_files: qa_spec_and_non_qa_files }.merge(labels))
+ expect(subject.execute).to be_nil
+ end
+ end
+
+ context 'when qa non-spec and non-qa files have changed' do
+ it 'does not return any specs' do
+ subject = described_class.new({ changed_files: qa_non_spec_and_non_qa_files }.merge(labels))
+ expect(subject.execute).to be_nil
+ end
+ end
+ end
+
+ context 'when a devops label is not specified' do
+ let(:labels) { { mr_labels: ['type::feature'] } }
+
+ it_behaves_like 'determine qa tests'
+
+ context 'when only non-qa files have changed' do
+ it 'does not return any specs' do
+ subject = described_class.new({ changed_files: non_qa_files })
+ expect(subject.execute).to be_nil
+ end
+ end
+ end
+
+ context 'when a devops label is specified' do
+ let(:labels) { { mr_labels: %w[devops::manage type::feature] } }
+
+ it_behaves_like 'determine qa tests'
+
+ context 'when only non-qa files have changed' do
+ it 'returns the specs for the devops label' do
+ subject = described_class.new({ changed_files: non_qa_files }.merge(labels))
+ allow(subject).to receive(:qa_spec_directories_for_devops_stage)
+ .and_return(['qa/qa/specs/features/browser_ui/1_manage/'])
+ expect(subject.execute).to eql 'qa/specs/features/browser_ui/1_manage/'
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/scripts/lib/glfm/update_example_snapshots_spec.rb b/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
index 82ed8563c3a..149a384d31e 100644
--- a/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
+++ b/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
@@ -14,7 +14,7 @@ require_relative '../../../../scripts/lib/glfm/update_example_snapshots'
# This is because the invocation of the full script is slow, because it executes
# two subshells for processing, one which runs a full Rails environment, and one
# which runs a jest test environment. This results in each full run of the script
-# taking between 30-60 seconds. The majority of this is spent loading the Rails environmnent.
+# taking between 30-60 seconds. The majority of this is spent loading the Rails environment.
#
# However, only the `writing html.yml and prosemirror_json.yml` context is used
# to test these slow sub-processes, and it only contains a single example.
@@ -79,6 +79,31 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
<div class="extension">
+ ### Motivation
+
+ This is a third-level heading with no examples, as exists in the actual GHFM
+ specification. It exists to drive a fix for a bug where this caused the
+ indexing and ordering in examples_index.yml to be incorrect.
+
+ ### Another H3
+
+ This is a second consecutive third-level heading. It exists to drive full code coverage
+ for this scenario, although it doesn't (yet) exist in the actual spec.txt.
+
+ ## An H2 with all disabled examples
+
+ In the GHFM specification, the 'Task list items (extension)' contains only "disabled"
+ examples, which are ignored by the GitHub fork of `spec_test.py`, and thus not part of the
+ Markdown conformance tests, but are part of the HTML-rendered version of the specification.
+ We also exclude them from our GLFM specification for consistency, but we may add
+ GitLab-specific examples for the behavior instead.
+
+ ```````````````````````````````` example disabled
+ this example is disabled during conformance testing
+ .
+ <p>this example is disabled during conformance testing</p>
+ ````````````````````````````````
+
## Strikethrough (extension)
GFM enables the `strikethrough` extension.
@@ -202,7 +227,7 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
"existing": "This entry is existing, but not skipped, so it will be overwritten."
}
# 02_01__inlines__strong__002: is omitted from the existing file and skipped, to test that scenario.
- 02_02__inlines__strikethrough_extension__001: |-
+ 02_03__inlines__strikethrough_extension__001: |-
{
"type": "doc",
"content": [
@@ -314,20 +339,20 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
02_01__inlines__strong__002:
spec_txt_example_position: 2
source_specification: github
- 02_02__inlines__strikethrough_extension__001:
- spec_txt_example_position: 3
+ 02_03__inlines__strikethrough_extension__001:
+ spec_txt_example_position: 4
source_specification: github
03_01__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001:
- spec_txt_example_position: 4
+ spec_txt_example_position: 5
source_specification: gitlab
04_01__second_gitlab_specific_section_with_examples__strong_but_with_html__001:
- spec_txt_example_position: 5
+ spec_txt_example_position: 6
source_specification: gitlab
05_01__third_gitlab_specific_section_with_skipped_examples__strong_but_skipped__001:
- spec_txt_example_position: 6
+ spec_txt_example_position: 7
source_specification: gitlab
05_02__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001:
- spec_txt_example_position: 7
+ spec_txt_example_position: 8
source_specification: gitlab
ES_EXAMPLES_INDEX_YML_CONTENTS
end
@@ -349,7 +374,7 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
__bold__
02_01__inlines__strong__002: |
__bold with more text__
- 02_02__inlines__strikethrough_extension__001: |
+ 02_03__inlines__strikethrough_extension__001: |
~~Hi~~ Hello, world!
03_01__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001: |
**bold**
@@ -413,7 +438,7 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
<p data-sourcepos="1:1-1:23" dir="auto"><strong>bold with more text</strong></p>
wysiwyg: |-
<p><strong>bold with more text</strong></p>
- 02_02__inlines__strikethrough_extension__001:
+ 02_03__inlines__strikethrough_extension__001:
canonical: |
<p><del>Hi</del> Hello, world!</p>
static: |-
@@ -468,7 +493,7 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
}
]
}
- 02_02__inlines__strikethrough_extension__001: |-
+ 02_03__inlines__strikethrough_extension__001: |-
{
"type": "doc",
"content": [
diff --git a/spec/scripts/pipeline_test_report_builder_spec.rb b/spec/scripts/pipeline_test_report_builder_spec.rb
index 8553ada044e..198cdefc530 100644
--- a/spec/scripts/pipeline_test_report_builder_spec.rb
+++ b/spec/scripts/pipeline_test_report_builder_spec.rb
@@ -103,16 +103,18 @@ RSpec.describe PipelineTestReportBuilder do
end
describe '#test_report_for_latest_pipeline' do
+ let(:failed_build_uri) { "#{failed_pipeline_url}/tests/suite.json?build_ids[]=#{failed_build_id}" }
+
+ before do
+ allow(subject).to receive(:fetch).with(failed_build_uri).and_return(failed_builds_for_pipeline)
+ end
+
it 'fetches builds from pipeline related to MR' do
- expect(subject).to receive(:fetch).with("#{failed_pipeline_url}/tests/suite.json?build_ids[]=#{failed_build_id}").and_return(failed_builds_for_pipeline)
- subject.test_report_for_latest_pipeline
+ expected = { "suites" => [failed_builds_for_pipeline] }.to_json
+ expect(subject.test_report_for_latest_pipeline).to eq(expected)
end
context 'canonical pipeline' do
- before do
- allow(subject).to receive(:test_report_for_build).and_return(test_report_for_build)
- end
-
context 'no previous pipeline' do
let(:mr_pipelines) { [] }
@@ -171,6 +173,10 @@ RSpec.describe PipelineTestReportBuilder do
end
context 'failed pipeline and failed test builds' do
+ before do
+ allow(subject).to receive(:fetch).with(failed_build_uri).and_return(test_report_for_build)
+ end
+
it 'returns populated test list for suites' do
actual = subject.test_report_for_latest_pipeline
expected = {
@@ -180,6 +186,36 @@ RSpec.describe PipelineTestReportBuilder do
expect(actual).to eq(expected)
end
end
+
+ context 'when receiving a server error' do
+ let(:response) { instance_double('Net::HTTPResponse') }
+ let(:error) { Net::HTTPServerException.new('server error', response) }
+ let(:test_report_for_latest_pipeline) { subject.test_report_for_latest_pipeline }
+
+ before do
+ allow(response).to receive(:code).and_return(response_code)
+ allow(subject).to receive(:fetch).with(failed_build_uri).and_raise(error)
+ end
+
+ context 'when response code is 404' do
+ let(:response_code) { 404 }
+
+ it 'continues without the missing reports' do
+ expected = { 'suites' => [] }.to_json
+
+ expect { test_report_for_latest_pipeline }.not_to raise_error
+ expect(test_report_for_latest_pipeline).to eq(expected)
+ end
+ end
+
+ context 'when response code is unexpected' do
+ let(:response_code) { 500 }
+
+ it 'raises HTTPServerException' do
+ expect { test_report_for_latest_pipeline }.to raise_error(error)
+ end
+ end
+ end
end
end
end
diff --git a/spec/serializers/ci/dag_job_group_entity_spec.rb b/spec/serializers/ci/dag_job_group_entity_spec.rb
index 5a75c04efe5..b654b21f583 100644
--- a/spec/serializers/ci/dag_job_group_entity_spec.rb
+++ b/spec/serializers/ci/dag_job_group_entity_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Ci::DagJobGroupEntity do
subject { entity.as_json }
context 'when group contains 1 job' do
- let(:job) { create(:ci_build, stage: stage, pipeline: pipeline, name: 'test') }
+ let(:job) { create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'test') }
let(:jobs) { [job] }
it 'exposes a name' do
@@ -38,8 +38,8 @@ RSpec.describe Ci::DagJobGroupEntity do
end
context 'when group contains multiple parallel jobs' do
- let(:job_1) { create(:ci_build, stage: stage, pipeline: pipeline, name: 'test 1/2') }
- let(:job_2) { create(:ci_build, stage: stage, pipeline: pipeline, name: 'test 2/2') }
+ let(:job_1) { create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'test 1/2') }
+ let(:job_2) { create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'test 2/2') }
let(:jobs) { [job_1, job_2] }
it 'exposes a name' do
diff --git a/spec/serializers/ci/dag_pipeline_entity_spec.rb b/spec/serializers/ci/dag_pipeline_entity_spec.rb
index 31a0dc5c048..548fd247743 100644
--- a/spec/serializers/ci/dag_pipeline_entity_spec.rb
+++ b/spec/serializers/ci/dag_pipeline_entity_spec.rb
@@ -43,9 +43,9 @@ RSpec.describe Ci::DagPipelineEntity do
end
context 'when pipeline has parallel jobs, DAG needs and GenericCommitStatus' do
- let!(:stage_build) { create(:ci_stage_entity, name: 'build', position: 1, pipeline: pipeline) }
- let!(:stage_test) { create(:ci_stage_entity, name: 'test', position: 2, pipeline: pipeline) }
- let!(:stage_deploy) { create(:ci_stage_entity, name: 'deploy', position: 3, pipeline: pipeline) }
+ let!(:stage_build) { create(:ci_stage, name: 'build', position: 1, pipeline: pipeline) }
+ let!(:stage_test) { create(:ci_stage, name: 'test', position: 2, pipeline: pipeline) }
+ let!(:stage_deploy) { create(:ci_stage, name: 'deploy', position: 3, pipeline: pipeline) }
let!(:job_build_1) { create(:ci_build, name: 'build 1', stage: 'build', pipeline: pipeline) }
let!(:job_build_2) { create(:ci_build, name: 'build 2', stage: 'build', pipeline: pipeline) }
diff --git a/spec/serializers/ci/dag_stage_entity_spec.rb b/spec/serializers/ci/dag_stage_entity_spec.rb
index 0262ccdac68..3530a5e2bae 100644
--- a/spec/serializers/ci/dag_stage_entity_spec.rb
+++ b/spec/serializers/ci/dag_stage_entity_spec.rb
@@ -6,10 +6,10 @@ RSpec.describe Ci::DagStageEntity do
let_it_be(:pipeline) { create(:ci_pipeline) }
let_it_be(:request) { double(:request) }
- let(:stage) { build(:ci_stage, pipeline: pipeline, name: 'test') }
+ let(:stage) { create(:ci_stage, pipeline: pipeline, name: 'test') }
let(:entity) { described_class.new(stage, request: request) }
- let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
+ let!(:job) { create(:ci_build, :success, pipeline: pipeline, stage_id: stage.id) }
describe '#as_json' do
subject { entity.as_json }
diff --git a/spec/serializers/ci/job_entity_spec.rb b/spec/serializers/ci/job_entity_spec.rb
index 05b9e38444c..174d9a0aadb 100644
--- a/spec/serializers/ci/job_entity_spec.rb
+++ b/spec/serializers/ci/job_entity_spec.rb
@@ -52,6 +52,14 @@ RSpec.describe Ci::JobEntity do
expect(subject[:status]).to include :icon, :favicon, :text, :label, :tooltip
end
+ it 'contains queued_at' do
+ expect(subject).to include :queued_at
+ end
+
+ it 'contains queued_duration' do
+ expect(subject).to include :queued_duration
+ end
+
context 'when job is retryable' do
before do
job.update!(status: :failed)
diff --git a/spec/serializers/ci/job_serializer_spec.rb b/spec/serializers/ci/job_serializer_spec.rb
index d47c9fdbf24..5f889a10f3d 100644
--- a/spec/serializers/ci/job_serializer_spec.rb
+++ b/spec/serializers/ci/job_serializer_spec.rb
@@ -28,36 +28,4 @@ RSpec.describe Ci::JobSerializer do
end
end
end
-
- describe '#represent_status' do
- context 'for a failed build' do
- let(:resource) { create(:ci_build, :failed) }
- let(:status) { resource.detailed_status(double('user')) }
-
- subject { serializer.represent_status(resource) }
-
- it 'serializes only status' do
- expect(subject[:text]).to eq(status.text)
- expect(subject[:label]).to eq('failed')
- expect(subject[:tooltip]).to eq('failed - (unknown failure)')
- expect(subject[:icon]).to eq(status.icon)
- expect(subject[:favicon]).to match_asset_path("/assets/ci_favicons/#{status.favicon}.png")
- end
- end
-
- context 'for any other type of build' do
- let(:resource) { create(:ci_build, :success) }
- let(:status) { resource.detailed_status(double('user')) }
-
- subject { serializer.represent_status(resource) }
-
- it 'serializes only status' do
- expect(subject[:text]).to eq(status.text)
- expect(subject[:label]).to eq('passed')
- expect(subject[:tooltip]).to eq('passed')
- expect(subject[:icon]).to eq(status.icon)
- expect(subject[:favicon]).to match_asset_path("/assets/ci_favicons/#{status.favicon}.png")
- end
- end
- end
end
diff --git a/spec/serializers/cluster_entity_spec.rb b/spec/serializers/cluster_entity_spec.rb
index 514828e3c69..7c4c146575d 100644
--- a/spec/serializers/cluster_entity_spec.rb
+++ b/spec/serializers/cluster_entity_spec.rb
@@ -55,65 +55,5 @@ RSpec.describe ClusterEntity do
expect(helm[:status]).to eq(:not_installable)
end
end
-
- context 'gitlab_managed_apps_logs_path' do
- let(:cluster) { create(:cluster, :project) }
- let(:user) { create(:user) }
-
- subject { described_class.new(cluster, request: request).as_json }
-
- before do
- allow_next_instance_of(Clusters::ClusterPresenter) do |presenter|
- allow(presenter).to receive(:show_path).and_return(nil)
- end
- end
-
- it 'return projects log explorer path' do
- log_explorer_path = project_logs_path(cluster.project, cluster_id: cluster.id)
-
- expect_next_instance_of(Clusters::ClusterPresenter, cluster, current_user: user) do |presenter|
- expect(presenter).to receive(:gitlab_managed_apps_logs_path).and_return(log_explorer_path)
- end
-
- expect(subject[:gitlab_managed_apps_logs_path]).to eq(log_explorer_path)
- end
-
- context 'when feature is disabled' do
- before do
- stub_feature_flags(monitor_logging: false)
- end
-
- specify { is_expected.not_to include(:gitlab_managed_apps_logs_path) }
- end
- end
-
- context 'enable_advanced_logs_querying' do
- let(:cluster) { create(:cluster, :project) }
- let(:user) { create(:user) }
-
- subject { described_class.new(cluster, request: request).as_json }
-
- context 'elastic stack is not installed on cluster' do
- it 'returns false' do
- expect(subject[:enable_advanced_logs_querying]).to be false
- end
- end
-
- context 'elastic stack is enabled on cluster' do
- it 'returns true' do
- create(:clusters_integrations_elastic_stack, cluster: cluster)
-
- expect(subject[:enable_advanced_logs_querying]).to be true
- end
- end
-
- context 'when feature is disabled' do
- before do
- stub_feature_flags(monitor_logging: false)
- end
-
- specify { is_expected.not_to include(:enable_advanced_logs_querying) }
- end
- end
end
end
diff --git a/spec/serializers/cluster_serializer_spec.rb b/spec/serializers/cluster_serializer_spec.rb
index e65e97b6ae0..7ec6d3c8bb8 100644
--- a/spec/serializers/cluster_serializer_spec.rb
+++ b/spec/serializers/cluster_serializer_spec.rb
@@ -14,8 +14,6 @@ RSpec.describe ClusterSerializer do
:enabled,
:environment_scope,
:id,
- :gitlab_managed_apps_logs_path,
- :enable_advanced_logs_querying,
:kubernetes_errors,
:name,
:nodes,
diff --git a/spec/serializers/diffs_entity_spec.rb b/spec/serializers/diffs_entity_spec.rb
index aef7d3732f8..72777bde30c 100644
--- a/spec/serializers/diffs_entity_spec.rb
+++ b/spec/serializers/diffs_entity_spec.rb
@@ -100,6 +100,7 @@ RSpec.describe DiffsEntity do
let(:options) { super().merge(merge_ref_head_diff: merge_ref_head_diff) }
before do
+ allow(merge_request).to receive(:cannot_be_merged?).and_return(true)
allow(MergeRequests::Conflicts::ListService).to receive(:new).and_return(conflicts)
end
diff --git a/spec/serializers/diffs_metadata_entity_spec.rb b/spec/serializers/diffs_metadata_entity_spec.rb
index 3311b434ce5..0e3d808aaac 100644
--- a/spec/serializers/diffs_metadata_entity_spec.rb
+++ b/spec/serializers/diffs_metadata_entity_spec.rb
@@ -65,6 +65,7 @@ RSpec.describe DiffsMetadataEntity do
let(:conflicts) { double(conflicts: double(files: [conflict_file]), can_be_resolved_in_ui?: false) }
before do
+ allow(merge_request).to receive(:cannot_be_merged?).and_return(true)
allow(MergeRequests::Conflicts::ListService).to receive(:new).and_return(conflicts)
end
diff --git a/spec/serializers/environment_entity_spec.rb b/spec/serializers/environment_entity_spec.rb
index 9b6a293da16..cbe32600941 100644
--- a/spec/serializers/environment_entity_spec.rb
+++ b/spec/serializers/environment_entity_spec.rb
@@ -133,54 +133,6 @@ RSpec.describe EnvironmentEntity do
end
end
- context 'pod_logs' do
- context 'with reporter access' do
- before do
- project.add_reporter(user)
- end
-
- it 'does not expose logs keys' do
- expect(subject).not_to include(:logs_path)
- expect(subject).not_to include(:logs_api_path)
- expect(subject).not_to include(:enable_advanced_logs_querying)
- end
- end
-
- context 'with developer access' do
- before do
- project.add_developer(user)
- end
-
- it 'exposes logs keys' do
- expect(subject).to include(:logs_path)
- expect(subject).to include(:logs_api_path)
- expect(subject).to include(:enable_advanced_logs_querying)
- end
-
- it 'uses k8s api when ES is not available' do
- expect(subject[:logs_api_path]).to eq(k8s_project_logs_path(project, environment_name: environment.name, format: :json))
- end
-
- it 'uses ES api when ES is available' do
- allow(environment).to receive(:elastic_stack_available?).and_return(true)
-
- expect(subject[:logs_api_path]).to eq(elasticsearch_project_logs_path(project, environment_name: environment.name, format: :json))
- end
-
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(monitor_logging: false)
- end
-
- it 'does not expose logs keys' do
- expect(subject).not_to include(:logs_path)
- expect(subject).not_to include(:logs_api_path)
- expect(subject).not_to include(:enable_advanced_logs_querying)
- end
- end
- end
- end
-
context 'with deployment service ready' do
before do
allow(environment).to receive(:has_terminals?).and_return(true)
diff --git a/spec/serializers/integrations/field_entity_spec.rb b/spec/serializers/integrations/field_entity_spec.rb
index e75dc051f5e..7af17cf6df6 100644
--- a/spec/serializers/integrations/field_entity_spec.rb
+++ b/spec/serializers/integrations/field_entity_spec.rb
@@ -114,6 +114,6 @@ RSpec.describe Integrations::FieldEntity do
end
def integration_field(name)
- integration.global_fields.find { |f| f[:name] == name }
+ integration.form_fields.find { |f| f[:name] == name }
end
end
diff --git a/spec/serializers/integrations/harbor_serializers/artifact_entity_spec.rb b/spec/serializers/integrations/harbor_serializers/artifact_entity_spec.rb
new file mode 100644
index 00000000000..c9a95c02e19
--- /dev/null
+++ b/spec/serializers/integrations/harbor_serializers/artifact_entity_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::HarborSerializers::ArtifactEntity do
+ let_it_be(:harbor_integration) { create(:harbor_integration) }
+
+ let(:artifact) do
+ {
+ "digest": "sha256:14d4f50961544fdb669075c442509f194bdc4c0e344bde06e35dbd55af842a38",
+ "id": 5,
+ "project_id": 14,
+ "push_time": "2022-03-22T09:04:56.170Z",
+ "repository_id": 5,
+ "size": 774790,
+ "tags": [
+ {
+ "artifact_id": 5,
+ "id": 7,
+ "immutable": false,
+ "name": "2",
+ "push_time": "2022-03-22T09:05:04.844Z",
+ "repository_id": 5,
+ "signed": false
+ },
+ {
+ "artifact_id": 5,
+ "id": 6,
+ "immutable": false,
+ "name": "1",
+ "push_time": "2022-03-22T09:04:56.186Z",
+ "repository_id": 5,
+ "signed": false
+ }
+ ],
+ "type": "IMAGE"
+ }.deep_stringify_keys
+ end
+
+ subject { described_class.new(artifact).as_json }
+
+ it 'returns the Harbor artifact' do
+ expect(subject).to include({
+ harbor_id: 5,
+ size: 774790,
+ push_time: "2022-03-22T09:04:56.170Z".to_datetime,
+ digest: "sha256:14d4f50961544fdb669075c442509f194bdc4c0e344bde06e35dbd55af842a38",
+ tags: %w[2 1]
+ })
+ end
+end
diff --git a/spec/serializers/integrations/harbor_serializers/artifact_serializer_spec.rb b/spec/serializers/integrations/harbor_serializers/artifact_serializer_spec.rb
new file mode 100644
index 00000000000..9879c0a6434
--- /dev/null
+++ b/spec/serializers/integrations/harbor_serializers/artifact_serializer_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::HarborSerializers::ArtifactSerializer do
+ it 'represents Integrations::HarborSerializers::ArtifactEntity entities' do
+ expect(described_class.entity_class).to eq(Integrations::HarborSerializers::ArtifactEntity)
+ end
+end
diff --git a/spec/serializers/integrations/harbor_serializers/repository_entity_spec.rb b/spec/serializers/integrations/harbor_serializers/repository_entity_spec.rb
new file mode 100644
index 00000000000..29708bd0416
--- /dev/null
+++ b/spec/serializers/integrations/harbor_serializers/repository_entity_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::HarborSerializers::RepositoryEntity do
+ let_it_be(:harbor_integration) { create(:harbor_integration) }
+
+ let(:repo) do
+ {
+ "artifact_count" => 1,
+ "creation_time" => "2022-03-13T09:36:43.240Z",
+ "id" => 1,
+ "name" => "jihuprivate/busybox",
+ "project_id" => 4,
+ "pull_count" => 0,
+ "update_time" => "2022-03-13T09:36:43.240Z"
+ }.deep_stringify_keys
+ end
+
+ subject { described_class.new(repo, url: "https://demo.goharbor.io", project_name: "jihuprivate").as_json }
+
+ context 'with normal repository data' do
+ it 'returns the Harbor repository' do
+ expect(subject).to include({
+ artifact_count: 1,
+ creation_time: "2022-03-13T09:36:43.240Z".to_datetime,
+ harbor_id: 1,
+ name: "jihuprivate/busybox",
+ harbor_project_id: 4,
+ pull_count: 0,
+ update_time: "2022-03-13T09:36:43.240Z".to_datetime,
+ location: "https://demo.goharbor.io/harbor/projects/4/repositories/busybox"
+ })
+ end
+ end
+
+ context 'with data that may contain path traversal attacks' do
+ before do
+ repo["project_id"] = './../../../../../etc/hosts'
+ end
+
+ it 'returns empty location' do
+ expect(subject).to include({
+ artifact_count: 1,
+ creation_time: "2022-03-13T09:36:43.240Z".to_datetime,
+ harbor_id: 1,
+ name: "jihuprivate/busybox",
+ harbor_project_id: './../../../../../etc/hosts',
+ pull_count: 0,
+ update_time: "2022-03-13T09:36:43.240Z".to_datetime,
+ location: "https://demo.goharbor.io/"
+ })
+ end
+ end
+end
diff --git a/spec/serializers/integrations/harbor_serializers/repository_serializer_spec.rb b/spec/serializers/integrations/harbor_serializers/repository_serializer_spec.rb
new file mode 100644
index 00000000000..1a4235bea1e
--- /dev/null
+++ b/spec/serializers/integrations/harbor_serializers/repository_serializer_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::HarborSerializers::RepositorySerializer do
+ it 'represents Integrations::HarborSerializers::RepositoryEntity entities' do
+ expect(described_class.entity_class).to eq(Integrations::HarborSerializers::RepositoryEntity)
+ end
+end
diff --git a/spec/serializers/integrations/harbor_serializers/tag_entity_spec.rb b/spec/serializers/integrations/harbor_serializers/tag_entity_spec.rb
new file mode 100644
index 00000000000..f4bc5f71d5b
--- /dev/null
+++ b/spec/serializers/integrations/harbor_serializers/tag_entity_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::HarborSerializers::TagEntity do
+ let_it_be(:harbor_integration) { create(:harbor_integration) }
+
+ let(:push_time) { "2022-03-22T09:04:56.186Z" }
+ let(:pull_time) { "2022-03-23T09:04:56.186Z" }
+
+ let(:tag) do
+ {
+ "artifact_id": 5,
+ "id": 6,
+ "immutable": false,
+ "name": "1",
+ "push_time": push_time,
+ "pull_time": pull_time,
+ "repository_id": 5,
+ "signed": false
+ }.deep_stringify_keys
+ end
+
+ subject { described_class.new(tag).as_json }
+
+ it 'returns the Harbor tag' do
+ expect(subject).to include({
+ harbor_repository_id: 5,
+ harbor_artifact_id: 5,
+ harbor_id: 6,
+ name: "1",
+ pull_time: pull_time.to_datetime.utc,
+ push_time: push_time.to_datetime.utc,
+ signed: false,
+ immutable: false
+ })
+ end
+end
diff --git a/spec/serializers/integrations/harbor_serializers/tag_serializer_spec.rb b/spec/serializers/integrations/harbor_serializers/tag_serializer_spec.rb
new file mode 100644
index 00000000000..45fee0b9b17
--- /dev/null
+++ b/spec/serializers/integrations/harbor_serializers/tag_serializer_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::HarborSerializers::TagSerializer do
+ it 'represents Integrations::HarborSerializers::TagEntity entities' do
+ expect(described_class.entity_class).to eq(Integrations::HarborSerializers::TagEntity)
+ end
+end
diff --git a/spec/serializers/issue_entity_spec.rb b/spec/serializers/issue_entity_spec.rb
index 9525ed02314..6b9c703c627 100644
--- a/spec/serializers/issue_entity_spec.rb
+++ b/spec/serializers/issue_entity_spec.rb
@@ -139,7 +139,7 @@ RSpec.describe IssueEntity do
end
it 'returns archived project doc' do
- expect(subject[:archived_project_docs_path]).to eq('/help/user/project/settings/index.md#archiving-a-project')
+ expect(subject[:archived_project_docs_path]).to eq('/help/user/project/settings/index.md#archive-a-project')
end
end
end
diff --git a/spec/serializers/member_user_entity_spec.rb b/spec/serializers/member_user_entity_spec.rb
index 85f29845d65..4dd6848c47b 100644
--- a/spec/serializers/member_user_entity_spec.rb
+++ b/spec/serializers/member_user_entity_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe MemberUserEntity do
shared_examples 'correctly exposes user two_factor_enabled' do
context 'when the current_user has a role lower than minimum manage member role' do
before do
- source.add_user(current_user, Gitlab::Access::DEVELOPER)
+ source.add_member(current_user, Gitlab::Access::DEVELOPER)
end
it 'does not expose user two_factor_enabled' do
@@ -65,7 +65,7 @@ RSpec.describe MemberUserEntity do
context 'when the current user has a minimum manage member role or higher' do
before do
- source.add_user(current_user, minimum_manage_member_role)
+ source.add_member(current_user, minimum_manage_member_role)
end
it 'matches json schema' do
diff --git a/spec/serializers/paginated_diff_entity_spec.rb b/spec/serializers/paginated_diff_entity_spec.rb
index db8bf92cbf5..9d4456c11d6 100644
--- a/spec/serializers/paginated_diff_entity_spec.rb
+++ b/spec/serializers/paginated_diff_entity_spec.rb
@@ -43,6 +43,7 @@ RSpec.describe PaginatedDiffEntity do
let(:options) { super().merge(merge_ref_head_diff: merge_ref_head_diff) }
before do
+ allow(merge_request).to receive(:cannot_be_merged?).and_return(true)
allow(MergeRequests::Conflicts::ListService).to receive(:new).and_return(conflicts)
end
diff --git a/spec/serializers/stage_entity_spec.rb b/spec/serializers/stage_entity_spec.rb
index b977d5d33aa..95d3fd254d4 100644
--- a/spec/serializers/stage_entity_spec.rb
+++ b/spec/serializers/stage_entity_spec.rb
@@ -12,12 +12,12 @@ RSpec.describe StageEntity do
end
let(:stage) do
- build(:ci_stage, pipeline: pipeline, name: 'test')
+ create(:ci_stage, pipeline: pipeline, status: :success)
end
before do
allow(request).to receive(:current_user).and_return(user)
- create(:ci_build, :success, pipeline: pipeline)
+ create(:ci_build, :success, pipeline: pipeline, stage_id: stage.id)
end
describe '#as_json' do
@@ -74,7 +74,7 @@ RSpec.describe StageEntity do
end
context 'with a skipped stage ' do
- let(:stage) { create(:ci_stage_entity, status: 'skipped') }
+ let(:stage) { create(:ci_stage, status: 'skipped') }
it 'contains play_all_manual' do
expect(subject[:status][:action]).to be_present
@@ -82,7 +82,7 @@ RSpec.describe StageEntity do
end
context 'with a scheduled stage ' do
- let(:stage) { create(:ci_stage_entity, status: 'scheduled') }
+ let(:stage) { create(:ci_stage, status: 'scheduled') }
it 'contains play_all_manual' do
expect(subject[:status][:action]).to be_present
@@ -90,7 +90,7 @@ RSpec.describe StageEntity do
end
context 'with a manual stage ' do
- let(:stage) { create(:ci_stage_entity, status: 'manual') }
+ let(:stage) { create(:ci_stage, status: 'manual') }
it 'contains play_all_manual' do
expect(subject[:status][:action]).to be_present
diff --git a/spec/serializers/stage_serializer_spec.rb b/spec/serializers/stage_serializer_spec.rb
index 0b5e87dc95b..24e8057375b 100644
--- a/spec/serializers/stage_serializer_spec.rb
+++ b/spec/serializers/stage_serializer_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe StageSerializer do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
- let(:resource) { create(:ci_stage_entity) }
+ let(:resource) { create(:ci_stage) }
let(:serializer) do
described_class.new(current_user: user, project: project)
@@ -21,7 +21,7 @@ RSpec.describe StageSerializer do
end
context 'with an array of entities' do
- let(:resource) { create_list(:ci_stage_entity, 2) }
+ let(:resource) { create_list(:ci_stage, 2) }
it 'serializes the array of pipelines' do
expect(subject).not_to be_empty
diff --git a/spec/serializers/test_reports_comparer_entity_spec.rb b/spec/serializers/test_reports_comparer_entity_spec.rb
index 3f88438ccde..78aa64edae0 100644
--- a/spec/serializers/test_reports_comparer_entity_spec.rb
+++ b/spec/serializers/test_reports_comparer_entity_spec.rb
@@ -7,8 +7,8 @@ RSpec.describe TestReportsComparerEntity do
let(:entity) { described_class.new(comparer) }
let(:comparer) { Gitlab::Ci::Reports::TestReportsComparer.new(base_reports, head_reports) }
- let(:base_reports) { Gitlab::Ci::Reports::TestReports.new }
- let(:head_reports) { Gitlab::Ci::Reports::TestReports.new }
+ let(:base_reports) { Gitlab::Ci::Reports::TestReport.new }
+ let(:head_reports) { Gitlab::Ci::Reports::TestReport.new }
describe '#as_json' do
subject { entity.as_json }
diff --git a/spec/serializers/test_reports_comparer_serializer_spec.rb b/spec/serializers/test_reports_comparer_serializer_spec.rb
index f9c37f49039..d19d9681e07 100644
--- a/spec/serializers/test_reports_comparer_serializer_spec.rb
+++ b/spec/serializers/test_reports_comparer_serializer_spec.rb
@@ -8,8 +8,8 @@ RSpec.describe TestReportsComparerSerializer do
let(:project) { double(:project) }
let(:serializer) { described_class.new(project: project).represent(comparer) }
let(:comparer) { Gitlab::Ci::Reports::TestReportsComparer.new(base_reports, head_reports) }
- let(:base_reports) { Gitlab::Ci::Reports::TestReports.new }
- let(:head_reports) { Gitlab::Ci::Reports::TestReports.new }
+ let(:base_reports) { Gitlab::Ci::Reports::TestReport.new }
+ let(:head_reports) { Gitlab::Ci::Reports::TestReport.new }
describe '#to_json' do
subject { serializer.to_json }
diff --git a/spec/services/alert_management/alerts/update_service_spec.rb b/spec/services/alert_management/alerts/update_service_spec.rb
index 9bdc9970807..8375c8cdf7d 100644
--- a/spec/services/alert_management/alerts/update_service_spec.rb
+++ b/spec/services/alert_management/alerts/update_service_spec.rb
@@ -249,57 +249,6 @@ RSpec.describe AlertManagement::Alerts::UpdateService do
it_behaves_like 'adds a system note'
end
-
- context 'with an associated issue' do
- let_it_be(:issue, reload: true) { create(:issue, project: project) }
-
- before do
- alert.update!(issue: issue)
- end
-
- shared_examples 'does not sync with the incident status' do
- specify do
- expect(::Issues::UpdateService).not_to receive(:new)
- expect { response }.to change { alert.acknowledged? }.to(true)
- end
- end
-
- it_behaves_like 'does not sync with the incident status'
-
- context 'when the issue is an incident' do
- before do
- issue.update!(issue_type: Issue.issue_types[:incident])
- end
-
- it_behaves_like 'does not sync with the incident status'
-
- context 'when the incident has an escalation status' do
- let_it_be(:escalation_status, reload: true) { create(:incident_management_issuable_escalation_status, issue: issue) }
-
- it 'updates the incident escalation status with the new alert status' do
- expect(::Issues::UpdateService).to receive(:new).once.and_call_original
- expect(described_class).to receive(:new).once.and_call_original
-
- expect { response }.to change { escalation_status.reload.acknowledged? }.to(true)
- .and change { alert.reload.acknowledged? }.to(true)
- end
-
- context 'when the statuses match' do
- before do
- escalation_status.update!(status_event: :acknowledge)
- end
-
- it_behaves_like 'does not sync with the incident status'
- end
- end
- end
- end
-
- context 'when a status change reason is included' do
- let(:params) { { status: new_status, status_change_reason: ' by changing the incident status' } }
-
- it_behaves_like 'adds a system note', /changed the status to \*\*Acknowledged\*\* by changing the incident status/
- end
end
end
end
diff --git a/spec/services/ci/abort_pipelines_service_spec.rb b/spec/services/ci/abort_pipelines_service_spec.rb
index 9f9519d6829..e43faf0af51 100644
--- a/spec/services/ci/abort_pipelines_service_spec.rb
+++ b/spec/services/ci/abort_pipelines_service_spec.rb
@@ -12,13 +12,13 @@ RSpec.describe Ci::AbortPipelinesService do
let_it_be(:cancelable_build, reload: true) { create(:ci_build, :running, pipeline: cancelable_pipeline) }
let_it_be(:non_cancelable_build, reload: true) { create(:ci_build, :success, pipeline: cancelable_pipeline) }
- let_it_be(:cancelable_stage, reload: true) { create(:ci_stage_entity, name: 'stageA', status: :running, pipeline: cancelable_pipeline, project: project) }
- let_it_be(:non_cancelable_stage, reload: true) { create(:ci_stage_entity, name: 'stageB', status: :success, pipeline: cancelable_pipeline, project: project) }
+ let_it_be(:cancelable_stage, reload: true) { create(:ci_stage, name: 'stageA', status: :running, pipeline: cancelable_pipeline, project: project) }
+ let_it_be(:non_cancelable_stage, reload: true) { create(:ci_stage, name: 'stageB', status: :success, pipeline: cancelable_pipeline, project: project) }
let_it_be(:manual_pipeline_cancelable_build, reload: true) { create(:ci_build, :created, pipeline: manual_pipeline) }
let_it_be(:manual_pipeline_non_cancelable_build, reload: true) { create(:ci_build, :manual, pipeline: manual_pipeline) }
- let_it_be(:manual_pipeline_cancelable_stage, reload: true) { create(:ci_stage_entity, name: 'stageA', status: :created, pipeline: manual_pipeline, project: project) }
- let_it_be(:manual_pipeline_non_cancelable_stage, reload: true) { create(:ci_stage_entity, name: 'stageB', status: :success, pipeline: manual_pipeline, project: project) }
+ let_it_be(:manual_pipeline_cancelable_stage, reload: true) { create(:ci_stage, name: 'stageA', status: :created, pipeline: manual_pipeline, project: project) }
+ let_it_be(:manual_pipeline_non_cancelable_stage, reload: true) { create(:ci_stage, name: 'stageB', status: :success, pipeline: manual_pipeline, project: project) }
describe '#execute' do
def expect_correct_pipeline_cancellations
diff --git a/spec/services/ci/create_pipeline_service/include_spec.rb b/spec/services/ci/create_pipeline_service/include_spec.rb
index 3116801d50c..849eb5885f6 100644
--- a/spec/services/ci/create_pipeline_service/include_spec.rb
+++ b/spec/services/ci/create_pipeline_service/include_spec.rb
@@ -126,5 +126,51 @@ RSpec.describe Ci::CreatePipelineService do
it_behaves_like 'not including the file'
end
end
+
+ context 'with ci_increase_includes_to_250 enabled on root project' do
+ let_it_be(:included_project) do
+ create(:project, :repository).tap { |p| p.add_developer(user) }
+ end
+
+ before do
+ stub_const('::Gitlab::Ci::Config::External::Context::MAX_INCLUDES', 0)
+ stub_const('::Gitlab::Ci::Config::External::Context::TRIAL_MAX_INCLUDES', 3)
+
+ stub_feature_flags(ci_increase_includes_to_250: false)
+ stub_feature_flags(ci_increase_includes_to_250: project)
+
+ allow(Project)
+ .to receive(:find_by_full_path)
+ .with(included_project.full_path)
+ .and_return(included_project)
+
+ allow(included_project.repository)
+ .to receive(:blob_data_at).with(included_project.commit.id, '.gitlab-ci.yml')
+ .and_return(local_config)
+
+ allow(included_project.repository)
+ .to receive(:blob_data_at).with(included_project.commit.id, file_location)
+ .and_return(File.read(Rails.root.join(file_location)))
+ end
+
+ let(:config) do
+ <<~EOY
+ include:
+ - project: #{included_project.full_path}
+ file: .gitlab-ci.yml
+ EOY
+ end
+
+ let(:local_config) do
+ <<~EOY
+ include: #{file_location}
+
+ job:
+ script: exit 0
+ EOY
+ end
+
+ it_behaves_like 'including the file'
+ end
end
end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index aac059f2104..9cef7f7dadb 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -2087,6 +2087,12 @@ RSpec.describe Ci::CreatePipelineService do
rules:
- changes:
- $CI_JOB_NAME*
+
+ changes-paths:
+ script: "I am using a new syntax!"
+ rules:
+ - changes:
+ paths: [README.md]
EOY
end
@@ -2098,8 +2104,9 @@ RSpec.describe Ci::CreatePipelineService do
- it 'creates five jobs' do
+ it 'creates six jobs' do
expect(pipeline).to be_persisted
- expect(build_names)
- .to contain_exactly('regular-job', 'rules-job', 'delayed-job', 'negligible-job', 'README')
+ expect(build_names).to contain_exactly(
+ 'regular-job', 'rules-job', 'delayed-job', 'negligible-job', 'README', 'changes-paths'
+ )
end
it 'sets when: for all jobs' do
diff --git a/spec/services/ci/ensure_stage_service_spec.rb b/spec/services/ci/ensure_stage_service_spec.rb
index 3ede214cdd4..026814edda6 100644
--- a/spec/services/ci/ensure_stage_service_spec.rb
+++ b/spec/services/ci/ensure_stage_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Ci::EnsureStageService, '#execute' do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
- let(:stage) { create(:ci_stage_entity) }
+ let(:stage) { create(:ci_stage) }
let(:job) { build(:ci_build) }
let(:service) { described_class.new(project, user) }
diff --git a/spec/services/ci/generate_coverage_reports_service_spec.rb b/spec/services/ci/generate_coverage_reports_service_spec.rb
index d12a9268e7e..212e6be9d07 100644
--- a/spec/services/ci/generate_coverage_reports_service_spec.rb
+++ b/spec/services/ci/generate_coverage_reports_service_spec.rb
@@ -3,8 +3,9 @@
require 'spec_helper'
RSpec.describe Ci::GenerateCoverageReportsService do
+ let_it_be(:project) { create(:project, :repository) }
+
let(:service) { described_class.new(project) }
- let(:project) { create(:project, :repository) }
describe '#execute' do
subject { service.execute(base_pipeline, head_pipeline) }
@@ -52,4 +53,41 @@ RSpec.describe Ci::GenerateCoverageReportsService do
end
end
end
+
+ describe '#latest?' do
+ subject { service.latest?(base_pipeline, head_pipeline, data) }
+
+ let!(:base_pipeline) { nil }
+ let!(:head_pipeline) { create(:ci_pipeline, :with_coverage_reports, project: project) }
+ let!(:child_pipeline) { create(:ci_pipeline, child_of: head_pipeline) }
+ let!(:key) { service.send(:key, base_pipeline, head_pipeline) }
+
+ let(:data) { { key: key } }
+
+ context 'when cache key is latest' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when head pipeline has been updated' do
+ before do
+ head_pipeline.update_column(:updated_at, 1.minute.from_now)
+ end
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'when cache key is empty' do
+ let(:data) { { key: nil } }
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'when the pipeline has a child that is updated' do
+ before do
+ child_pipeline.update_column(:updated_at, 1.minute.from_now)
+ end
+
+ it { is_expected.to be_falsy }
+ end
+ end
end
diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb
index 01f240805f5..b7a810ce47e 100644
--- a/spec/services/ci/job_artifacts/create_service_spec.rb
+++ b/spec/services/ci/job_artifacts/create_service_spec.rb
@@ -30,14 +30,6 @@ RSpec.describe Ci::JobArtifacts::CreateService do
UploadedFile.new(upload.path, **params)
end
- def unique_metrics_report_uploaders
- Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
- event_names: described_class::METRICS_REPORT_UPLOAD_EVENT_NAME,
- start_date: 2.weeks.ago,
- end_date: 2.weeks.from_now
- )
- end
-
describe '#execute' do
subject { service.execute(artifacts_file, params, metadata_file: metadata_file) }
@@ -61,12 +53,6 @@ RSpec.describe Ci::JobArtifacts::CreateService do
expect(new_artifact.locked).to eq(job.pipeline.locked)
end
- it 'does not track the job user_id' do
- subject
-
- expect(unique_metrics_report_uploaders).to eq(0)
- end
-
context 'when metadata file is also uploaded' do
let(:metadata_file) do
file_to_upload('spec/fixtures/ci_build_artifacts_metadata.gz', sha256: artifacts_sha256)
@@ -188,20 +174,6 @@ RSpec.describe Ci::JobArtifacts::CreateService do
end
end
- context 'when artifact_type is metrics' do
- before do
- allow(job).to receive(:user_id).and_return(123)
- end
-
- let(:params) { { 'artifact_type' => 'metrics', 'artifact_format' => 'gzip' }.with_indifferent_access }
-
- it 'tracks the job user_id' do
- subject
-
- expect(unique_metrics_report_uploaders).to eq(1)
- end
- end
-
shared_examples 'rescues object storage error' do |klass, message, expected_message|
it "handles #{klass}" do
allow_next_instance_of(JobArtifactUploader) do |uploader|
diff --git a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
index 3a04a3af03e..05069054483 100644
--- a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
@@ -181,6 +181,26 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
end
end
+ context 'when artifact belongs to a project not undergoing refresh' do
+ context 'and skip_projects_on_refresh is set to false (default)' do
+ it 'does not log any warnings', :aggregate_failures do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger).not_to receive(:warn_artifact_deletion_during_stats_refresh)
+
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-2)
+ end
+ end
+
+ context 'and skip_projects_on_refresh is set to true' do
+ let(:skip_projects_on_refresh) { true }
+
+ it 'does not log any warnings', :aggregate_failures do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger).not_to receive(:warn_skipped_artifact_deletion_during_stats_refresh)
+
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-2)
+ end
+ end
+ end
+
context 'ProjectStatistics' do
it 'resets project statistics' do
expect(ProjectStatistics).to receive(:increment_statistic).once
diff --git a/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb b/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb
index 403afde5da3..31548793bac 100644
--- a/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb
+++ b/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb
@@ -2,16 +2,18 @@
require 'spec_helper'
-RSpec.describe ::Ci::PipelineArtifacts::CoverageReportService do
+RSpec.describe Ci::PipelineArtifacts::CoverageReportService do
describe '#execute' do
let_it_be(:project) { create(:project, :repository) }
subject { described_class.new(pipeline).execute }
- shared_examples 'creating a pipeline coverage report' do
+ shared_examples 'creating or updating a pipeline coverage report' do
context 'when pipeline is finished' do
- it 'creates a pipeline artifact' do
- expect { subject }.to change { Ci::PipelineArtifact.count }.from(0).to(1)
+ it 'creates or updates a pipeline artifact' do
+ subject
+
+ expect(pipeline.reload.pipeline_artifacts.count).to eq(1)
end
it 'persists the default file name' do
@@ -22,7 +24,7 @@ RSpec.describe ::Ci::PipelineArtifacts::CoverageReportService do
expect(file.filename).to eq('code_coverage.json')
end
- it 'sets expire_at to 1 week' do
+ it 'sets expire_at to 1 week from now' do
freeze_time do
subject
@@ -31,13 +33,16 @@ RSpec.describe ::Ci::PipelineArtifacts::CoverageReportService do
expect(pipeline_artifact.expire_at).to eq(1.week.from_now)
end
end
- end
- context 'when pipeline artifact has already been created' do
- it 'does not raise an error and does not persist the same artifact twice' do
- expect { 2.times { described_class.new(pipeline).execute } }.not_to raise_error
+ it 'logs relevant information' do
+ expect(Gitlab::AppLogger).to receive(:info).with({
+ project_id: project.id,
+ pipeline_id: pipeline.id,
+ pipeline_artifact_id: kind_of(Numeric),
+ message: kind_of(String)
+ })
- expect(Ci::PipelineArtifact.count).to eq(1)
+ subject
end
end
end
@@ -45,21 +50,32 @@ RSpec.describe ::Ci::PipelineArtifacts::CoverageReportService do
context 'when pipeline has coverage report' do
let!(:pipeline) { create(:ci_pipeline, :with_coverage_reports, project: project) }
- it_behaves_like 'creating a pipeline coverage report'
+ it_behaves_like 'creating or updating a pipeline coverage report'
end
context 'when pipeline has coverage report from child pipeline' do
let!(:pipeline) { create(:ci_pipeline, :success, project: project) }
let!(:child_pipeline) { create(:ci_pipeline, :with_coverage_reports, project: project, child_of: pipeline) }
- it_behaves_like 'creating a pipeline coverage report'
+ it_behaves_like 'creating or updating a pipeline coverage report'
+ end
+
+ context 'when pipeline has existing pipeline artifact for coverage report' do
+ let!(:pipeline) { create(:ci_pipeline, :with_coverage_reports, project: project) }
+ let!(:child_pipeline) { create(:ci_pipeline, :with_coverage_reports, project: project, child_of: pipeline) }
+
+ let!(:pipeline_artifact) do
+ create(:ci_pipeline_artifact, :with_coverage_report, pipeline: pipeline, expire_at: 1.day.from_now)
+ end
+
+ it_behaves_like 'creating or updating a pipeline coverage report'
end
context 'when pipeline is running and coverage report does not exist' do
let(:pipeline) { create(:ci_pipeline, :running) }
it 'does not persist data' do
- expect { subject }.not_to change { Ci::PipelineArtifact.count }
+ expect { subject }.not_to change { Ci::PipelineArtifact.count }.from(0)
end
end
end
diff --git a/spec/services/ci/play_manual_stage_service_spec.rb b/spec/services/ci/play_manual_stage_service_spec.rb
index 3e2a95ee975..b3ae92aa787 100644
--- a/spec/services/ci/play_manual_stage_service_spec.rb
+++ b/spec/services/ci/play_manual_stage_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Ci::PlayManualStageService, '#execute' do
let(:stage_status) { 'manual' }
let(:stage) do
- create(:ci_stage_entity,
+ create(:ci_stage,
pipeline: pipeline,
project: project,
name: 'test')
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 74adbc4efc8..2316575f164 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -750,41 +750,7 @@ module Ci
end
context 'when using pending builds table' do
- before do
- stub_feature_flags(ci_pending_builds_queue_source: true)
- end
-
- context 'with ci_queuing_use_denormalized_data_strategy enabled' do
- before do
- stub_feature_flags(ci_queuing_use_denormalized_data_strategy: true)
- end
-
- include_examples 'handles runner assignment'
- end
-
- context 'with ci_queuing_use_denormalized_data_strategy disabled' do
- before do
- skip_if_multiple_databases_are_setup
-
- stub_feature_flags(ci_queuing_use_denormalized_data_strategy: false)
- end
-
- around do |example|
- allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
- example.run
- end
- end
-
- include_examples 'handles runner assignment'
- end
-
- context 'with ci_queuing_use_denormalized_data_strategy enabled' do
- before do
- stub_feature_flags(ci_queuing_use_denormalized_data_strategy: true)
- end
-
- include_examples 'handles runner assignment'
- end
+ include_examples 'handles runner assignment'
context 'when a conflicting data is stored in denormalized table' do
let!(:specific_runner) { create(:ci_runner, :project, projects: [project], tag_list: %w[conflict]) }
@@ -805,22 +771,6 @@ module Ci
end
end
end
-
- context 'when not using pending builds table' do
- before do
- skip_if_multiple_databases_are_setup
-
- stub_feature_flags(ci_pending_builds_queue_source: false)
- end
-
- around do |example|
- allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
- example.run
- end
- end
-
- include_examples 'handles runner assignment'
- end
end
describe '#register_success' do
@@ -888,14 +838,6 @@ module Ci
shared_examples 'metrics collector' do
it_behaves_like 'attempt counter collector'
it_behaves_like 'jobs queueing time histogram collector'
-
- context 'when using denormalized data is disabled' do
- before do
- stub_feature_flags(ci_pending_builds_maintain_denormalized_data: false)
- end
-
- it_behaves_like 'jobs queueing time histogram collector'
- end
end
context 'when shared runner is used' do
diff --git a/spec/services/ci/retry_job_service_spec.rb b/spec/services/ci/retry_job_service_spec.rb
index acc7a99637b..f042471bd1f 100644
--- a/spec/services/ci/retry_job_service_spec.rb
+++ b/spec/services/ci/retry_job_service_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Ci::RetryJobService do
end
let_it_be(:stage) do
- create(:ci_stage_entity, project: project,
+ create(:ci_stage, project: project,
pipeline: pipeline,
name: 'test')
end
@@ -154,7 +154,7 @@ RSpec.describe Ci::RetryJobService do
end
context 'when the pipeline has other jobs' do
- let!(:stage2) { create(:ci_stage_entity, project: project, pipeline: pipeline, name: 'deploy') }
+ let!(:stage2) { create(:ci_stage, project: project, pipeline: pipeline, name: 'deploy') }
let!(:build2) { create(:ci_build, pipeline: pipeline, stage_id: stage.id ) }
let!(:deploy) { create(:ci_build, pipeline: pipeline, stage_id: stage2.id) }
let!(:deploy_needs_build2) { create(:ci_build_need, build: deploy, name: build2.name) }
diff --git a/spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb b/spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb
new file mode 100644
index 00000000000..f8313eaab90
--- /dev/null
+++ b/spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb
@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute' do
+ subject(:execute) { described_class.new.execute }
+
+ let_it_be(:runner_14_0_1) { create(:ci_runner, version: '14.0.1') }
+ let_it_be(:runner_version_14_0_1) do
+ create(:ci_runner_version, version: '14.0.1', status: :not_available)
+ end
+
+ context 'with RunnerUpgradeCheck recommending 14.0.2' do
+ before do
+ stub_const('Ci::Runners::ReconcileExistingRunnerVersionsService::VERSION_BATCH_SIZE', 1)
+
+ allow(::Gitlab::Ci::RunnerUpgradeCheck.instance)
+ .to receive(:check_runner_upgrade_status)
+ .and_return({ recommended: ::Gitlab::VersionInfo.new(14, 0, 2) })
+ end
+
+ context 'with runner with new version' do
+ let!(:runner_14_0_2) { create(:ci_runner, version: '14.0.2') }
+ let!(:runner_version_14_0_0) { create(:ci_runner_version, version: '14.0.0', status: :not_available) }
+ let!(:runner_14_0_0) { create(:ci_runner, version: '14.0.0') }
+
+ before do
+ allow(::Gitlab::Ci::RunnerUpgradeCheck.instance)
+ .to receive(:check_runner_upgrade_status)
+ .with('14.0.2')
+ .and_return({ not_available: ::Gitlab::VersionInfo.new(14, 0, 2) })
+ .once
+ end
+
+ it 'creates and updates expected ci_runner_versions entries', :aggregate_failures do
+ expect(Ci::RunnerVersion).to receive(:insert_all)
+ .ordered
+ .with([{ version: '14.0.2' }], anything)
+ .once
+ .and_call_original
+
+ result = nil
+ expect { result = execute }
+ .to change { runner_version_14_0_0.reload.status }.from('not_available').to('recommended')
+ .and change { runner_version_14_0_1.reload.status }.from('not_available').to('recommended')
+ .and change { ::Ci::RunnerVersion.find_by(version: '14.0.2')&.status }.from(nil).to('not_available')
+
+ expect(result).to eq({
+ status: :success,
+ total_inserted: 1, # 14.0.2 is inserted
+ total_updated: 3, # 14.0.0, 14.0.1 are updated, and newly inserted 14.0.2's status is calculated
+ total_deleted: 0
+ })
+ end
+ end
+
+ context 'with orphan ci_runner_version' do
+ let!(:runner_version_14_0_2) { create(:ci_runner_version, version: '14.0.2', status: :not_available) }
+
+ before do
+ allow(::Gitlab::Ci::RunnerUpgradeCheck.instance)
+ .to receive(:check_runner_upgrade_status)
+ .and_return({ not_available: ::Gitlab::VersionInfo.new(14, 0, 2) })
+ end
+
+ it 'deletes orphan ci_runner_versions entry', :aggregate_failures do
+ result = nil
+ expect { result = execute }
+ .to change { ::Ci::RunnerVersion.find_by_version('14.0.2')&.status }.from('not_available').to(nil)
+ .and not_change { runner_version_14_0_1.reload.status }.from('not_available')
+
+ expect(result).to eq({
+ status: :success,
+ total_inserted: 0,
+ total_updated: 0,
+ total_deleted: 1 # 14.0.2 is deleted
+ })
+ end
+ end
+
+ context 'with no runner version changes' do
+ before do
+ allow(::Gitlab::Ci::RunnerUpgradeCheck.instance)
+ .to receive(:check_runner_upgrade_status)
+ .and_return({ not_available: ::Gitlab::VersionInfo.new(14, 0, 1) })
+ end
+
+ it 'does not modify ci_runner_versions entries', :aggregate_failures do
+ result = nil
+ expect { result = execute }.not_to change { runner_version_14_0_1.reload.status }.from('not_available')
+
+ expect(result).to eq({
+ status: :success,
+ total_inserted: 0,
+ total_updated: 0,
+ total_deleted: 0
+ })
+ end
+ end
+
+ context 'with failing version check' do
+ before do
+ allow(::Gitlab::Ci::RunnerUpgradeCheck.instance)
+ .to receive(:check_runner_upgrade_status)
+ .and_return({ error: ::Gitlab::VersionInfo.new(14, 0, 1) })
+ end
+
+ it 'makes no changes to ci_runner_versions', :aggregate_failures do
+ result = nil
+ expect { result = execute }.not_to change { runner_version_14_0_1.reload.status }.from('not_available')
+
+ expect(result).to eq({
+ status: :success,
+ total_inserted: 0,
+ total_updated: 0,
+ total_deleted: 0
+ })
+ end
+ end
+ end
+
+ context 'integration testing with Gitlab::Ci::RunnerUpgradeCheck' do
+ let(:available_runner_releases) do
+ %w[14.0.0 14.0.1]
+ end
+
+ before do
+ url = ::Gitlab::CurrentSettings.current_application_settings.public_runner_releases_url
+
+ WebMock.stub_request(:get, url).to_return(
+ body: available_runner_releases.map { |v| { name: v } }.to_json,
+ status: 200,
+ headers: { 'Content-Type' => 'application/json' }
+ )
+ end
+
+ it 'does not modify ci_runner_versions entries', :aggregate_failures do
+ result = nil
+ expect { result = execute }.not_to change { runner_version_14_0_1.reload.status }.from('not_available')
+
+ expect(result).to eq({
+ status: :success,
+ total_inserted: 0,
+ total_updated: 0,
+ total_deleted: 0
+ })
+ end
+ end
+end
diff --git a/spec/services/ci/runners/register_runner_service_spec.rb b/spec/services/ci/runners/register_runner_service_spec.rb
index f43fd823078..03dcf851e53 100644
--- a/spec/services/ci/runners/register_runner_service_spec.rb
+++ b/spec/services/ci/runners/register_runner_service_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
stub_application_setting(valid_runner_registrars: ApplicationSetting::VALID_RUNNER_REGISTRAR_TYPES)
end
- subject { described_class.new.execute(token, args) }
+ subject(:runner) { described_class.new.execute(token, args) }
context 'when no token is provided' do
let(:token) { '' }
@@ -83,6 +83,9 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
expect(subject.platform).to eq args[:platform]
expect(subject.architecture).to eq args[:architecture]
expect(subject.ip_address).to eq args[:ip_address]
+
+ expect(Ci::Runner.tagged_with('tag1')).to include(subject)
+ expect(Ci::Runner.tagged_with('tag2')).to include(subject)
end
end
@@ -230,5 +233,41 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
end
end
end
+
+ context 'when tags are provided' do
+ let(:token) { registration_token }
+
+ let(:args) do
+ { tag_list: %w(tag1 tag2) }
+ end
+
+ it 'creates runner with tags' do
+ expect(runner).to be_persisted
+
+ expect(runner.tags).to contain_exactly(
+ an_object_having_attributes(name: 'tag1'),
+ an_object_having_attributes(name: 'tag2')
+ )
+ end
+
+ it 'creates tags in bulk' do
+ expect(Gitlab::Ci::Tags::BulkInsert).to receive(:bulk_insert_tags!).and_call_original
+
+ expect(runner).to be_persisted
+ end
+
+ context 'and tag list exceeds limit' do
+ let(:args) do
+ { tag_list: (1..Ci::Runner::TAG_LIST_MAX_LENGTH + 1).map { |i| "tag#{i}" } }
+ end
+
+ it 'does not create any tags' do
+ expect(Gitlab::Ci::Tags::BulkInsert).not_to receive(:bulk_insert_tags!)
+
+ expect(runner).not_to be_persisted
+ expect(runner.tags).to be_empty
+ end
+ end
+ end
end
end
diff --git a/spec/services/ci/unlock_artifacts_service_spec.rb b/spec/services/ci/unlock_artifacts_service_spec.rb
index 8ee07fc44c8..94d39fc9f14 100644
--- a/spec/services/ci/unlock_artifacts_service_spec.rb
+++ b/spec/services/ci/unlock_artifacts_service_spec.rb
@@ -130,7 +130,7 @@ RSpec.describe Ci::UnlockArtifactsService do
WHERE
"ci_pipelines"."ci_ref_id" = #{ci_ref.id}
AND "ci_pipelines"."locked" = 1
- AND (ci_pipelines.id < #{before_pipeline.id})
+ AND "ci_pipelines"."id" < #{before_pipeline.id}
AND "ci_pipelines"."id" NOT IN
(WITH RECURSIVE
"base_and_descendants"
diff --git a/spec/services/ci/update_pending_build_service_spec.rb b/spec/services/ci/update_pending_build_service_spec.rb
index 2bb0aded24a..e49b22299f0 100644
--- a/spec/services/ci/update_pending_build_service_spec.rb
+++ b/spec/services/ci/update_pending_build_service_spec.rb
@@ -42,19 +42,6 @@ RSpec.describe Ci::UpdatePendingBuildService do
expect(pending_build_1.instance_runners_enabled).to be_truthy
expect(pending_build_2.instance_runners_enabled).to be_truthy
end
-
- context 'when ci_pending_builds_maintain_denormalized_data is disabled' do
- before do
- stub_feature_flags(ci_pending_builds_maintain_denormalized_data: false)
- end
-
- it 'does not update all pending builds', :aggregate_failures do
- update_pending_builds
-
- expect(pending_build_1.instance_runners_enabled).to be_falsey
- expect(pending_build_2.instance_runners_enabled).to be_truthy
- end
- end
end
context 'when model is a project with pending builds' do
@@ -66,19 +53,6 @@ RSpec.describe Ci::UpdatePendingBuildService do
expect(pending_build_1.instance_runners_enabled).to be_truthy
expect(pending_build_2.instance_runners_enabled).to be_truthy
end
-
- context 'when ci_pending_builds_maintain_denormalized_data is disabled' do
- before do
- stub_feature_flags(ci_pending_builds_maintain_denormalized_data: false)
- end
-
- it 'does not update all pending builds', :aggregate_failures do
- update_pending_builds
-
- expect(pending_build_1.instance_runners_enabled).to be_falsey
- expect(pending_build_2.instance_runners_enabled).to be_truthy
- end
- end
end
end
end
diff --git a/spec/services/clusters/applications/create_service_spec.rb b/spec/services/clusters/applications/create_service_spec.rb
index eb907377ca8..00a67a9b2ef 100644
--- a/spec/services/clusters/applications/create_service_spec.rb
+++ b/spec/services/clusters/applications/create_service_spec.rb
@@ -168,29 +168,6 @@ RSpec.describe Clusters::Applications::CreateService do
subject
end
end
-
- context 'elastic stack application' do
- let(:params) do
- {
- application: 'elastic_stack'
- }
- end
-
- before do
- create(:clusters_applications_ingress, :installed, external_ip: "127.0.0.0", cluster: cluster)
- expect_any_instance_of(Clusters::Applications::ElasticStack)
- .to receive(:make_scheduled!)
- .and_call_original
- end
-
- it 'creates the application' do
- expect do
- subject
-
- cluster.reload
- end.to change(cluster, :application_elastic_stack)
- end
- end
end
context 'invalid application' do
diff --git a/spec/services/clusters/integrations/create_service_spec.rb b/spec/services/clusters/integrations/create_service_spec.rb
index 6dac97ebf8f..016511a3c01 100644
--- a/spec/services/clusters/integrations/create_service_spec.rb
+++ b/spec/services/clusters/integrations/create_service_spec.rb
@@ -61,7 +61,6 @@ RSpec.describe Clusters::Integrations::CreateService, '#execute' do
end
it_behaves_like 'a cluster integration', 'prometheus'
- it_behaves_like 'a cluster integration', 'elastic_stack'
context 'when application_type is invalid' do
let(:params) do
diff --git a/spec/services/deployments/create_for_build_service_spec.rb b/spec/services/deployments/create_for_build_service_spec.rb
index 6fc7c9e56a6..38d94580512 100644
--- a/spec/services/deployments/create_for_build_service_spec.rb
+++ b/spec/services/deployments/create_for_build_service_spec.rb
@@ -25,6 +25,7 @@ RSpec.describe Deployments::CreateForBuildService do
expect(build.deployment.deployable).to eq(build)
expect(build.deployment.deployable_type).to eq('CommitStatus')
expect(build.deployment.environment).to eq(build.persisted_environment)
+ expect(build.deployment.valid?).to be_truthy
end
- context 'when creation failure occures' do
+ context 'when creation failure occurs' do
diff --git a/spec/services/deployments/create_service_spec.rb b/spec/services/deployments/create_service_spec.rb
index f6f4c68a6f1..0f2a6ce32e1 100644
--- a/spec/services/deployments/create_service_spec.rb
+++ b/spec/services/deployments/create_service_spec.rb
@@ -21,34 +21,11 @@ RSpec.describe Deployments::CreateService do
expect(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
expect(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
- expect_next_instance_of(Deployment) do |deployment|
- expect(deployment).to receive(:execute_hooks)
- end
+ expect(Deployments::HooksWorker).to receive(:perform_async)
expect(service.execute).to be_persisted
end
- context 'when `deployment_hooks_skip_worker` flag is disabled' do
- before do
- stub_feature_flags(deployment_hooks_skip_worker: false)
- end
-
- it 'executes Deployments::HooksWorker asynchronously' do
- service = described_class.new(
- environment,
- user,
- sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
- ref: 'master',
- tag: false,
- status: 'success'
- )
-
- expect(Deployments::HooksWorker).to receive(:perform_async)
-
- service.execute
- end
- end
-
it 'does not change the status if no status is given' do
service = described_class.new(
environment,
@@ -60,9 +37,7 @@ RSpec.describe Deployments::CreateService do
expect(Deployments::UpdateEnvironmentWorker).not_to receive(:perform_async)
expect(Deployments::LinkMergeRequestWorker).not_to receive(:perform_async)
- expect_next_instance_of(Deployment) do |deployment|
- expect(deployment).not_to receive(:execute_hooks)
- end
+ expect(Deployments::HooksWorker).not_to receive(:perform_async)
expect(service.execute).to be_persisted
end
@@ -80,9 +55,11 @@ RSpec.describe Deployments::CreateService do
it 'does not create a new deployment' do
described_class.new(environment, user, params).execute
- expect do
- described_class.new(environment.reload, user, params).execute
- end.not_to change { Deployment.count }
+ expect(Deployments::UpdateEnvironmentWorker).not_to receive(:perform_async)
+ expect(Deployments::LinkMergeRequestWorker).not_to receive(:perform_async)
+ expect(Deployments::HooksWorker).not_to receive(:perform_async)
+
+ described_class.new(environment.reload, user, params).execute
end
end
end
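Illustrative sketch (not part of the diff): the hunks above replace a per-instance `execute_hooks` expectation with an assertion that `Deployments::HooksWorker` is enqueued. Reusing the `environment` and `user` already defined in this spec, the resulting example has roughly this shape:

it 'enqueues the deployment hooks worker' do
  service = described_class.new(
    environment,
    user,
    sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
    ref: 'master',
    tag: false,
    status: 'success'
  )

  # Hook execution now happens in a background job, so the spec asserts on
  # the worker instead of on the Deployment instance.
  expect(Deployments::HooksWorker).to receive(:perform_async)

  expect(service.execute).to be_persisted
end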
diff --git a/spec/services/deployments/update_environment_service_spec.rb b/spec/services/deployments/update_environment_service_spec.rb
index e2d7a80fde3..8ab53a37a33 100644
--- a/spec/services/deployments/update_environment_service_spec.rb
+++ b/spec/services/deployments/update_environment_service_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe Deployments::UpdateEnvironmentService do
before do
allow(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
- allow(deployment).to receive(:execute_hooks)
+ allow(Deployments::HooksWorker).to receive(:perform_async)
job.success! # Create/Succeed deployment
end
diff --git a/spec/services/draft_notes/publish_service_spec.rb b/spec/services/draft_notes/publish_service_spec.rb
index 51ef30c91c0..81443eed7d3 100644
--- a/spec/services/draft_notes/publish_service_spec.rb
+++ b/spec/services/draft_notes/publish_service_spec.rb
@@ -168,7 +168,7 @@ RSpec.describe DraftNotes::PublishService do
# NOTE: This should be reduced as we work on reducing Gitaly calls.
# Gitaly requests shouldn't go above this threshold as much as possible
# as it may add more to the Gitaly N+1 issue we are experiencing.
- expect { publish }.to change { Gitlab::GitalyClient.get_request_count }.by(21)
+ expect { publish }.to change { Gitlab::GitalyClient.get_request_count }.by(20)
end
end
diff --git a/spec/services/event_create_service_spec.rb b/spec/services/event_create_service_spec.rb
index 56da85cc4a0..e66b413a5c9 100644
--- a/spec/services/event_create_service_spec.rb
+++ b/spec/services/event_create_service_spec.rb
@@ -379,10 +379,14 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
end
end
- describe 'design events' do
+ describe 'design events', :snowplow do
let_it_be(:design) { create(:design, project: project) }
let_it_be(:author) { user }
+ before do
+ allow(Gitlab::Tracking).to receive(:event) # rubocop:disable RSpec/ExpectGitlabTracking
+ end
+
describe '#save_designs' do
let_it_be(:updated) { create_list(:design, 5) }
let_it_be(:created) { create_list(:design, 3) }
@@ -411,6 +415,44 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
it_behaves_like "it records the event in the event counter" do
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION }
end
+
+ it 'records correct create payload with Snowplow event' do
+ service.save_designs(author, create: [design])
+
+ expect_snowplow_event(
+ category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
+ action: 'create',
+ namespace: design.project.namespace,
+ user: author,
+ project: design.project,
+ label: 'design_users'
+ )
+ end
+
+ it 'records correct update payload with Snowplow event' do
+ service.save_designs(author, update: [design])
+
+ expect_snowplow_event(
+ category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
+ action: 'update',
+ namespace: design.project.namespace,
+ user: author,
+ project: design.project,
+ label: 'design_users'
+ )
+ end
+
+ context 'when FF is disabled' do
+ before do
+ stub_feature_flags(route_hll_to_snowplow_phase2: false)
+ end
+
+ it 'doesn't emit snowplow events', :snowplow do
+ subject
+
+ expect_no_snowplow_event
+ end
+ end
end
describe '#destroy_designs' do
@@ -434,6 +476,31 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
it_behaves_like "it records the event in the event counter" do
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION }
end
+
+ it 'records correct payload with Snowplow event' do
+ service.destroy_designs([design], author)
+
+ expect_snowplow_event(
+ category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
+ action: 'destroy',
+ namespace: design.project.namespace,
+ user: author,
+ project: design.project,
+ label: 'design_users'
+ )
+ end
+
+ context 'when FF is disabled' do
+ before do
+ stub_feature_flags(route_hll_to_snowplow_phase2: false)
+ end
+
+ it 'doesn't emit snowplow events', :snowplow do
+ subject
+
+ expect_no_snowplow_event
+ end
+ end
end
end
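Illustrative sketch (not part of the diff): the new design-event examples lean on GitLab's `:snowplow` RSpec tag, the `expect_snowplow_event` / `expect_no_snowplow_event` helpers, and `stub_feature_flags` to gate on `route_hll_to_snowplow_phase2`. Assuming the `service`, `design`, and `author` defined earlier in this spec, the pattern condenses to:

context 'when route_hll_to_snowplow_phase2 is enabled', :snowplow do
  it 'emits a design_users event' do
    service.save_designs(author, create: [design])

    expect_snowplow_event(
      category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
      action: 'create',
      namespace: design.project.namespace,
      user: author,
      project: design.project,
      label: 'design_users'
    )
  end
end

context 'when route_hll_to_snowplow_phase2 is disabled', :snowplow do
  before do
    stub_feature_flags(route_hll_to_snowplow_phase2: false)
  end

  it 'does not emit snowplow events' do
    service.save_designs(author, create: [design])

    expect_no_snowplow_event
  end
end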
diff --git a/spec/services/feature_flags/create_service_spec.rb b/spec/services/feature_flags/create_service_spec.rb
index e37d41562f9..1c9bde70af3 100644
--- a/spec/services/feature_flags/create_service_spec.rb
+++ b/spec/services/feature_flags/create_service_spec.rb
@@ -41,6 +41,8 @@ RSpec.describe FeatureFlags::CreateService do
subject
end
+
+ it_behaves_like 'does not update feature flag client'
end
context 'when feature flag is saved correctly' do
@@ -62,6 +64,8 @@ RSpec.describe FeatureFlags::CreateService do
expect { subject }.to change { Operations::FeatureFlag.count }.by(1)
end
+ it_behaves_like 'update feature flag client'
+
context 'when Jira Connect subscription does not exist' do
it 'does not sync the feature flag to Jira' do
expect(::JiraConnect::SyncFeatureFlagsWorker).not_to receive(:perform_async)
diff --git a/spec/services/feature_flags/destroy_service_spec.rb b/spec/services/feature_flags/destroy_service_spec.rb
index d3796ef6b4d..740923db9b6 100644
--- a/spec/services/feature_flags/destroy_service_spec.rb
+++ b/spec/services/feature_flags/destroy_service_spec.rb
@@ -36,6 +36,8 @@ RSpec.describe FeatureFlags::DestroyService do
expect(audit_event_message).to eq("Deleted feature flag #{feature_flag.name}.")
end
+ it_behaves_like 'update feature flag client'
+
context 'when user is reporter' do
let(:user) { reporter }
@@ -57,6 +59,8 @@ RSpec.describe FeatureFlags::DestroyService do
it 'does not create audit log' do
expect { subject }.not_to change { AuditEvent.count }
end
+
+ it_behaves_like 'does not update feature flag client'
end
end
end
diff --git a/spec/services/feature_flags/update_service_spec.rb b/spec/services/feature_flags/update_service_spec.rb
index f5e94c4af0f..8f985d34961 100644
--- a/spec/services/feature_flags/update_service_spec.rb
+++ b/spec/services/feature_flags/update_service_spec.rb
@@ -58,6 +58,8 @@ RSpec.describe FeatureFlags::UpdateService do
)
end
+ it_behaves_like 'update feature flag client'
+
context 'with invalid params' do
let(:params) { { name: nil } }
@@ -79,6 +81,8 @@ RSpec.describe FeatureFlags::UpdateService do
subject
end
+
+ it_behaves_like 'does not update feature flag client'
end
context 'when user is reporter' do
diff --git a/spec/services/git/branch_hooks_service_spec.rb b/spec/services/git/branch_hooks_service_spec.rb
index 79c2cb1fca3..5de1c0e27be 100644
--- a/spec/services/git/branch_hooks_service_spec.rb
+++ b/spec/services/git/branch_hooks_service_spec.rb
@@ -387,6 +387,27 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do
expect(commits_count).to eq(project.repository.commit_count_for_ref(newrev))
end
+
+ it 'collects the related metrics' do
+ expect(Gitlab::Metrics).to receive(:add_event).with(:push_commit, { branch: 'master' })
+ expect(Gitlab::Metrics).to receive(:add_event).with(:push_branch, {})
+ expect(Gitlab::Metrics).to receive(:add_event).with(:change_default_branch, {})
+ expect(Gitlab::Metrics).to receive(:add_event).with(:process_commit_limit_overflow)
+
+ service.execute
+ end
+
+ context 'when limit is not hit' do
+ before do
+ stub_const("::Git::BaseHooksService::PROCESS_COMMIT_LIMIT", 100)
+ end
+
+ it 'does not collect the corresponding metric' do
+ expect(Gitlab::Metrics).not_to receive(:add_event).with(:process_commit_limit_overflow)
+
+ service.execute
+ end
+ end
end
context 'updating the default branch' do
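Illustrative sketch (not part of the diff): the added examples assert on `Gitlab::Metrics.add_event` and use `stub_const` to raise `PROCESS_COMMIT_LIMIT` so the overflow metric is not emitted. Assuming the `service` defined earlier in this spec:

it 'records push metrics' do
  # Permit the events this example does not assert on explicitly.
  allow(Gitlab::Metrics).to receive(:add_event)

  expect(Gitlab::Metrics).to receive(:add_event).with(:push_commit, { branch: 'master' })
  expect(Gitlab::Metrics).to receive(:add_event).with(:push_branch, {})

  service.execute
end

context 'when the commit limit is not hit' do
  before do
    # A higher limit means the overflow event should never fire.
    stub_const("::Git::BaseHooksService::PROCESS_COMMIT_LIMIT", 100)
  end

  it 'skips the overflow metric' do
    allow(Gitlab::Metrics).to receive(:add_event)

    expect(Gitlab::Metrics).not_to receive(:add_event).with(:process_commit_limit_overflow)

    service.execute
  end
end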
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index befa9598964..8d41b20c8a9 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -19,11 +19,13 @@ RSpec.describe Git::BranchPushService, services: true do
project.add_maintainer(user)
end
- describe 'Push branches' do
- subject do
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref, push_options: push_options)
- end
+ subject(:execute_service) do
+ described_class
+ .new(project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref }, push_options: push_options)
+ .execute
+ end
+ describe 'Push branches' do
context 'new branch' do
let(:oldrev) { blankrev }
@@ -72,8 +74,6 @@ RSpec.describe Git::BranchPushService, services: true do
end
describe "Pipelines" do
- subject { execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref) }
-
before do
stub_ci_pipeline_to_return_yaml_file
end
@@ -117,7 +117,7 @@ RSpec.describe Git::BranchPushService, services: true do
end
context 'with push options' do
- let(:push_options) { ['mr.create'] }
+ let(:push_options) { { 'mr' => { 'create' => true } } }
it 'sanitizes push options' do
allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
@@ -148,27 +148,34 @@ RSpec.describe Git::BranchPushService, services: true do
end
describe "Updates merge requests" do
+ let(:oldrev) { blankrev }
+
it "when pushing a new branch for the first time" do
expect(UpdateMergeRequestsWorker)
.to receive(:perform_async)
- .with(project.id, user.id, blankrev, newrev, ref)
+ .with(project.id, user.id, blankrev, newrev, ref, { 'push_options' => nil })
+ .ordered
- execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: ref)
+ subject
end
end
describe "Updates git attributes" do
context "for default branch" do
- it "calls the copy attributes method for the first push to the default branch" do
- expect(project.repository).to receive(:copy_gitattributes).with('master')
+ context "when first push" do
+ let(:oldrev) { blankrev }
+
+ it "calls the copy attributes method for the first push to the default branch" do
+ expect(project.repository).to receive(:copy_gitattributes).with('master')
- execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: ref)
+ subject
+ end
end
it "calls the copy attributes method for changes to the default branch" do
expect(project.repository).to receive(:copy_gitattributes).with(ref)
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
end
@@ -181,49 +188,53 @@ RSpec.describe Git::BranchPushService, services: true do
it "does not call copy attributes method" do
expect(project.repository).not_to receive(:copy_gitattributes)
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
end
end
describe "Webhooks" do
- context "execute webhooks" do
- before do
- create(:project_hook, push_events: true, project: project)
- end
+ before do
+ create(:project_hook, push_events: true, project: project)
+ end
- it "when pushing a branch for the first time" do
+ context "when pushing a branch for the first time" do
+ let(:oldrev) { blankrev }
+
+ it "executes webhooks" do
expect(project).to receive(:execute_hooks)
expect(project.default_branch).to eq("master")
- execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: ref)
+
+ subject
+
expect(project.protected_branches).not_to be_empty
expect(project.protected_branches.first.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER])
expect(project.protected_branches.first.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER])
end
- it "when pushing a branch for the first time with default branch protection disabled" do
+ it "with default branch protection disabled" do
expect(project.namespace).to receive(:default_branch_protection).and_return(Gitlab::Access::PROTECTION_NONE)
expect(project).to receive(:execute_hooks)
expect(project.default_branch).to eq("master")
- execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: ref)
+ subject
expect(project.protected_branches).to be_empty
end
- it "when pushing a branch for the first time with default branch protection set to 'developers can push'" do
+ it "with default branch protection set to 'developers can push'" do
expect(project.namespace).to receive(:default_branch_protection).and_return(Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
expect(project).to receive(:execute_hooks)
expect(project.default_branch).to eq("master")
- execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: ref)
+ subject
expect(project.protected_branches).not_to be_empty
expect(project.protected_branches.last.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::DEVELOPER])
expect(project.protected_branches.last.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER])
end
- it "when pushing a branch for the first time with an existing branch permission configured" do
+ it "with an existing branch permission configured" do
expect(project.namespace).to receive(:default_branch_protection).and_return(Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
create(:protected_branch, :no_one_can_push, :developers_can_merge, project: project, name: 'master')
@@ -231,27 +242,29 @@ RSpec.describe Git::BranchPushService, services: true do
expect(project.default_branch).to eq("master")
expect(ProtectedBranches::CreateService).not_to receive(:new)
- execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: ref)
+ subject
expect(project.protected_branches).not_to be_empty
expect(project.protected_branches.last.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::NO_ACCESS])
expect(project.protected_branches.last.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::DEVELOPER])
end
- it "when pushing a branch for the first time with default branch protection set to 'developers can merge'" do
+ it "with default branch protection set to 'developers can merge'" do
expect(project.namespace).to receive(:default_branch_protection).and_return(Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
expect(project).to receive(:execute_hooks)
expect(project.default_branch).to eq("master")
- execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: ref)
+ subject
expect(project.protected_branches).not_to be_empty
expect(project.protected_branches.first.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER])
expect(project.protected_branches.first.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::DEVELOPER])
end
+ end
- it "when pushing new commits to existing branch" do
+ context "when pushing new commits to existing branch" do
+ it "executes webhooks" do
expect(project).to receive(:execute_hooks)
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
end
end
@@ -281,7 +294,7 @@ RSpec.describe Git::BranchPushService, services: true do
it "creates a note if a pushed commit mentions an issue", :sidekiq_might_not_need_inline do
expect(SystemNoteService).to receive(:cross_reference).with(issue, commit, commit_author)
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
it "only creates a cross-reference note if one doesn't already exist" do
@@ -289,7 +302,7 @@ RSpec.describe Git::BranchPushService, services: true do
expect(SystemNoteService).not_to receive(:cross_reference).with(issue, commit, commit_author)
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
it "defaults to the pushing user if the commit's author is not known", :sidekiq_inline, :use_clean_rails_redis_caching do
@@ -299,16 +312,21 @@ RSpec.describe Git::BranchPushService, services: true do
)
expect(SystemNoteService).to receive(:cross_reference).with(issue, commit, user)
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
- it "finds references in the first push to a non-default branch", :sidekiq_might_not_need_inline do
- allow(project.repository).to receive(:commits_between).with(blankrev, newrev).and_return([])
- allow(project.repository).to receive(:commits_between).with("master", newrev).and_return([commit])
+ context "when first push on a non-default branch" do
+ let(:oldrev) { blankrev }
+ let(:ref) { 'refs/heads/other' }
- expect(SystemNoteService).to receive(:cross_reference).with(issue, commit, commit_author)
+ it "finds references", :sidekiq_might_not_need_inline do
+ allow(project.repository).to receive(:commits_between).with(blankrev, newrev).and_return([])
+ allow(project.repository).to receive(:commits_between).with("master", newrev).and_return([commit])
- execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: 'refs/heads/other')
+ expect(SystemNoteService).to receive(:cross_reference).with(issue, commit, commit_author)
+
+ subject
+ end
end
end
@@ -338,14 +356,14 @@ RSpec.describe Git::BranchPushService, services: true do
context "while saving the 'first_mentioned_in_commit_at' metric for an issue" do
it 'sets the metric for referenced issues', :sidekiq_inline, :use_clean_rails_redis_caching do
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
expect(issue.reload.metrics.first_mentioned_in_commit_at).to be_like_time(commit_time)
end
it 'does not set the metric for non-referenced issues' do
non_referenced_issue = create(:issue, project: project)
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
expect(non_referenced_issue.reload.metrics.first_mentioned_in_commit_at).to be_nil
end
@@ -376,19 +394,21 @@ RSpec.describe Git::BranchPushService, services: true do
end
context "to default branches" do
+ let(:user) { commit_author }
+
it "closes issues", :sidekiq_might_not_need_inline do
- execute_service(project, commit_author, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
expect(Issue.find(issue.id)).to be_closed
end
it "adds a note indicating that the issue is now closed", :sidekiq_might_not_need_inline do
expect(SystemNoteService).to receive(:change_status).with(issue, project, commit_author, "closed", closing_commit)
- execute_service(project, commit_author, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
it "doesn't create additional cross-reference notes" do
expect(SystemNoteService).not_to receive(:cross_reference)
- execute_service(project, commit_author, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
end
@@ -400,11 +420,11 @@ RSpec.describe Git::BranchPushService, services: true do
it "creates cross-reference notes", :sidekiq_inline, :use_clean_rails_redis_caching do
expect(SystemNoteService).to receive(:cross_reference).with(issue, closing_commit, commit_author)
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
it "doesn't close issues" do
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
expect(Issue.find(issue.id)).to be_opened
end
end
@@ -441,7 +461,7 @@ RSpec.describe Git::BranchPushService, services: true do
let(:message) { "this is some work.\n\nrelated to JIRA-1" }
it "initiates one api call to jira server to mention the issue", :sidekiq_inline, :use_clean_rails_redis_caching do
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
expect(WebMock).to have_requested(:post, jira_api_comment_url('JIRA-1')).with(
body: /mentioned this issue in/
@@ -468,37 +488,43 @@ RSpec.describe Git::BranchPushService, services: true do
end
context "using right markdown", :sidekiq_might_not_need_inline do
+ let(:user) { commit_author }
+
it "initiates one api call to jira server to close the issue" do
- execute_service(project, commit_author, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
expect(WebMock).to have_requested(:post, jira_api_transition_url('JIRA-1')).once
end
it "initiates one api call to jira server to comment on the issue" do
- execute_service(project, commit_author, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
- expect(WebMock).to have_requested(:post, jira_api_comment_url('JIRA-1')).with(
- body: comment_body
- ).once
+ expect(WebMock)
+ .to have_requested(:post, jira_api_comment_url('JIRA-1'))
+ .with(body: comment_body)
+ .once
end
end
context "using internal issue reference" do
+ let(:user) { commit_author }
+
context 'when internal issues are disabled' do
before do
project.issues_enabled = false
project.save!
end
+
let(:message) { "this is some work.\n\ncloses #1" }
it "does not initiates one api call to jira server to close the issue" do
- execute_service(project, commit_author, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
expect(WebMock).not_to have_requested(:post, jira_api_transition_url('JIRA-1'))
end
it "does not initiates one api call to jira server to comment on the issue" do
- execute_service(project, commit_author, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
expect(WebMock).not_to have_requested(:post, jira_api_comment_url('JIRA-1')).with(
body: comment_body
@@ -511,13 +537,13 @@ RSpec.describe Git::BranchPushService, services: true do
let(:message) { "this is some work.\n\ncloses JIRA-1 \n\n closes #{issue.to_reference}" }
it "initiates one api call to jira server to close the jira issue" do
- execute_service(project, commit_author, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
expect(WebMock).to have_requested(:post, jira_api_transition_url('JIRA-1')).once
end
it "initiates one api call to jira server to comment on the jira issue" do
- execute_service(project, commit_author, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
expect(WebMock).to have_requested(:post, jira_api_comment_url('JIRA-1')).with(
body: comment_body
@@ -525,14 +551,14 @@ RSpec.describe Git::BranchPushService, services: true do
end
it "closes the internal issue" do
- execute_service(project, commit_author, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
expect(issue.reload).to be_closed
end
it "adds a note indicating that the issue is now closed" do
expect(SystemNoteService).to receive(:change_status)
.with(issue, project, commit_author, "closed", closing_commit)
- execute_service(project, commit_author, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
end
end
@@ -542,7 +568,8 @@ RSpec.describe Git::BranchPushService, services: true do
describe "empty project" do
let(:project) { create(:project_empty_repo) }
- let(:new_ref) { 'refs/heads/feature' }
+ let(:ref) { 'refs/heads/feature' }
+ let(:oldrev) { blankrev }
before do
allow(project).to receive(:default_branch).and_return('feature')
@@ -550,7 +577,7 @@ RSpec.describe Git::BranchPushService, services: true do
end
it 'push to first branch updates HEAD' do
- execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: new_ref)
+ subject
end
end
@@ -561,7 +588,7 @@ RSpec.describe Git::BranchPushService, services: true do
it 'does nothing' do
expect(::Environments::StopService).not_to receive(:new)
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
end
@@ -569,7 +596,7 @@ RSpec.describe Git::BranchPushService, services: true do
it 'does nothing' do
expect(::Environments::StopService).not_to receive(:new)
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
end
@@ -583,7 +610,7 @@ RSpec.describe Git::BranchPushService, services: true do
expect(stop_service).to receive(:execute_for_branch).with(branch)
end
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
end
end
@@ -595,7 +622,7 @@ RSpec.describe Git::BranchPushService, services: true do
it 'does nothing' do
expect(::Ci::RefDeleteUnlockArtifactsWorker).not_to receive(:perform_async)
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
end
@@ -603,7 +630,7 @@ RSpec.describe Git::BranchPushService, services: true do
it 'does nothing' do
expect(::Ci::RefDeleteUnlockArtifactsWorker).not_to receive(:perform_async)
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
end
@@ -614,7 +641,7 @@ RSpec.describe Git::BranchPushService, services: true do
expect(::Ci::RefDeleteUnlockArtifactsWorker)
.to receive(:perform_async).with(project.id, user.id, "refs/heads/#{branch}")
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
end
end
@@ -636,7 +663,7 @@ RSpec.describe Git::BranchPushService, services: true do
expect(hooks_service).to receive(:execute)
end
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
end
@@ -646,38 +673,24 @@ RSpec.describe Git::BranchPushService, services: true do
it 'does nothing' do
expect(::Git::BranchHooksService).not_to receive(:new)
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ subject
end
end
end
- def execute_service(project, user, change, push_options = {})
- service = described_class.new(project, user, change: change, push_options: push_options)
- service.execute
- service
- end
-
context 'Jira Connect hooks' do
- let_it_be(:project) { create(:project, :repository) }
-
let(:branch_to_sync) { nil }
let(:commits_to_sync) { [] }
- let(:params) do
- { change: { oldrev: oldrev, newrev: newrev, ref: ref } }
- end
-
- subject do
- described_class.new(project, user, params)
- end
shared_examples 'enqueues Jira sync worker' do
specify :aggregate_failures do
Sidekiq::Testing.fake! do
- expect(JiraConnect::SyncBranchWorker).to receive(:perform_async)
- .with(project.id, branch_to_sync, commits_to_sync, kind_of(Numeric))
- .and_call_original
+ expect(JiraConnect::SyncBranchWorker)
+ .to receive(:perform_async)
+ .with(project.id, branch_to_sync, commits_to_sync, kind_of(Numeric))
+ .and_call_original
- expect { subject.execute }.to change(JiraConnect::SyncBranchWorker.jobs, :size).by(1)
+ expect { subject }.to change(JiraConnect::SyncBranchWorker.jobs, :size).by(1)
end
end
end
@@ -685,7 +698,7 @@ RSpec.describe Git::BranchPushService, services: true do
shared_examples 'does not enqueue Jira sync worker' do
specify do
Sidekiq::Testing.fake! do
- expect { subject.execute }.not_to change(JiraConnect::SyncBranchWorker.jobs, :size)
+ expect { subject }.not_to change(JiraConnect::SyncBranchWorker.jobs, :size)
end
end
end
@@ -723,12 +736,12 @@ RSpec.describe Git::BranchPushService, services: true do
end
describe 'project target platforms detection' do
- subject(:execute) { execute_service(project, user, oldrev: blankrev, newrev: newrev, ref: ref) }
+ let(:oldrev) { blankrev }
it 'calls enqueue_record_project_target_platforms on the project' do
expect(project).to receive(:enqueue_record_project_target_platforms)
- execute
+ subject
end
end
end
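Illustrative sketch (not part of the diff): most of this file's churn comes from replacing the `execute_service(project, user, ...)` helper with a named subject whose `change` arguments are supplied per context through `let` overrides. Using the names from the spec above, the structure is roughly:

subject(:execute_service) do
  described_class
    .new(project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref }, push_options: push_options)
    .execute
end

context 'when pushing a new branch' do
  # Only the values that differ are overridden; the subject stays shared.
  let(:oldrev) { blankrev }

  it 'executes project hooks' do
    expect(project).to receive(:execute_hooks)

    execute_service
  end
end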
diff --git a/spec/services/git/process_ref_changes_service_spec.rb b/spec/services/git/process_ref_changes_service_spec.rb
index 05c1f898cab..8d2da4a899e 100644
--- a/spec/services/git/process_ref_changes_service_spec.rb
+++ b/spec/services/git/process_ref_changes_service_spec.rb
@@ -243,14 +243,37 @@ RSpec.describe Git::ProcessRefChangesService do
end
it 'schedules job for existing merge requests' do
- expect(UpdateMergeRequestsWorker).to receive(:perform_async)
- .with(project.id, user.id, Gitlab::Git::BLANK_SHA, '789012', "#{ref_prefix}/create1").ordered
- expect(UpdateMergeRequestsWorker).to receive(:perform_async)
- .with(project.id, user.id, Gitlab::Git::BLANK_SHA, '789013', "#{ref_prefix}/create2").ordered
- expect(UpdateMergeRequestsWorker).to receive(:perform_async)
- .with(project.id, user.id, '789015', '789016', "#{ref_prefix}/changed1").ordered
- expect(UpdateMergeRequestsWorker).to receive(:perform_async)
- .with(project.id, user.id, '789020', Gitlab::Git::BLANK_SHA, "#{ref_prefix}/removed2").ordered
+ expect(UpdateMergeRequestsWorker).to receive(:perform_async).with(
+ project.id,
+ user.id,
+ Gitlab::Git::BLANK_SHA,
+ '789012',
+ "#{ref_prefix}/create1",
+ { 'push_options' => nil }).ordered
+
+ expect(UpdateMergeRequestsWorker).to receive(:perform_async).with(
+ project.id,
+ user.id,
+ Gitlab::Git::BLANK_SHA,
+ '789013',
+ "#{ref_prefix}/create2",
+ { 'push_options' => nil }).ordered
+
+ expect(UpdateMergeRequestsWorker).to receive(:perform_async).with(
+ project.id,
+ user.id,
+ '789015',
+ '789016',
+ "#{ref_prefix}/changed1",
+ { 'push_options' => nil }).ordered
+
+ expect(UpdateMergeRequestsWorker).to receive(:perform_async).with(
+ project.id,
+ user.id,
+ '789020',
+ Gitlab::Git::BLANK_SHA,
+ "#{ref_prefix}/removed2",
+ { 'push_options' => nil }).ordered
subject.execute
end
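Illustrative sketch (not part of the diff): `UpdateMergeRequestsWorker.perform_async` now receives a trailing options hash, so each ordered expectation is spelled out one argument per line. Reduced to a single ref, with the literal values used in the hunk above:

expect(UpdateMergeRequestsWorker).to receive(:perform_async).with(
  project.id,
  user.id,
  Gitlab::Git::BLANK_SHA,
  '789012',
  "#{ref_prefix}/create1",
  { 'push_options' => nil } # the new trailing argument
).ordered

subject.execute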
diff --git a/spec/services/git/tag_hooks_service_spec.rb b/spec/services/git/tag_hooks_service_spec.rb
index dae2f63f2f9..2d50c64d63c 100644
--- a/spec/services/git/tag_hooks_service_spec.rb
+++ b/spec/services/git/tag_hooks_service_spec.rb
@@ -138,7 +138,7 @@ RSpec.describe Git::TagHooksService, :service do
before do
# Create the lightweight tag
- rugged_repo(project.repository).tags.create(tag_name, newrev)
+ project.repository.write_ref("refs/tags/#{tag_name}", newrev)
# Clear tag list cache
project.repository.expire_tags_cache
diff --git a/spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb b/spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb
index e2f5a2e719e..b2cd5632be0 100644
--- a/spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb
+++ b/spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe GoogleCloud::GcpRegionAddOrReplaceService do
service.execute('env_2', 'loc_2')
service.execute('env_1', 'loc_3')
- list = project.variables.reload.filter { |variable| variable.key == Projects::GoogleCloudController::GCP_REGION_CI_VAR_KEY }
+ list = project.variables.reload.filter { |variable| variable.key == Projects::GoogleCloud::GcpRegionsController::GCP_REGION_CI_VAR_KEY }
list = list.sort_by(&:environment_scope)
aggregate_failures 'testing list of gcp regions' do
diff --git a/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb b/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb
new file mode 100644
index 00000000000..55553097423
--- /dev/null
+++ b/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb
@@ -0,0 +1,158 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GoogleCloud::SetupCloudsqlInstanceService do
+ let(:random_user) { create(:user) }
+ let(:project) { create(:project) }
+
+ context 'when unauthorized user triggers worker' do
+ subject do
+ params = {
+ gcp_project_id: :gcp_project_id,
+ instance_name: :instance_name,
+ database_version: :database_version,
+ environment_name: :environment_name,
+ is_protected: :is_protected
+ }
+ described_class.new(project, random_user, params).execute
+ end
+
+ it 'raises unauthorized error' do
+ message = subject[:message]
+ status = subject[:status]
+
+ expect(status).to eq(:error)
+ expect(message).to eq('Unauthorized user')
+ end
+ end
+
+ context 'when authorized user triggers worker' do
+ subject do
+ user = project.creator
+ params = {
+ gcp_project_id: :gcp_project_id,
+ instance_name: :instance_name,
+ database_version: :database_version,
+ environment_name: :environment_name,
+ is_protected: :is_protected
+ }
+ described_class.new(project, user, params).execute
+ end
+
+ context 'when instance is not RUNNABLE' do
+ let(:get_instance_response_pending) do
+ Google::Apis::SqladminV1beta4::DatabaseInstance.new(state: 'PENDING')
+ end
+
+ it 'raises error' do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |google_api_client|
+ expect(google_api_client).to receive(:get_cloudsql_instance).and_return(get_instance_response_pending)
+ end
+
+ message = subject[:message]
+ status = subject[:status]
+
+ expect(status).to eq(:error)
+ expect(message).to eq('CloudSQL instance not RUNNABLE: {"state":"PENDING"}')
+ end
+ end
+
+ context 'when instance is RUNNABLE' do
+ let(:get_instance_response_runnable) do
+ Google::Apis::SqladminV1beta4::DatabaseInstance.new(
+ connection_name: 'mock-connection-name',
+ ip_addresses: [Struct.new(:ip_address).new('1.2.3.4')],
+ state: 'RUNNABLE'
+ )
+ end
+
+ let(:operation_fail) { Google::Apis::SqladminV1beta4::Operation.new(status: 'FAILED') }
+
+ let(:operation_done) { Google::Apis::SqladminV1beta4::Operation.new(status: 'DONE') }
+
+ context 'when database creation fails' do
+ it 'raises error' do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |google_api_client|
+ expect(google_api_client).to receive(:get_cloudsql_instance).and_return(get_instance_response_runnable)
+ expect(google_api_client).to receive(:create_cloudsql_database).and_return(operation_fail)
+ end
+
+ message = subject[:message]
+ status = subject[:status]
+
+ expect(status).to eq(:error)
+ expect(message).to eq('Database creation failed: {"status":"FAILED"}')
+ end
+ end
+
+ context 'when user creation fails' do
+ it 'raises error' do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |google_api_client|
+ expect(google_api_client).to receive(:get_cloudsql_instance).and_return(get_instance_response_runnable)
+ expect(google_api_client).to receive(:create_cloudsql_database).and_return(operation_done)
+ expect(google_api_client).to receive(:create_cloudsql_user).and_return(operation_fail)
+ end
+
+ message = subject[:message]
+ status = subject[:status]
+
+ expect(status).to eq(:error)
+ expect(message).to eq('User creation failed: {"status":"FAILED"}')
+ end
+ end
+
+ context 'when database and user creation succeeds' do
+ it 'stores project CI vars' do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |google_api_client|
+ expect(google_api_client).to receive(:get_cloudsql_instance).and_return(get_instance_response_runnable)
+ expect(google_api_client).to receive(:create_cloudsql_database).and_return(operation_done)
+ expect(google_api_client).to receive(:create_cloudsql_user).and_return(operation_done)
+ end
+
+ subject
+
+ aggregate_failures 'test generated vars' do
+ variables = project.reload.variables
+
+ expect(variables.count).to eq(8)
+ expect(variables.find_by(key: 'GCP_PROJECT_ID').value).to eq("gcp_project_id")
+ expect(variables.find_by(key: 'GCP_CLOUDSQL_INSTANCE_NAME').value).to eq("instance_name")
+ expect(variables.find_by(key: 'GCP_CLOUDSQL_CONNECTION_NAME').value).to eq("mock-connection-name")
+ expect(variables.find_by(key: 'GCP_CLOUDSQL_PRIMARY_IP_ADDRESS').value).to eq("1.2.3.4")
+ expect(variables.find_by(key: 'GCP_CLOUDSQL_VERSION').value).to eq("database_version")
+ expect(variables.find_by(key: 'GCP_CLOUDSQL_DATABASE_NAME').value).to eq("main_db")
+ expect(variables.find_by(key: 'GCP_CLOUDSQL_DATABASE_USER').value).to eq("main_user")
+ expect(variables.find_by(key: 'GCP_CLOUDSQL_DATABASE_PASS').value).to be_present
+ end
+ end
+
+ context 'when the ci variable already exists' do
+ before do
+ create(
+ :ci_variable,
+ project: project,
+ key: 'GCP_PROJECT_ID',
+ value: 'previous_gcp_project_id',
+ environment_scope: :environment_name
+ )
+ end
+
+ it 'overwrites existing GCP_PROJECT_ID var' do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |google_api_client|
+ expect(google_api_client).to receive(:get_cloudsql_instance).and_return(get_instance_response_runnable)
+ expect(google_api_client).to receive(:create_cloudsql_database).and_return(operation_done)
+ expect(google_api_client).to receive(:create_cloudsql_user).and_return(operation_done)
+ end
+
+ subject
+
+ variables = project.reload.variables
+ value = variables.find_by(key: 'GCP_PROJECT_ID', environment_scope: :environment_name).value
+ expect(value).to eq("gcp_project_id")
+ end
+ end
+ end
+ end
+ end
+end
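Illustrative sketch (not part of the diff): the new CloudSQL spec stubs the Google API client with `allow_next_instance_of` and canned `Google::Apis::SqladminV1beta4` responses. The core of the happy-path arrangement, with the same response objects as above:

let(:runnable_instance) do
  Google::Apis::SqladminV1beta4::DatabaseInstance.new(
    connection_name: 'mock-connection-name',
    ip_addresses: [Struct.new(:ip_address).new('1.2.3.4')],
    state: 'RUNNABLE'
  )
end

let(:operation_done) { Google::Apis::SqladminV1beta4::Operation.new(status: 'DONE') }

before do
  # Every Google API call in the happy path returns a successful canned response.
  allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
    allow(client).to receive(:get_cloudsql_instance).and_return(runnable_instance)
    allow(client).to receive(:create_cloudsql_database).and_return(operation_done)
    allow(client).to receive(:create_cloudsql_user).and_return(operation_done)
  end
end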
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index 628943e40ff..57a151efda6 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Groups::DestroyService do
let(:remove_path) { group.path + "+#{group.id}+deleted" }
before do
- group.add_user(user, Gitlab::Access::OWNER)
+ group.add_member(user, Gitlab::Access::OWNER)
end
def destroy_group(group, user, async)
@@ -168,8 +168,8 @@ RSpec.describe Groups::DestroyService do
let(:group2_user) { create(:user) }
before do
- group1.add_user(group1_user, Gitlab::Access::OWNER)
- group2.add_user(group2_user, Gitlab::Access::OWNER)
+ group1.add_member(group1_user, Gitlab::Access::OWNER)
+ group2.add_member(group2_user, Gitlab::Access::OWNER)
end
context 'when a project is shared with a group' do
@@ -203,7 +203,7 @@ RSpec.describe Groups::DestroyService do
let(:group3_user) { create(:user) }
before do
- group3.add_user(group3_user, Gitlab::Access::OWNER)
+ group3.add_member(group3_user, Gitlab::Access::OWNER)
create(:group_group_link, shared_group: group2, shared_with_group: group3)
group3.refresh_members_authorized_projects
@@ -290,7 +290,7 @@ RSpec.describe Groups::DestroyService do
let!(:shared_with_group_user) { create(:user) }
before do
- shared_with_group.add_user(shared_with_group_user, Gitlab::Access::MAINTAINER)
+ shared_with_group.add_member(shared_with_group_user, Gitlab::Access::MAINTAINER)
create(:group_group_link, shared_group: shared_group, shared_with_group: shared_with_group)
shared_with_group.refresh_members_authorized_projects
diff --git a/spec/services/groups/group_links/destroy_service_spec.rb b/spec/services/groups/group_links/destroy_service_spec.rb
index 6aaf5f45069..03de7175edd 100644
--- a/spec/services/groups/group_links/destroy_service_spec.rb
+++ b/spec/services/groups/group_links/destroy_service_spec.rb
@@ -24,11 +24,29 @@ RSpec.describe Groups::GroupLinks::DestroyService, '#execute' do
expect { subject.execute(link) }.to change { shared_group.shared_with_group_links.count }.from(1).to(0)
end
- it 'revokes project authorization', :sidekiq_inline do
- group.add_developer(user)
+ context 'with skip_group_share_unlink_auth_refresh feature flag disabled' do
+ before do
+ stub_feature_flags(skip_group_share_unlink_auth_refresh: false)
+ end
- expect { subject.execute(link) }.to(
- change { Ability.allowed?(user, :read_project, project) }.from(true).to(false))
+ it 'revokes project authorization', :sidekiq_inline do
+ group.add_developer(user)
+
+ expect { subject.execute(link) }.to(
+ change { Ability.allowed?(user, :read_project, project) }.from(true).to(false))
+ end
+ end
+
+ context 'with skip_group_share_unlink_auth_refresh feature flag enabled' do
+ before do
+ stub_feature_flags(skip_group_share_unlink_auth_refresh: true)
+ end
+
+ it 'maintains project authorization', :sidekiq_inline do
+ group.add_developer(user)
+
+ expect(Ability.allowed?(user, :read_project, project)).to be_truthy
+ end
end
end
@@ -45,12 +63,32 @@ RSpec.describe Groups::GroupLinks::DestroyService, '#execute' do
]
end
- it 'updates project authorization once per group' do
- expect(GroupGroupLink).to receive(:delete).and_call_original
- expect(group).to receive(:refresh_members_authorized_projects).with(direct_members_only: true, blocking: false).once
- expect(another_group).to receive(:refresh_members_authorized_projects).with(direct_members_only: true, blocking: false).once
+ context 'with skip_group_share_unlink_auth_refresh feature flag disabled' do
+ before do
+ stub_feature_flags(skip_group_share_unlink_auth_refresh: false)
+ end
+
+ it 'updates project authorization once per group' do
+ expect(GroupGroupLink).to receive(:delete).and_call_original
+ expect(group).to receive(:refresh_members_authorized_projects).with(direct_members_only: true, blocking: false).once
+ expect(another_group).to receive(:refresh_members_authorized_projects).with(direct_members_only: true, blocking: false).once
+
+ subject.execute(links)
+ end
+ end
+
+ context 'with skip_group_share_unlink_auth_refresh feature flag enabled' do
+ before do
+ stub_feature_flags(skip_group_share_unlink_auth_refresh: true)
+ end
+
+ it 'does not update project authorization once per group' do
+ expect(GroupGroupLink).to receive(:delete).and_call_original
+ expect(group).not_to receive(:refresh_members_authorized_projects)
+ expect(another_group).not_to receive(:refresh_members_authorized_projects)
- subject.execute(links)
+ subject.execute(links)
+ end
end
end
end
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index 20ea8b2bf1b..fbcca215282 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -439,6 +439,7 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
before do
TestEnv.clean_test_path
create(:group_member, :owner, group: new_parent_group, user: user)
+ allow(transfer_service).to receive(:update_project_settings)
transfer_service.execute(new_parent_group)
end
@@ -478,6 +479,11 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
end
end
+ it 'invokes #update_project_settings' do
+ expect(transfer_service).to have_received(:update_project_settings)
+ .with(group.projects.pluck(:id))
+ end
+
it_behaves_like 'project namespace path is in sync with project path' do
let(:group_full_path) { "#{new_parent_group.path}/#{group.path}" }
let(:projects_with_project_namespace) { [project1, project2] }
@@ -601,8 +607,8 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
}.from(0).to(1)
end
- it 'performs authorizations job immediately' do
- expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:bulk_perform_inline)
+ it 'performs authorizations job' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:bulk_perform_async)
transfer_service.execute(new_parent_group)
end
@@ -659,7 +665,7 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
it 'schedules authorizations job' do
expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:bulk_perform_async)
- .with(array_including(group.all_projects.ids.map { |id| [id, anything] }))
+ .with(array_including(group.all_projects.ids.map { |id| [id] }))
transfer_service.execute(new_parent_group)
end
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index 46c5e2a9818..c0e1691fe26 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe Groups::UpdateService do
let!(:service) { described_class.new(public_group, user, visibility_level: Gitlab::VisibilityLevel::INTERNAL) }
before do
- public_group.add_user(user, Gitlab::Access::OWNER)
+ public_group.add_member(user, Gitlab::Access::OWNER)
create(:project, :public, group: public_group)
expect(TodosDestroyer::GroupPrivateWorker).not_to receive(:perform_in)
@@ -119,7 +119,7 @@ RSpec.describe Groups::UpdateService do
let!(:service) { described_class.new(internal_group, user, visibility_level: Gitlab::VisibilityLevel::PRIVATE) }
before do
- internal_group.add_user(user, Gitlab::Access::OWNER)
+ internal_group.add_member(user, Gitlab::Access::OWNER)
create(:project, :internal, group: internal_group)
expect(TodosDestroyer::GroupPrivateWorker).not_to receive(:perform_in)
@@ -135,7 +135,7 @@ RSpec.describe Groups::UpdateService do
let!(:service) { described_class.new(internal_group, user, visibility_level: Gitlab::VisibilityLevel::PRIVATE) }
before do
- internal_group.add_user(user, Gitlab::Access::OWNER)
+ internal_group.add_member(user, Gitlab::Access::OWNER)
create(:project, :private, group: internal_group)
expect(TodosDestroyer::GroupPrivateWorker).to receive(:perform_in)
@@ -233,7 +233,7 @@ RSpec.describe Groups::UpdateService do
let!(:service) { described_class.new(internal_group, user, visibility_level: 99) }
before do
- internal_group.add_user(user, Gitlab::Access::MAINTAINER)
+ internal_group.add_member(user, Gitlab::Access::MAINTAINER)
end
it "does not change permission level" do
@@ -246,7 +246,7 @@ RSpec.describe Groups::UpdateService do
let(:service) { described_class.new(internal_group, user, emails_disabled: true) }
it 'updates the attribute' do
- internal_group.add_user(user, Gitlab::Access::OWNER)
+ internal_group.add_member(user, Gitlab::Access::OWNER)
expect { service.execute }.to change { internal_group.emails_disabled }.to(true)
end
@@ -280,7 +280,7 @@ RSpec.describe Groups::UpdateService do
let!(:service) { described_class.new(internal_group, user, path: SecureRandom.hex) }
before do
- internal_group.add_user(user, Gitlab::Access::MAINTAINER)
+ internal_group.add_member(user, Gitlab::Access::MAINTAINER)
create(:project, :internal, group: internal_group)
end
diff --git a/spec/services/incident_management/issuable_escalation_statuses/after_update_service_spec.rb b/spec/services/incident_management/issuable_escalation_statuses/after_update_service_spec.rb
index 731406613dd..4b0c8d9113c 100644
--- a/spec/services/incident_management/issuable_escalation_statuses/after_update_service_spec.rb
+++ b/spec/services/incident_management/issuable_escalation_statuses/after_update_service_spec.rb
@@ -7,14 +7,11 @@ RSpec.describe IncidentManagement::IssuableEscalationStatuses::AfterUpdateServic
let_it_be(:escalation_status, reload: true) { create(:incident_management_issuable_escalation_status, :triggered) }
let_it_be(:issue, reload: true) { escalation_status.issue }
let_it_be(:project) { issue.project }
- let_it_be(:alert) { create(:alert_management_alert, issue: issue, project: project) }
- let(:status_event) { :acknowledge }
- let(:update_params) { { incident_management_issuable_escalation_status_attributes: { status_event: status_event } } }
let(:service) { IncidentManagement::IssuableEscalationStatuses::AfterUpdateService.new(issue, current_user) }
subject(:result) do
- issue.update!(update_params)
+ issue.update!(incident_management_issuable_escalation_status_attributes: update_params)
service.execute
end
@@ -22,46 +19,31 @@ RSpec.describe IncidentManagement::IssuableEscalationStatuses::AfterUpdateServic
issue.project.add_developer(current_user)
end
- shared_examples 'does not attempt to update the alert' do
- specify do
- expect(::AlertManagement::Alerts::UpdateService).not_to receive(:new)
-
- expect(result).to be_success
- end
- end
-
- shared_examples 'adds a status change system note' do
- specify do
- expect { result }.to change { issue.reload.notes.count }.by(1)
- end
- end
-
context 'with status attributes' do
- it_behaves_like 'adds a status change system note'
-
- it 'updates the alert with the new alert status' do
- expect(::AlertManagement::Alerts::UpdateService).to receive(:new).once.and_call_original
- expect(described_class).to receive(:new).once.and_call_original
+ let(:status_event) { :acknowledge }
+ let(:update_params) { { status_event: status_event } }
- expect { result }.to change { escalation_status.reload.acknowledged? }.to(true)
- .and change { alert.reload.acknowledged? }.to(true)
+ it 'adds a status change system note' do
+ expect { result }.to change { issue.reload.notes.count }.by(1)
end
- context 'when incident is not associated with an alert' do
- before do
- alert.destroy!
- end
+ it 'adds a status change timeline event' do
+ expect(IncidentManagement::TimelineEvents::CreateService)
+ .to receive(:change_incident_status)
+ .with(issue, current_user, escalation_status)
+ .and_call_original
- it_behaves_like 'does not attempt to update the alert'
- it_behaves_like 'adds a status change system note'
+ expect { result }.to change { issue.reload.incident_management_timeline_events.count }.by(1)
end
+ end
- context 'when new status matches the current status' do
- let(:status_event) { :trigger }
-
- it_behaves_like 'does not attempt to update the alert'
+ context 'with non-status attributes' do
+ let(:update_params) { { updated_at: Time.current } }
- specify { expect { result }.not_to change { issue.reload.notes.count } }
+ it 'does not add a status change system note or timeline event' do
+ expect { result }
+ .to not_change { issue.reload.notes.count }
+ .and not_change { issue.reload.incident_management_timeline_events.count }
end
end
end
diff --git a/spec/services/incident_management/issuable_escalation_statuses/build_service_spec.rb b/spec/services/incident_management/issuable_escalation_statuses/build_service_spec.rb
index c20a0688ac2..b5c5238d483 100644
--- a/spec/services/incident_management/issuable_escalation_statuses/build_service_spec.rb
+++ b/spec/services/incident_management/issuable_escalation_statuses/build_service_spec.rb
@@ -11,10 +11,4 @@ RSpec.describe IncidentManagement::IssuableEscalationStatuses::BuildService do
subject(:execute) { service.execute }
it_behaves_like 'initializes new escalation status with expected attributes'
-
- context 'with associated alert' do
- let_it_be(:alert) { create(:alert_management_alert, :acknowledged, project: project, issue: incident) }
-
- it_behaves_like 'initializes new escalation status with expected attributes', { status_event: :acknowledge }
- end
end
diff --git a/spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb b/spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb
index 2c7d330766c..b6ae03a19fe 100644
--- a/spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb
+++ b/spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb
@@ -27,19 +27,4 @@ RSpec.describe IncidentManagement::IssuableEscalationStatuses::CreateService do
expect { execute }.not_to change { incident.reload.escalation_status }
end
end
-
- context 'with associated alert' do
- before do
- create(:alert_management_alert, :acknowledged, project: project, issue: incident)
- end
-
- it 'creates an escalation status matching the alert attributes' do
- expect { execute }.to change { incident.reload.escalation_status }.from(nil)
- expect(incident.escalation_status).to have_attributes(
- status_name: :acknowledged,
- policy_id: nil,
- escalations_started_at: nil
- )
- end
- end
end
diff --git a/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb b/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb
index 6c99631fcb0..761cc5c92ea 100644
--- a/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb
+++ b/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb
@@ -102,10 +102,4 @@ RSpec.describe IncidentManagement::IssuableEscalationStatuses::PrepareUpdateServ
it_behaves_like 'successful response', { status_event: :acknowledge }
end
end
-
- context 'with status_change_reason param' do
- let(:params) { { status_change_reason: ' by changing the incident status' } }
-
- it_behaves_like 'successful response', { status_change_reason: ' by changing the incident status' }
- end
end
diff --git a/spec/services/incident_management/timeline_events/create_service_spec.rb b/spec/services/incident_management/timeline_events/create_service_spec.rb
index 133a644f243..a4e928b98f4 100644
--- a/spec/services/incident_management/timeline_events/create_service_spec.rb
+++ b/spec/services/incident_management/timeline_events/create_service_spec.rb
@@ -132,6 +132,40 @@ RSpec.describe IncidentManagement::TimelineEvents::CreateService do
it 'creates a system note' do
expect { execute }.to change { incident.notes.reload.count }.by(1)
end
+
+ context 'with auto_created param' do
+ let(:args) do
+ {
+ note: 'note',
+ occurred_at: Time.current,
+ action: 'new comment',
+ promoted_from_note: comment,
+ auto_created: auto_created
+ }
+ end
+
+ context 'when auto_created is true' do
+ let(:auto_created) { true }
+
+ it 'does not create a system note' do
+ expect { execute }.not_to change { incident.notes.reload.count }
+ end
+
+ context 'when user does not have permissions' do
+ let(:current_user) { user_without_permissions }
+
+ it_behaves_like 'success response'
+ end
+ end
+
+ context 'when auto_created is false' do
+ let(:auto_created) { false }
+
+ it 'creates a system note' do
+ expect { execute }.to change { incident.notes.reload.count }.by(1)
+ end
+ end
+ end
end
context 'when incident_timeline feature flag is disabled' do
@@ -144,4 +178,71 @@ RSpec.describe IncidentManagement::TimelineEvents::CreateService do
end
end
end
+
+ describe 'automatically created timeline events' do
+ shared_examples 'successfully created timeline event' do
+ it 'creates a timeline event', :aggregate_failures do
+ expect(execute).to be_success
+
+ result = execute.payload[:timeline_event]
+ expect(result).to be_a(::IncidentManagement::TimelineEvent)
+ expect(result.author).to eq(current_user)
+ expect(result.incident).to eq(incident)
+ expect(result.project).to eq(project)
+ expect(result.note).to eq(expected_note)
+ expect(result.editable).to eq(false)
+ expect(result.action).to eq(expected_action)
+ end
+
+ it_behaves_like 'an incident management tracked event', :incident_management_timeline_event_created
+
+ it 'successfully creates a database record', :aggregate_failures do
+ expect { execute }.to change { ::IncidentManagement::TimelineEvent.count }.by(1)
+ end
+
+ it 'does not create a system note' do
+ expect { execute }.not_to change { incident.notes.reload.count }
+ end
+ end
+
+ describe '.create_incident' do
+ subject(:execute) { described_class.create_incident(incident, current_user) }
+
+ let(:expected_note) { "@#{current_user.username} created the incident" }
+ let(:expected_action) { 'issues' }
+
+ it_behaves_like 'successfully created timeline event'
+ end
+
+ describe '.reopen_incident' do
+ subject(:execute) { described_class.reopen_incident(incident, current_user) }
+
+ let(:expected_note) { "@#{current_user.username} reopened the incident" }
+ let(:expected_action) { 'issues' }
+
+ it_behaves_like 'successfully created timeline event'
+ end
+
+ describe '.resolve_incident' do
+ subject(:execute) { described_class.resolve_incident(incident, current_user) }
+
+ let(:expected_note) { "@#{current_user.username} resolved the incident" }
+ let(:expected_action) { 'status' }
+
+ it_behaves_like 'successfully created timeline event'
+ end
+
+ describe '.change_incident_status' do
+ subject(:execute) { described_class.change_incident_status(incident, current_user, escalation_status) }
+
+ let(:escalation_status) do
+ instance_double('IncidentManagement::IssuableEscalationStatus', status_name: 'acknowledged')
+ end
+
+ let(:expected_note) { "@#{current_user.username} changed the incident status to **Acknowledged**" }
+ let(:expected_action) { 'status' }
+
+ it_behaves_like 'successfully created timeline event'
+ end
+ end
end
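Illustrative sketch (not part of the diff): the added `automatically created timeline events` block drives four class-level creators through one shared example, varying only the expected note and action. Reduced to a single creator, and reusing the `incident` and `current_user` defined in this spec:

shared_examples 'creates a non-editable timeline event' do
  it 'creates a database record and no system note' do
    expect { execute }
      .to change { ::IncidentManagement::TimelineEvent.count }.by(1)
      .and not_change { incident.notes.reload.count }
  end

  it 'is not editable' do
    expect(execute.payload[:timeline_event].editable).to eq(false)
  end
end

describe '.create_incident' do
  subject(:execute) { described_class.create_incident(incident, current_user) }

  it_behaves_like 'creates a non-editable timeline event'
end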
diff --git a/spec/services/incident_management/timeline_events/update_service_spec.rb b/spec/services/incident_management/timeline_events/update_service_spec.rb
index 3da533fb2a6..728f2fa3e9d 100644
--- a/spec/services/incident_management/timeline_events/update_service_spec.rb
+++ b/spec/services/incident_management/timeline_events/update_service_spec.rb
@@ -146,7 +146,8 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
create(:incident_management_timeline_event, :non_editable, project: project, incident: incident)
end
- it_behaves_like 'error response', 'You cannot edit this timeline event.'
+ it_behaves_like 'error response',
+ 'You have insufficient permissions to manage timeline events for this incident'
end
end
@@ -155,7 +156,8 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
project.add_reporter(user)
end
- it_behaves_like 'error response', 'You have insufficient permissions to manage timeline events for this incident'
+ it_behaves_like 'error response',
+ 'You have insufficient permissions to manage timeline events for this incident'
end
end
end
diff --git a/spec/services/issuable/clone/attributes_rewriter_spec.rb b/spec/services/issuable/clone/attributes_rewriter_spec.rb
deleted file mode 100644
index 7f434b8b246..00000000000
--- a/spec/services/issuable/clone/attributes_rewriter_spec.rb
+++ /dev/null
@@ -1,140 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Issuable::Clone::AttributesRewriter do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:project1) { create(:project, :public, group: group) }
- let(:project2) { create(:project, :public, group: group) }
- let(:original_issue) { create(:issue, project: project1) }
- let(:new_issue) { create(:issue, project: project2) }
-
- subject { described_class.new(user, original_issue, new_issue) }
-
- context 'setting labels' do
- it 'sets labels present in the new project and group labels' do
- project1_label_1 = create(:label, title: 'label1', project: project1)
- project1_label_2 = create(:label, title: 'label2', project: project1)
- project2_label_1 = create(:label, title: 'label1', project: project2)
- group_label = create(:group_label, title: 'group_label', group: group)
- create(:label, title: 'label3', project: project2)
-
- original_issue.update!(labels: [project1_label_1, project1_label_2, group_label])
-
- subject.execute
-
- expect(new_issue.reload.labels).to match_array([project2_label_1, group_label])
- end
-
- it 'does not set any labels when not used on the original issue' do
- subject.execute
-
- expect(new_issue.reload.labels).to be_empty
- end
-
- it 'copies the resource label events' do
- resource_label_events = create_list(:resource_label_event, 2, issue: original_issue)
-
- subject.execute
-
- expected = resource_label_events.map(&:label_id)
-
- expect(new_issue.resource_label_events.map(&:label_id)).to match_array(expected)
- end
- end
-
- context 'setting milestones' do
- it 'sets milestone to nil when old issue milestone is not in the new project' do
- milestone = create(:milestone, title: 'milestone', project: project1)
-
- original_issue.update!(milestone: milestone)
-
- subject.execute
-
- expect(new_issue.reload.milestone).to be_nil
- end
-
- it 'copies the milestone when old issue milestone title is in the new project' do
- milestone_project1 = create(:milestone, title: 'milestone', project: project1)
- milestone_project2 = create(:milestone, title: 'milestone', project: project2)
-
- original_issue.update!(milestone: milestone_project1)
-
- subject.execute
-
- expect(new_issue.reload.milestone).to eq(milestone_project2)
- end
-
- it 'copies the milestone when old issue milestone is a group milestone' do
- milestone = create(:milestone, title: 'milestone', group: group)
-
- original_issue.update!(milestone: milestone)
-
- subject.execute
-
- expect(new_issue.reload.milestone).to eq(milestone)
- end
-
- context 'with existing milestone events' do
- let!(:milestone1_project1) { create(:milestone, title: 'milestone1', project: project1) }
- let!(:milestone2_project1) { create(:milestone, title: 'milestone2', project: project1) }
- let!(:milestone3_project1) { create(:milestone, title: 'milestone3', project: project1) }
-
- let!(:milestone1_project2) { create(:milestone, title: 'milestone1', project: project2) }
- let!(:milestone2_project2) { create(:milestone, title: 'milestone2', project: project2) }
-
- before do
- original_issue.update!(milestone: milestone2_project1)
-
- create_event(milestone1_project1)
- create_event(milestone2_project1)
- create_event(nil, 'remove')
- create_event(milestone3_project1)
- end
-
- it 'copies existing resource milestone events' do
- subject.execute
-
- new_issue_milestone_events = new_issue.reload.resource_milestone_events
- expect(new_issue_milestone_events.count).to eq(3)
-
- expect_milestone_event(new_issue_milestone_events.first, milestone: milestone1_project2, action: 'add', state: 'opened')
- expect_milestone_event(new_issue_milestone_events.second, milestone: milestone2_project2, action: 'add', state: 'opened')
- expect_milestone_event(new_issue_milestone_events.third, milestone: nil, action: 'remove', state: 'opened')
- end
-
- def create_event(milestone, action = 'add')
- create(:resource_milestone_event, issue: original_issue, milestone: milestone, action: action)
- end
-
- def expect_milestone_event(event, expected_attrs)
- expect(event.milestone_id).to eq(expected_attrs[:milestone]&.id)
- expect(event.action).to eq(expected_attrs[:action])
- expect(event.state).to eq(expected_attrs[:state])
- end
- end
-
- context 'with existing state events' do
- let!(:event1) { create(:resource_state_event, issue: original_issue, state: 'opened') }
- let!(:event2) { create(:resource_state_event, issue: original_issue, state: 'closed') }
- let!(:event3) { create(:resource_state_event, issue: original_issue, state: 'reopened') }
-
- it 'copies existing state events as expected' do
- subject.execute
-
- state_events = new_issue.reload.resource_state_events
- expect(state_events.size).to eq(3)
-
- expect_state_event(state_events.first, issue: new_issue, state: 'opened')
- expect_state_event(state_events.second, issue: new_issue, state: 'closed')
- expect_state_event(state_events.third, issue: new_issue, state: 'reopened')
- end
-
- def expect_state_event(event, expected_attrs)
- expect(event.issue_id).to eq(expected_attrs[:issue]&.id)
- expect(event.state).to eq(expected_attrs[:state])
- end
- end
- end
-end
diff --git a/spec/services/issues/clone_service_spec.rb b/spec/services/issues/clone_service_spec.rb
index abbcb1c1d48..858dfc4ab3a 100644
--- a/spec/services/issues/clone_service_spec.rb
+++ b/spec/services/issues/clone_service_spec.rb
@@ -82,12 +82,14 @@ RSpec.describe Issues::CloneService do
expect(new_issue.iid).to be_present
end
- it 'preserves create time' do
- expect(old_issue.created_at.strftime('%D')).to eq new_issue.created_at.strftime('%D')
- end
+ it 'sets created_at of new issue to the time of clone' do
+ future_time = 5.days.from_now
- it 'does not copy system notes' do
- expect(new_issue.notes.count).to eq(1)
+ travel_to(future_time) do
+ new_issue = clone_service.execute(old_issue, new_project, with_notes: with_notes)
+
+ expect(new_issue.created_at).to be_like_time(future_time)
+ end
end
it 'does not set moved_issue' do
@@ -105,6 +107,24 @@ RSpec.describe Issues::CloneService do
end
end
+ context 'issue with system notes and resource events' do
+ before do
+ create(:note, :system, noteable: old_issue, project: old_project)
+ create(:resource_label_event, label: create(:label, project: old_project), issue: old_issue)
+ create(:resource_state_event, issue: old_issue, state: :reopened)
+ create(:resource_milestone_event, issue: old_issue, action: 'remove', milestone_id: nil)
+ end
+
+ it 'does not copy system notes and resource events' do
+ new_issue = clone_service.execute(old_issue, new_project)
+
+ # 1 here is for the "cloned from" system note
+ expect(new_issue.notes.count).to eq(1)
+ expect(new_issue.resource_state_events).to be_empty
+ expect(new_issue.resource_milestone_events).to be_empty
+ end
+ end
+
context 'issue with award emoji' do
let!(:award_emoji) { create(:award_emoji, awardable: old_issue) }
@@ -124,14 +144,27 @@ RSpec.describe Issues::CloneService do
create(:issue, title: title, description: description, project: old_project, author: author, milestone: milestone)
end
- before do
- create(:resource_milestone_event, issue: old_issue, milestone: milestone, action: :add)
+ it 'copies the milestone and creates a resource_milestone_event' do
+ new_issue = clone_service.execute(old_issue, new_project)
+
+ expect(new_issue.milestone).to eq(milestone)
+ expect(new_issue.resource_milestone_events.count).to eq(1)
+ end
+ end
+
+ context 'issue with label' do
+ let(:label) { create(:group_label, group: sub_group_1) }
+ let(:new_project) { create(:project, namespace: sub_group_1) }
+
+ let(:old_issue) do
+ create(:issue, project: old_project, labels: [label])
end
- it 'does not create extra milestone events' do
+ it 'copies the label and creates a resource_label_event' do
new_issue = clone_service.execute(old_issue, new_project)
- expect(new_issue.resource_milestone_events.count).to eq(old_issue.resource_milestone_events.count)
+ expect(new_issue.labels).to contain_exactly(label)
+ expect(new_issue.resource_label_events.count).to eq(1)
end
end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 344da5a6582..e88fe1b42f0 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -122,14 +122,29 @@ RSpec.describe Issues::CloseService do
expect(new_note.author).to eq(user)
end
+ it 'adds a timeline event', :aggregate_failures do
+ expect(IncidentManagement::TimelineEvents::CreateService)
+ .to receive(:resolve_incident)
+ .with(issue, user)
+ .and_call_original
+
+ expect { service.execute(issue) }.to change { issue.incident_management_timeline_events.count }.by(1)
+ end
+
context 'when the escalation status did not change to resolved' do
let(:escalation_status) { instance_double('IncidentManagement::IssuableEscalationStatus', resolve: false) }
- it 'does not create a system note' do
+ before do
allow(issue).to receive(:incident_management_issuable_escalation_status).and_return(escalation_status)
+ end
+ it 'does not create a system note' do
expect { service.execute(issue) }.not_to change { issue.notes.count }
end
+
+ it 'does not create a timeline event' do
+ expect { service.execute(issue) }.not_to change { issue.incident_management_timeline_events.count }
+ end
end
end
end
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 9f006603f29..0bc8511e3e3 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -135,6 +135,14 @@ RSpec.describe Issues::CreateService do
issue
end
+ it 'calls IncidentManagement::TimelineEvents::CreateService.create_incident' do
+ expect(IncidentManagement::TimelineEvents::CreateService)
+ .to receive(:create_incident)
+ .with(a_kind_of(Issue), reporter)
+
+ issue
+ end
+
context 'when invalid' do
before do
opts.merge!(title: '')
@@ -489,6 +497,23 @@ RSpec.describe Issues::CreateService do
end
end
end
+
+ context 'with alert bot author' do
+ let_it_be(:user) { User.alert_bot }
+ let_it_be(:label) { create(:label, project: project) }
+
+ let(:opts) do
+ {
+ title: 'Title',
+ description: %(/label #{label.to_reference(format: :name)}")
+ }
+ end
+
+ it 'can apply labels' do
+ expect(issue).to be_persisted
+ expect(issue.labels).to eq([label])
+ end
+ end
end
context 'resolving discussions' do
diff --git a/spec/services/issues/import_csv_service_spec.rb b/spec/services/issues/import_csv_service_spec.rb
index fa40b75190f..9ad1d7dba9f 100644
--- a/spec/services/issues/import_csv_service_spec.rb
+++ b/spec/services/issues/import_csv_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Issues::ImportCsvService do
let(:project) { create(:project) }
let(:user) { create(:user) }
+ let(:assignee) { create(:user, username: 'csv_assignee') }
let(:service) do
uploader = FileUploader.new(project)
uploader.store!(file)
@@ -16,4 +17,27 @@ RSpec.describe Issues::ImportCsvService do
let(:issuables) { project.issues }
let(:email_method) { :import_issues_csv_email }
end
+
+ describe '#execute' do
+ let(:file) { fixture_file_upload('spec/fixtures/csv_complex.csv') }
+
+ subject { service.execute }
+
+ it 'sets all issuable attributes and executes quick actions' do
+ project.add_developer(user)
+ project.add_developer(assignee)
+
+ expect { subject }.to change { issuables.count }.by 3
+
+ expect(issuables.reload).to include(
+ have_attributes(
+ title: 'Title with quote"',
+ description: 'Description',
+ time_estimate: 3600,
+ assignees: include(assignee),
+ due_date: Date.new(2022, 6, 28)
+ )
+ )
+ end
+ end
end
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index 56a3c22cd7f..5a1bb2e8b74 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -47,6 +47,7 @@ RSpec.describe Issues::MoveService do
it 'creates a new issue in a new project' do
expect(new_issue.project).to eq new_project
+ expect(new_issue.namespace_id).to eq new_project.project_namespace_id
end
it 'copies issue title' do
diff --git a/spec/services/issues/related_branches_service_spec.rb b/spec/services/issues/related_branches_service_spec.rb
index 7a4bae7f852..95d456c1b05 100644
--- a/spec/services/issues/related_branches_service_spec.rb
+++ b/spec/services/issues/related_branches_service_spec.rb
@@ -3,88 +3,47 @@
require 'spec_helper'
RSpec.describe Issues::RelatedBranchesService do
+ let_it_be(:project) { create(:project, :repository, :public, public_builds: false) }
let_it_be(:developer) { create(:user) }
- let_it_be(:issue) { create(:issue) }
+ let_it_be(:issue) { create(:issue, project: project) }
let(:user) { developer }
- subject { described_class.new(project: issue.project, current_user: user) }
+ subject { described_class.new(project: project, current_user: user) }
- before do
- issue.project.add_developer(developer)
+ before_all do
+ project.add_developer(developer)
end
describe '#execute' do
- let(:sha) { 'abcdef' }
- let(:repo) { issue.project.repository }
- let(:project) { issue.project }
let(:branch_info) { subject.execute(issue) }
- def make_branch
- double('Branch', dereferenced_target: double('Target', sha: sha))
- end
-
- before do
- allow(repo).to receive(:branch_names).and_return(branch_names)
- end
-
- context 'no branches are available' do
- let(:branch_names) { [] }
-
- it 'returns an empty array' do
- expect(branch_info).to be_empty
- end
- end
-
context 'branches are available' do
- let(:missing_branch) { "#{issue.to_branch_name}-missing" }
- let(:unreadable_branch_name) { "#{issue.to_branch_name}-unreadable" }
- let(:pipeline) { build(:ci_pipeline, :success, project: project) }
- let(:unreadable_pipeline) { build(:ci_pipeline, :running) }
-
- let(:branch_names) do
- [
- generate(:branch),
- "#{issue.iid}doesnt-match",
- issue.to_branch_name,
- missing_branch,
- unreadable_branch_name
- ]
- end
+ let_it_be(:pipeline) { create(:ci_pipeline, :success, project: project, ref: issue.to_branch_name) }
- before do
- {
- issue.to_branch_name => pipeline,
- unreadable_branch_name => unreadable_pipeline
- }.each do |name, pipeline|
- allow(repo).to receive(:find_branch).with(name).and_return(make_branch)
- allow(project).to receive(:latest_pipeline).with(name, sha).and_return(pipeline)
- end
+ before_all do
+ project.repository.create_branch(issue.to_branch_name, pipeline.sha)
+ project.repository.create_branch("#{issue.iid}doesnt-match", project.repository.root_ref)
+ project.repository.create_branch("#{issue.iid}-0-stable", project.repository.root_ref)
- allow(repo).to receive(:find_branch).with(missing_branch).and_return(nil)
+ project.repository.add_tag(developer, issue.to_branch_name, pipeline.sha)
end
- it 'selects relevant branches, along with pipeline status where available' do
- expect(branch_info).to contain_exactly(
- { name: issue.to_branch_name, pipeline_status: an_instance_of(Gitlab::Ci::Status::Success) },
- { name: missing_branch, pipeline_status: be_nil },
- { name: unreadable_branch_name, pipeline_status: be_nil }
- )
+ context 'when user has access to pipelines' do
+ it 'selects relevant branches, along with pipeline status' do
+ expect(branch_info).to contain_exactly(
+ { name: issue.to_branch_name, pipeline_status: an_instance_of(Gitlab::Ci::Status::Success) }
+ )
+ end
end
- context 'the user has access to otherwise unreadable pipelines' do
- let(:user) { create(:admin) }
+ context 'when user does not have access to pipelines' do
+ let(:user) { create(:user) }
- context 'when admin mode is enabled', :enable_admin_mode do
- it 'returns info a developer could not see' do
- expect(branch_info.pluck(:pipeline_status)).to include(an_instance_of(Gitlab::Ci::Status::Running))
- end
- end
-
- context 'when admin mode is disabled' do
- it 'does not return info a developer could not see' do
- expect(branch_info.pluck(:pipeline_status)).not_to include(an_instance_of(Gitlab::Ci::Status::Running))
- end
+ it 'returns branches without pipeline status' do
+ expect(branch_info).to contain_exactly(
+ { name: issue.to_branch_name, pipeline_status: nil }
+ )
end
end
@@ -103,10 +62,10 @@ RSpec.describe Issues::RelatedBranchesService do
end
end
- context 'one of the branches is stable' do
- let(:branch_names) { ["#{issue.iid}-0-stable"] }
+ context 'no branches are available' do
+ let(:project) { create(:project, :empty_repo) }
- it 'is excluded' do
+ it 'returns an empty array' do
expect(branch_info).to be_empty
end
end
diff --git a/spec/services/issues/reopen_service_spec.rb b/spec/services/issues/reopen_service_spec.rb
index c9469b861ac..477b44f4c2c 100644
--- a/spec/services/issues/reopen_service_spec.rb
+++ b/spec/services/issues/reopen_service_spec.rb
@@ -33,6 +33,8 @@ RSpec.describe Issues::ReopenService do
context 'when user is authorized to reopen issue' do
let(:user) { create(:user) }
+ subject(:execute) { described_class.new(project: project, current_user: user).execute(issue) }
+
before do
project.add_maintainer(user)
end
@@ -41,14 +43,12 @@ RSpec.describe Issues::ReopenService do
issue.assignees << user
expect_any_instance_of(User).to receive(:invalidate_issue_cache_counts)
- described_class.new(project: project, current_user: user).execute(issue)
+ execute
end
it 'refreshes the number of opened issues' do
- service = described_class.new(project: project, current_user: user)
-
expect do
- service.execute(issue)
+ execute
BatchLoader::Executor.clear_current
end.to change { project.open_issues_count }.from(0).to(1)
@@ -61,16 +61,27 @@ RSpec.describe Issues::ReopenService do
expect(service).to receive(:delete_cache).and_call_original
end
- described_class.new(project: project, current_user: user).execute(issue)
+ execute
+ end
+
+ it 'does not create a timeline event' do
+ expect { execute }.not_to change { issue.incident_management_timeline_events.count }
end
context 'issue is incident type' do
let(:issue) { create(:incident, :closed, project: project) }
let(:current_user) { user }
- subject { described_class.new(project: project, current_user: user).execute(issue) }
-
it_behaves_like 'an incident management tracked event', :incident_management_incident_reopened
+
+ it 'creates a timeline event' do
+ expect(IncidentManagement::TimelineEvents::CreateService)
+ .to receive(:reopen_incident)
+ .with(issue, current_user)
+ .and_call_original
+
+ expect { execute }.to change { issue.incident_management_timeline_events.count }.by(1)
+ end
end
context 'when issue is not confidential' do
@@ -78,18 +89,18 @@ RSpec.describe Issues::ReopenService do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks)
- described_class.new(project: project, current_user: user).execute(issue)
+ execute
end
end
context 'when issue is confidential' do
- it 'executes confidential issue hooks' do
- issue = create(:issue, :confidential, :closed, project: project)
+ let(:issue) { create(:issue, :confidential, :closed, project: project) }
+ it 'executes confidential issue hooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks)
expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks)
- described_class.new(project: project, current_user: user).execute(issue)
+ execute
end
end
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index d11fe772023..e2e8828ae89 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -1146,11 +1146,11 @@ RSpec.describe Issues::UpdateService, :mailer do
let(:opts) { { escalation_status: { status: 'acknowledged' } } }
let(:escalation_update_class) { ::IncidentManagement::IssuableEscalationStatuses::AfterUpdateService }
- shared_examples 'updates the escalation status record' do |expected_status, expected_reason = nil|
+ shared_examples 'updates the escalation status record' do |expected_status|
let(:service_double) { instance_double(escalation_update_class) }
it 'has correct value' do
- expect(escalation_update_class).to receive(:new).with(issue, user, status_change_reason: expected_reason).and_return(service_double)
+ expect(escalation_update_class).to receive(:new).with(issue, user).and_return(service_double)
expect(service_double).to receive(:execute)
update_issue(opts)
@@ -1193,23 +1193,6 @@ RSpec.describe Issues::UpdateService, :mailer do
it_behaves_like 'updates the escalation status record', :acknowledged
- context 'with associated alert' do
- let!(:alert) { create(:alert_management_alert, issue: issue, project: project) }
-
- it 'syncs the update back to the alert' do
- update_issue(opts)
-
- expect(issue.escalation_status.status_name).to eq(:acknowledged)
- expect(alert.reload.status_name).to eq(:acknowledged)
- end
- end
-
- context 'with a status change reason provided' do
- let(:opts) { { escalation_status: { status: 'acknowledged', status_change_reason: ' by changing the alert status' } } }
-
- it_behaves_like 'updates the escalation status record', :acknowledged, ' by changing the alert status'
- end
-
context 'with unsupported status value' do
let(:opts) { { escalation_status: { status: 'unsupported-status' } } }
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index e79e13af769..fe9f3ddc14d 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -146,12 +146,14 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
end
context 'when passing an existing invite user id' do
- let(:user_id) { create(:project_member, :invited, project: source).invite_email }
+ let(:invited_member) { create(:project_member, :guest, :invited, project: source) }
+ let(:user_id) { invited_member.invite_email }
+ let(:access_level) { ProjectMember::MAINTAINER }
- it 'does not add a member' do
- expect(execute_service[:status]).to eq(:error)
- expect(execute_service[:message]).to eq("The member's email address has already been taken")
- expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(false)
+ it 'allows already invited members to be re-invited by email and updates the member access' do
+ expect(execute_service[:status]).to eq(:success)
+ expect(invited_member.reset.access_level).to eq ProjectMember::MAINTAINER
+ expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(true)
end
end
diff --git a/spec/services/members/creator_service_spec.rb b/spec/services/members/creator_service_spec.rb
index 8b1df2ab86d..ad4c649086b 100644
--- a/spec/services/members/creator_service_spec.rb
+++ b/spec/services/members/creator_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Members::CreatorService do
describe '#execute' do
it 'raises error for new member on authorization check implementation' do
expect do
- described_class.add_user(source, user, :maintainer, current_user: current_user)
+ described_class.add_member(source, user, :maintainer, current_user: current_user)
end.to raise_error(NotImplementedError)
end
@@ -19,7 +19,7 @@ RSpec.describe Members::CreatorService do
source.add_developer(user)
expect do
- described_class.add_user(source, user, :maintainer, current_user: current_user)
+ described_class.add_member(source, user, :maintainer, current_user: current_user)
end.to raise_error(NotImplementedError)
end
end
diff --git a/spec/services/members/groups/creator_service_spec.rb b/spec/services/members/groups/creator_service_spec.rb
index b80b7998eac..4130fbd44fa 100644
--- a/spec/services/members/groups/creator_service_spec.rb
+++ b/spec/services/members/groups/creator_service_spec.rb
@@ -14,13 +14,13 @@ RSpec.describe Members::Groups::CreatorService do
it_behaves_like 'owner management'
- describe '.add_users' do
+ describe '.add_members' do
it_behaves_like 'bulk member creation' do
let_it_be(:member_type) { GroupMember }
end
end
- describe '.add_user' do
+ describe '.add_member' do
it_behaves_like 'member creation' do
let_it_be(:member_type) { GroupMember }
end
@@ -30,7 +30,7 @@ RSpec.describe Members::Groups::CreatorService do
expect(AuthorizedProjectsWorker).to receive(:bulk_perform_and_wait).once
1.upto(3) do
- described_class.add_user(source, user, :maintainer)
+ described_class.add_member(source, user, :maintainer)
end
end
end
diff --git a/spec/services/members/invite_member_builder_spec.rb b/spec/services/members/invite_member_builder_spec.rb
new file mode 100644
index 00000000000..52de65364c4
--- /dev/null
+++ b/spec/services/members/invite_member_builder_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Members::InviteMemberBuilder do
+ let_it_be(:source) { create(:group) }
+ let_it_be(:existing_member) { create(:group_member) }
+
+ let(:existing_members) { { existing_member.user.id => existing_member } }
+
+ describe '#execute' do
+ context 'when user record found by email' do
+ it 'returns member from existing members hash' do
+ expect(described_class.new(source, existing_member.user.email, existing_members).execute).to eq existing_member
+ end
+
+ it 'builds a new member' do
+ user = create(:user)
+
+ member = described_class.new(source, user.email, existing_members).execute
+
+ expect(member).to be_new_record
+ expect(member.user).to eq user
+ end
+ end
+ end
+
+ context 'when no existing users found by the email' do
+ it 'finds existing member' do
+ member = create(:group_member, :invited, source: source)
+
+ expect(described_class.new(source, member.invite_email, existing_members).execute).to eq member
+ end
+
+ it 'builds a new member' do
+ email = 'test@example.com'
+
+ member = described_class.new(source, email, existing_members).execute
+
+ expect(member).to be_new_record
+ expect(member.invite_email).to eq email
+ end
+ end
+end
diff --git a/spec/services/members/invite_service_spec.rb b/spec/services/members/invite_service_spec.rb
index a948041479b..7a1512970b4 100644
--- a/spec/services/members/invite_service_spec.rb
+++ b/spec/services/members/invite_service_spec.rb
@@ -353,15 +353,16 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
context 'when member already exists' do
context 'with email' do
- let!(:invited_member) { create(:project_member, :invited, project: project) }
- let(:params) { { email: "#{invited_member.invite_email},#{project_user.email}" } }
+ let!(:invited_member) { create(:project_member, :guest, :invited, project: project) }
+ let(:params) do
+ { email: "#{invited_member.invite_email},#{project_user.email}", access_level: ProjectMember::MAINTAINER }
+ end
- it 'adds new email and returns an error for the already invited email' do
+ it 'adds new email and allows already invited members to be re-invited by email and updates the member access' do
expect_to_create_members(count: 1)
- expect(result[:status]).to eq(:error)
- expect(result[:message][invited_member.invite_email])
- .to eq("The member's email address has already been taken")
+ expect(result[:status]).to eq(:success)
expect(project.users).to include project_user
+ expect(invited_member.reset.access_level).to eq ProjectMember::MAINTAINER
end
end
diff --git a/spec/services/members/projects/creator_service_spec.rb b/spec/services/members/projects/creator_service_spec.rb
index 38955122ab0..8304bee2ffc 100644
--- a/spec/services/members/projects/creator_service_spec.rb
+++ b/spec/services/members/projects/creator_service_spec.rb
@@ -14,13 +14,13 @@ RSpec.describe Members::Projects::CreatorService do
it_behaves_like 'owner management'
- describe '.add_users' do
+ describe '.add_members' do
it_behaves_like 'bulk member creation' do
let_it_be(:member_type) { ProjectMember }
end
end
- describe '.add_user' do
+ describe '.add_member' do
it_behaves_like 'member creation' do
let_it_be(:member_type) { ProjectMember }
end
@@ -30,7 +30,7 @@ RSpec.describe Members::Projects::CreatorService do
expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to receive(:bulk_perform_in).once
1.upto(3) do
- described_class.add_user(source, user, :maintainer)
+ described_class.add_member(source, user, :maintainer)
end
end
end
diff --git a/spec/services/members/standard_member_builder_spec.rb b/spec/services/members/standard_member_builder_spec.rb
new file mode 100644
index 00000000000..16daff53d31
--- /dev/null
+++ b/spec/services/members/standard_member_builder_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Members::StandardMemberBuilder do
+ let_it_be(:source) { create(:group) }
+ let_it_be(:existing_member) { create(:group_member) }
+
+ let(:existing_members) { { existing_member.user.id => existing_member } }
+
+ describe '#execute' do
+ it 'returns member from existing members hash' do
+ expect(described_class.new(source, existing_member.user, existing_members).execute).to eq existing_member
+ end
+
+ it 'builds a new member' do
+ user = create(:user)
+
+ member = described_class.new(source, user, existing_members).execute
+
+ expect(member).to be_new_record
+ expect(member.user).to eq user
+ end
+ end
+end
diff --git a/spec/services/merge_requests/approval_service_spec.rb b/spec/services/merge_requests/approval_service_spec.rb
index e500102a00c..e1fbb945ee3 100644
--- a/spec/services/merge_requests/approval_service_spec.rb
+++ b/spec/services/merge_requests/approval_service_spec.rb
@@ -90,7 +90,7 @@ RSpec.describe MergeRequests::ApprovalService do
it 'tracks merge request approve action' do
expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
- .to receive(:track_approve_mr_action).with(user: user)
+ .to receive(:track_approve_mr_action).with(user: user, merge_request: merge_request)
service.execute(merge_request)
end
diff --git a/spec/services/merge_requests/create_pipeline_service_spec.rb b/spec/services/merge_requests/create_pipeline_service_spec.rb
index 08ad05b54da..03a37ea59a3 100644
--- a/spec/services/merge_requests/create_pipeline_service_spec.rb
+++ b/spec/services/merge_requests/create_pipeline_service_spec.rb
@@ -13,7 +13,6 @@ RSpec.describe MergeRequests::CreatePipelineService do
let(:params) { {} }
before do
- stub_feature_flags(ci_disallow_to_create_merge_request_pipelines_in_target_project: false)
project.add_developer(user)
end
@@ -92,16 +91,6 @@ RSpec.describe MergeRequests::CreatePipelineService do
end
end
end
-
- context 'when ci_disallow_to_create_merge_request_pipelines_in_target_project feature flag is enabled' do
- before do
- stub_feature_flags(ci_disallow_to_create_merge_request_pipelines_in_target_project: true)
- end
-
- it 'creates a pipeline in the source project' do
- expect(response.payload.project).to eq(source_project)
- end
- end
end
context 'when actor has permission to create pipelines in forked project' do
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index c0c56a72192..9c9bcb79990 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -212,7 +212,6 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
before do
- stub_feature_flags(ci_disallow_to_create_merge_request_pipelines_in_target_project: false)
target_project.add_developer(user2)
target_project.add_maintainer(user)
end
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index eecf7c21cba..4b7dd84474a 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -243,6 +243,25 @@ RSpec.describe MergeRequests::RefreshService do
end
end
+ context 'when ci.skip push_options are passed' do
+ let(:params) { { push_options: { ci: { skip: true } } } }
+ let(:service_instance) { service.new(project: project, current_user: @user, params: params) }
+
+ subject { service_instance.execute(@oldrev, @newrev, ref) }
+
+ it 'creates a skipped detached merge request pipeline with commits' do
+ expect { subject }
+ .to change { @merge_request.pipelines_for_merge_request.count }.by(1)
+ .and change { @another_merge_request.pipelines_for_merge_request.count }.by(0)
+
+ expect(@merge_request.has_commits?).to be_truthy
+ expect(@another_merge_request.has_commits?).to be_falsy
+
+ pipeline = @merge_request.pipelines_for_merge_request.last
+ expect(pipeline).to be_skipped
+ end
+ end
+
it 'does not create detached merge request pipeline for forked project' do
expect { subject }
.not_to change { @fork_merge_request.pipelines_for_merge_request.count }
@@ -267,10 +286,6 @@ RSpec.describe MergeRequests::RefreshService do
context 'when service runs on forked project' do
let(:project) { @fork_project }
- before do
- stub_feature_flags(ci_disallow_to_create_merge_request_pipelines_in_target_project: false)
- end
-
it 'creates detached merge request pipeline for fork merge request' do
expect { subject }
.to change { @fork_merge_request.pipelines_for_merge_request.count }.by(1)
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 7164ba8fac0..212f75d853f 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -845,6 +845,8 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
context 'when the draft status is changed' do
+ let(:title) { 'New Title' }
+ let(:draft_title) { "Draft: #{title}" }
let!(:non_subscriber) { create(:user) }
let!(:subscriber) do
create(:user) { |u| merge_request.toggle_subscription(u, project) }
@@ -857,7 +859,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
context 'removing draft status' do
before do
- merge_request.update_attribute(:title, 'Draft: New Title')
+ merge_request.update_attribute(:title, draft_title)
end
it 'sends notifications for subscribers', :sidekiq_might_not_need_inline do
@@ -870,9 +872,22 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
should_email(subscriber)
should_not_email(non_subscriber)
end
+
+ context 'when removing through wip_event param' do
+ it 'removes Draft from the title' do
+ expect { update_merge_request({ wip_event: "ready" }) }
+ .to change { merge_request.title }
+ .from(draft_title)
+ .to(title)
+ end
+ end
end
context 'adding draft status' do
+ before do
+ merge_request.update_attribute(:title, title)
+ end
+
it 'does not send notifications', :sidekiq_might_not_need_inline do
opts = { title: 'Draft: New title' }
@@ -883,6 +898,15 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
should_not_email(subscriber)
should_not_email(non_subscriber)
end
+
+ context 'when adding through wip_event param' do
+ it 'adds Draft to the title' do
+ expect { update_merge_request({ wip_event: "draft" }) }
+ .to change { merge_request.title }
+ .from(title)
+ .to(draft_title)
+ end
+ end
end
end
diff --git a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
index de84666ca1d..b44c256802f 100644
--- a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
+++ b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
@@ -54,7 +54,6 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
:team | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days, pipeline_created_at: frozen_time - 2.days, trial_started_at: frozen_time - 2.days }
:team | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days, pipeline_created_at: frozen_time - 6.days, trial_started_at: frozen_time - 6.days }
:team | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days, pipeline_created_at: frozen_time - 11.days, trial_started_at: frozen_time - 11.days }
- :experience | 30 | { created_at: frozen_time - 31.days, git_write_at: frozen_time - 31.days }
end
with_them do
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 032f35cfc29..98fe8a40c61 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -147,6 +147,34 @@ RSpec.describe NotificationService, :mailer do
end
end
+ shared_examples 'participating by confidential note notification' do
+ context 'when user is mentioned on confidential note' do
+ let_it_be(:guest_1) { create(:user) }
+ let_it_be(:guest_2) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+
+ before do
+ issuable.resource_parent.add_guest(guest_1)
+ issuable.resource_parent.add_guest(guest_2)
+ issuable.resource_parent.add_reporter(reporter)
+ end
+
+ it 'only emails authorized users' do
+ confidential_note_text = "#{guest_1.to_reference} and #{guest_2.to_reference} and #{reporter.to_reference}"
+ note_text = "Mentions #{guest_2.to_reference}"
+ create(:note_on_issue, noteable: issuable, project_id: project.id, note: confidential_note_text, confidential: true)
+ create(:note_on_issue, noteable: issuable, project_id: project.id, note: note_text)
+ reset_delivered_emails!
+
+ notification_trigger
+
+ should_not_email(guest_1)
+ should_email(guest_2)
+ should_email(reporter)
+ end
+ end
+ end
+
shared_examples 'participating by assignee notification' do
it 'emails the participant' do
issuable.assignees << participant
@@ -554,8 +582,8 @@ RSpec.describe NotificationService, :mailer do
before do
note.project.namespace_id = group.id
- group.add_user(@u_watcher, GroupMember::MAINTAINER)
- group.add_user(@u_custom_global, GroupMember::MAINTAINER)
+ group.add_member(@u_watcher, GroupMember::MAINTAINER)
+ group.add_member(@u_custom_global, GroupMember::MAINTAINER)
note.project.save!
@u_watcher.notification_settings_for(note.project).participating!
@@ -736,6 +764,20 @@ RSpec.describe NotificationService, :mailer do
let(:notification_target) { note }
let(:notification_trigger) { notification.new_note(note) }
end
+
+ context 'when note is confidential' do
+ let(:note) { create(:note_on_issue, author: author, noteable: issue, project_id: issue.project_id, note: '@all mentioned', confidential: true) }
+ let(:guest) { create(:user) }
+
+ it 'does not notify users that cannot read note' do
+ project.add_guest(guest)
+ reset_delivered_emails!
+
+ notification.new_note(note)
+
+ should_not_email(guest)
+ end
+ end
end
end
@@ -1376,6 +1418,11 @@ RSpec.describe NotificationService, :mailer do
let(:notification_trigger) { notification.reassigned_issue(issue, @u_disabled, [assignee]) }
end
+ it_behaves_like 'participating by confidential note notification' do
+ let(:issuable) { issue }
+ let(:notification_trigger) { notification.reassigned_issue(issue, @u_disabled, [assignee]) }
+ end
+
it_behaves_like 'project emails are disabled' do
let(:notification_target) { issue }
let(:notification_trigger) { notification.reassigned_issue(issue, @u_disabled, [assignee]) }
@@ -1494,6 +1541,11 @@ RSpec.describe NotificationService, :mailer do
let(:notification_target) { issue }
let(:notification_trigger) { notification.removed_milestone_issue(issue, issue.author) }
end
+
+ it_behaves_like 'participating by confidential note notification' do
+ let(:issuable) { issue }
+ let(:notification_trigger) { notification.removed_milestone_issue(issue, issue.author) }
+ end
end
context 'confidential issues' do
@@ -1616,6 +1668,11 @@ RSpec.describe NotificationService, :mailer do
let(:notification_trigger) { notification.close_issue(issue, @u_disabled) }
end
+ it_behaves_like 'participating by confidential note notification' do
+ let(:issuable) { issue }
+ let(:notification_trigger) { notification.close_issue(issue, @u_disabled) }
+ end
+
it 'adds "subscribed" reason to subscriber emails' do
user_1 = create(:user)
issue.subscribe(user_1)
@@ -1658,6 +1715,11 @@ RSpec.describe NotificationService, :mailer do
let(:notification_trigger) { notification.reopen_issue(issue, @u_disabled) }
end
+ it_behaves_like 'participating by confidential note notification' do
+ let(:issuable) { issue }
+ let(:notification_trigger) { notification.reopen_issue(issue, @u_disabled) }
+ end
+
it_behaves_like 'project emails are disabled' do
let(:notification_target) { issue }
let(:notification_trigger) { notification.reopen_issue(issue, @u_disabled) }
@@ -1689,6 +1751,11 @@ RSpec.describe NotificationService, :mailer do
let(:notification_trigger) { notification.issue_moved(issue, new_issue, @u_disabled) }
end
+ it_behaves_like 'participating by confidential note notification' do
+ let(:issuable) { issue }
+ let(:notification_trigger) { notification.issue_moved(issue, new_issue, @u_disabled) }
+ end
+
it_behaves_like 'project emails are disabled' do
let(:notification_target) { issue }
let(:notification_trigger) { notification.issue_moved(issue, new_issue, @u_disabled) }
@@ -1720,6 +1787,11 @@ RSpec.describe NotificationService, :mailer do
let(:notification_trigger) { notification.issue_cloned(issue, new_issue, @u_disabled) }
end
+ it_behaves_like 'participating by confidential note notification' do
+ let(:issuable) { issue }
+ let(:notification_trigger) { notification.issue_cloned(issue, new_issue, @u_disabled) }
+ end
+
it_behaves_like 'project emails are disabled' do
let(:notification_target) { issue }
let(:notification_trigger) { notification.issue_cloned(issue, new_issue, @u_disabled) }
@@ -1765,6 +1837,11 @@ RSpec.describe NotificationService, :mailer do
let(:notification_trigger) { notification.issue_due(issue) }
end
+ it_behaves_like 'participating by confidential note notification' do
+ let(:issuable) { issue }
+ let(:notification_trigger) { notification.issue_due(issue) }
+ end
+
it_behaves_like 'project emails are disabled' do
let(:notification_target) { issue }
let(:notification_trigger) { notification.issue_due(issue) }
@@ -3773,7 +3850,7 @@ RSpec.describe NotificationService, :mailer do
# Group member: global=watch, group=global
@g_global_watcher ||= create_global_setting_for(create(:user), :watch)
- group.add_users([@g_watcher, @g_global_watcher], :maintainer)
+ group.add_members([@g_watcher, @g_global_watcher], :maintainer)
group
end
diff --git a/spec/services/packages/cleanup/execute_policy_service_spec.rb b/spec/services/packages/cleanup/execute_policy_service_spec.rb
new file mode 100644
index 00000000000..93335c4a821
--- /dev/null
+++ b/spec/services/packages/cleanup/execute_policy_service_spec.rb
@@ -0,0 +1,163 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Cleanup::ExecutePolicyService do
+ let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:policy) { create(:packages_cleanup_policy, project: project) }
+
+ let(:service) { described_class.new(policy) }
+
+ describe '#execute' do
+ subject(:execute) { service.execute }
+
+ context 'with the keep_n_duplicated_files parameter' do
+ let_it_be(:package1) { create(:package, project: project) }
+ let_it_be(:package2) { create(:package, project: project) }
+ let_it_be(:package3) { create(:package, project: project) }
+ let_it_be(:package4) { create(:package, :pending_destruction, project: project) }
+
+ let_it_be(:package_file1_1) { create(:package_file, package: package1, file_name: 'file_name1') }
+ let_it_be(:package_file1_2) { create(:package_file, package: package1, file_name: 'file_name1') }
+ let_it_be(:package_file1_3) { create(:package_file, package: package1, file_name: 'file_name1') }
+
+ let_it_be(:package_file1_4) { create(:package_file, package: package1, file_name: 'file_name2') }
+ let_it_be(:package_file1_5) { create(:package_file, package: package1, file_name: 'file_name2') }
+ let_it_be(:package_file1_6) { create(:package_file, package: package1, file_name: 'file_name2') }
+ let_it_be(:package_file1_7) do
+ create(:package_file, :pending_destruction, package: package1, file_name: 'file_name2')
+ end
+
+ let_it_be(:package_file2_1) { create(:package_file, package: package2, file_name: 'file_name1') }
+ let_it_be(:package_file2_2) { create(:package_file, package: package2, file_name: 'file_name1') }
+ let_it_be(:package_file2_3) { create(:package_file, package: package2, file_name: 'file_name1') }
+ let_it_be(:package_file2_4) { create(:package_file, package: package2, file_name: 'file_name1') }
+
+ let_it_be(:package_file3_1) { create(:package_file, package: package3, file_name: 'file_name_test') }
+
+ let_it_be(:package_file4_1) { create(:package_file, package: package4, file_name: 'file_name1') }
+ let_it_be(:package_file4_2) { create(:package_file, package: package4, file_name: 'file_name1') }
+
+ let(:package_files_1) { package1.package_files.installable }
+ let(:package_files_2) { package2.package_files.installable }
+ let(:package_files_3) { package3.package_files.installable }
+
+ context 'set to less than the total number of duplicated files' do
+ before do
+ # for each package file duplicate, we keep only the most recent one
+ policy.update!(keep_n_duplicated_package_files: '1')
+ end
+
+ shared_examples 'keeping the most recent package files' do
+ let(:response_payload) do
+ {
+ counts: {
+ marked_package_files_total_count: 7,
+ unique_package_id_and_file_name_total_count: 3
+ },
+ timeout: false
+ }
+ end
+
+ it 'only keeps the most recent package files' do
+ expect { execute }.to change { ::Packages::PackageFile.installable.count }.by(-7)
+
+ expect(package_files_1).to contain_exactly(package_file1_3, package_file1_6)
+ expect(package_files_2).to contain_exactly(package_file2_4)
+ expect(package_files_3).to contain_exactly(package_file3_1)
+
+ expect(execute).to be_success
+ expect(execute.message).to eq("Packages cleanup policy executed for project #{project.id}")
+ expect(execute.payload).to eq(response_payload)
+ end
+ end
+
+ it_behaves_like 'keeping the most recent package files'
+
+ context 'when the service needs to loop' do
+ before do
+ stub_const("#{described_class.name}::DUPLICATED_FILES_BATCH_SIZE", 2)
+ end
+
+ it_behaves_like 'keeping the most recent package files' do
+ before do
+ expect(::Packages::MarkPackageFilesForDestructionService)
+ .to receive(:new).exactly(3).times.and_call_original
+ end
+ end
+
+ context 'when a timeout is hit' do
+ let(:response_payload) do
+ {
+ counts: {
+ marked_package_files_total_count: 4,
+ unique_package_id_and_file_name_total_count: 3
+ },
+ timeout: true
+ }
+ end
+
+ let(:service_timeout_response) do
+ ServiceResponse.error(
+ message: 'Timeout while marking package files as pending destruction',
+ payload: { marked_package_files_count: 0 }
+ )
+ end
+
+ before do
+ mock_service_timeout(on_iteration: 3)
+ end
+
+ it 'keeps part of the most recent package files' do
+ expect { execute }
+ .to change { ::Packages::PackageFile.installable.count }.by(-4)
+ .and not_change { package_files_2.count } # untouched because of the timeout
+ .and not_change { package_files_3.count } # untouched because of the timeout
+
+ expect(package_files_1).to contain_exactly(package_file1_3, package_file1_6)
+ expect(execute).to be_success
+ expect(execute.message).to eq("Packages cleanup policy executed for project #{project.id}")
+ expect(execute.payload).to eq(response_payload)
+ end
+
+ def mock_service_timeout(on_iteration:)
+ execute_call_count = 1
+ expect_next_instances_of(::Packages::MarkPackageFilesForDestructionService, 3) do |service|
+ expect(service).to receive(:execute).and_wrap_original do |m, *args|
+ # timeout if we are on the right iteration
+ if execute_call_count == on_iteration
+ service_timeout_response
+ else
+ execute_call_count += 1
+ m.call(*args)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ context 'set to more than the total number of duplicated files' do
+ before do
+ # using the biggest value for keep_n_duplicated_package_files
+ policy.update!(keep_n_duplicated_package_files: '50')
+ end
+
+ it 'keeps all package files' do
+ expect { execute }.not_to change { ::Packages::PackageFile.installable.count }
+ end
+ end
+
+ context 'set to all' do
+ before do
+ policy.update!(keep_n_duplicated_package_files: 'all')
+ end
+
+ it 'skips the policy' do
+ expect(::Packages::MarkPackageFilesForDestructionService).not_to receive(:new)
+ expect { execute }.not_to change { ::Packages::PackageFile.installable.count }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/debian/create_package_file_service_spec.rb b/spec/services/packages/debian/create_package_file_service_spec.rb
index 74b97a4f941..c8292b2d5c2 100644
--- a/spec/services/packages/debian/create_package_file_service_spec.rb
+++ b/spec/services/packages/debian/create_package_file_service_spec.rb
@@ -102,5 +102,13 @@ RSpec.describe Packages::Debian::CreatePackageFileService do
expect { subject.execute }.to raise_error(ActiveRecord::RecordInvalid)
end
end
+
+ context 'with FIPS mode enabled', :fips_mode do
+ let(:file) { nil }
+
+ it 'raises an error' do
+ expect { subject.execute }.to raise_error(::Packages::FIPS::DisabledError)
+ end
+ end
end
end
diff --git a/spec/services/packages/debian/extract_changes_metadata_service_spec.rb b/spec/services/packages/debian/extract_changes_metadata_service_spec.rb
index ced846866c2..4765e6c3bd4 100644
--- a/spec/services/packages/debian/extract_changes_metadata_service_spec.rb
+++ b/spec/services/packages/debian/extract_changes_metadata_service_spec.rb
@@ -13,6 +13,12 @@ RSpec.describe Packages::Debian::ExtractChangesMetadataService do
subject { service.execute }
+ context 'with FIPS mode enabled', :fips_mode do
+ it 'raises an error' do
+ expect { subject }.to raise_error(::Packages::FIPS::DisabledError)
+ end
+ end
+
context 'with valid package file' do
it 'extract metadata', :aggregate_failures do
expected_fields = { 'Architecture' => 'source amd64', 'Binary' => 'libsample0 sample-dev sample-udeb' }
diff --git a/spec/services/packages/debian/generate_distribution_service_spec.rb b/spec/services/packages/debian/generate_distribution_service_spec.rb
index 53805d03655..fe5fbfbbe1f 100644
--- a/spec/services/packages/debian/generate_distribution_service_spec.rb
+++ b/spec/services/packages/debian/generate_distribution_service_spec.rb
@@ -15,6 +15,12 @@ RSpec.describe Packages::Debian::GenerateDistributionService do
context "for #{container_type}" do
include_context 'with Debian distribution', container_type
+ context 'with FIPS mode enabled', :fips_mode do
+ it 'raises an error' do
+ expect { subject }.to raise_error(::Packages::FIPS::DisabledError)
+ end
+ end
+
it_behaves_like 'Generate Debian Distribution and component files'
end
end
diff --git a/spec/services/packages/mark_package_files_for_destruction_service_spec.rb b/spec/services/packages/mark_package_files_for_destruction_service_spec.rb
index a836de1f7f6..66534338003 100644
--- a/spec/services/packages/mark_package_files_for_destruction_service_spec.rb
+++ b/spec/services/packages/mark_package_files_for_destruction_service_spec.rb
@@ -6,9 +6,11 @@ RSpec.describe Packages::MarkPackageFilesForDestructionService, :aggregate_failu
let(:service) { described_class.new(package_files) }
describe '#execute', :aggregate_failures do
- subject { service.execute }
+ let(:batch_deadline) { nil }
- shared_examples 'executing successfully' do
+ subject { service.execute(batch_deadline: batch_deadline) }
+
+ shared_examples 'executing successfully' do |marked_package_files_count: 0|
it 'marks package files for destruction' do
expect { subject }
.to change { ::Packages::PackageFile.pending_destruction.count }.by(package_files.size)
@@ -17,6 +19,7 @@ RSpec.describe Packages::MarkPackageFilesForDestructionService, :aggregate_failu
it 'executes successfully' do
expect(subject).to be_success
expect(subject.message).to eq('Package files are now pending destruction')
+ expect(subject.payload).to eq(marked_package_files_count: marked_package_files_count)
end
end
@@ -30,13 +33,49 @@ RSpec.describe Packages::MarkPackageFilesForDestructionService, :aggregate_failu
let_it_be(:package_file) { create(:package_file) }
let_it_be(:package_files) { ::Packages::PackageFile.id_in(package_file.id) }
- it_behaves_like 'executing successfully'
+ it_behaves_like 'executing successfully', marked_package_files_count: 1
end
context 'with many package files' do
let_it_be(:package_files) { ::Packages::PackageFile.id_in(create_list(:package_file, 3).map(&:id)) }
- it_behaves_like 'executing successfully'
+ it_behaves_like 'executing successfully', marked_package_files_count: 3
+
+ context 'with a batch deadline' do
+ let_it_be(:batch_deadline) { 250.seconds.from_now }
+
+ context 'when the deadline is not hit' do
+ before do
+ expect(Time.zone).to receive(:now).and_return(batch_deadline - 10.seconds)
+ end
+
+ it_behaves_like 'executing successfully', marked_package_files_count: 3
+ end
+
+ context 'when the deadline is hit' do
+ it 'does not execute the batch loop' do
+ expect(Time.zone).to receive(:now).and_return(batch_deadline + 10.seconds)
+ expect { subject }.to not_change { ::Packages::PackageFile.pending_destruction.count }
+ expect(subject).to be_error
+ expect(subject.message).to eq('Timeout while marking package files as pending destruction')
+ expect(subject.payload).to eq(marked_package_files_count: 0)
+ end
+ end
+ end
+
+ context 'when a batch size is defined' do
+ let_it_be(:batch_deadline) { 250.seconds.from_now }
+
+ let(:batch_size) { 2 }
+
+ subject { service.execute(batch_deadline: batch_deadline, batch_size: batch_size) }
+
+ before do
+ expect(Time.zone).to receive(:now).twice.and_call_original
+ end
+
+ it_behaves_like 'executing successfully', marked_package_files_count: 3
+ end
end
context 'with an error during the update' do
diff --git a/spec/services/packages/pypi/create_package_service_spec.rb b/spec/services/packages/pypi/create_package_service_spec.rb
index 354ac92b99a..6794ab4d9d6 100644
--- a/spec/services/packages/pypi/create_package_service_spec.rb
+++ b/spec/services/packages/pypi/create_package_service_spec.rb
@@ -42,6 +42,21 @@ RSpec.describe Packages::Pypi::CreatePackageService, :aggregate_failures do
end
end
+ context 'with FIPS mode', :fips_mode do
+ it 'does not generate file_md5' do
+ expect { subject }.to change { Packages::Package.pypi.count }.by(1)
+
+ expect(created_package.name).to eq 'foo'
+ expect(created_package.version).to eq '1.0'
+
+ expect(created_package.pypi_metadatum.required_python).to eq '>=2.7'
+ expect(created_package.package_files.size).to eq 1
+ expect(created_package.package_files.first.file_name).to eq 'foo.tgz'
+ expect(created_package.package_files.first.file_sha256).to eq sha256
+ expect(created_package.package_files.first.file_md5).to be_nil
+ end
+ end
+
context 'without required_python' do
before do
params.delete(:requires_python)
diff --git a/spec/services/pages/delete_service_spec.rb b/spec/services/pages/delete_service_spec.rb
index 0c0b2c0431b..29d9a47c72e 100644
--- a/spec/services/pages/delete_service_spec.rb
+++ b/spec/services/pages/delete_service_spec.rb
@@ -45,7 +45,11 @@ RSpec.describe Pages::DeleteService do
end
it 'publishes a ProjectDeleted event with project id and namespace id' do
- expected_data = { project_id: project.id, namespace_id: project.namespace_id }
+ expected_data = {
+ project_id: project.id,
+ namespace_id: project.namespace_id,
+ root_namespace_id: project.root_namespace.id
+ }
expect { service.execute }.to publish_event(Pages::PageDeletedEvent).with(expected_data)
end
diff --git a/spec/services/pod_logs/base_service_spec.rb b/spec/services/pod_logs/base_service_spec.rb
deleted file mode 100644
index d2f6300ab65..00000000000
--- a/spec/services/pod_logs/base_service_spec.rb
+++ /dev/null
@@ -1,147 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ::PodLogs::BaseService do
- include KubernetesHelpers
-
- let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
-
- let(:namespace) { 'autodevops-deploy-9-production' }
-
- let(:pod_name) { 'pod-1' }
- let(:pod_name_2) { 'pod-2' }
- let(:container_name) { 'container-0' }
- let(:params) { {} }
- let(:raw_pods) do
- [
- {
- name: pod_name,
- container_names: %w(container-0-0 container-0-1)
- },
- {
- name: pod_name_2,
- container_names: %w(container-1-0 container-1-1)
- }
- ]
- end
-
- subject { described_class.new(cluster, namespace, params: params) }
-
- describe '#initialize' do
- let(:params) do
- {
- 'container_name' => container_name,
- 'another_param' => 'foo'
- }
- end
-
- it 'filters the parameters' do
- expect(subject.cluster).to eq(cluster)
- expect(subject.namespace).to eq(namespace)
- expect(subject.params).to eq({
- 'container_name' => container_name
- })
- expect(subject.params.equal?(params)).to be(false)
- end
- end
-
- describe '#check_arguments' do
- context 'when cluster and namespace are provided' do
- it 'returns success' do
- result = subject.send(:check_arguments, {})
-
- expect(result[:status]).to eq(:success)
- end
- end
-
- context 'when cluster is nil' do
- let(:cluster) { nil }
-
- it 'returns an error' do
- result = subject.send(:check_arguments, {})
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Cluster does not exist')
- end
- end
-
- context 'when namespace is nil' do
- let(:namespace) { nil }
-
- it 'returns an error' do
- result = subject.send(:check_arguments, {})
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Namespace is empty')
- end
- end
-
- context 'when namespace is empty' do
- let(:namespace) { '' }
-
- it 'returns an error' do
- result = subject.send(:check_arguments, {})
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Namespace is empty')
- end
- end
-
- context 'when pod_name and container_name are provided' do
- let(:params) do
- {
- 'pod_name' => pod_name,
- 'container_name' => container_name
- }
- end
-
- it 'returns success' do
- result = subject.send(:check_arguments, {})
-
- expect(result[:status]).to eq(:success)
- expect(result[:pod_name]).to eq(pod_name)
- expect(result[:container_name]).to eq(container_name)
- end
- end
-
- context 'when pod_name is not a string' do
- let(:params) do
- {
- 'pod_name' => { something_that_is: :not_a_string }
- }
- end
-
- it 'returns error' do
- result = subject.send(:check_arguments, {})
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Invalid pod_name')
- end
- end
-
- context 'when container_name is not a string' do
- let(:params) do
- {
- 'container_name' => { something_that_is: :not_a_string }
- }
- end
-
- it 'returns error' do
- result = subject.send(:check_arguments, {})
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Invalid container_name')
- end
- end
- end
-
- describe '#get_pod_names' do
- it 'returns success with a list of pods' do
- result = subject.send(:get_pod_names, raw_pods: raw_pods)
-
- expect(result[:status]).to eq(:success)
- expect(result[:pods]).to eq([pod_name, pod_name_2])
- end
- end
-end
diff --git a/spec/services/pod_logs/elasticsearch_service_spec.rb b/spec/services/pod_logs/elasticsearch_service_spec.rb
deleted file mode 100644
index 1111d9b9307..00000000000
--- a/spec/services/pod_logs/elasticsearch_service_spec.rb
+++ /dev/null
@@ -1,309 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ::PodLogs::ElasticsearchService do
- let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
-
- let(:namespace) { 'autodevops-deploy-9-production' }
-
- let(:pod_name) { 'pod-1' }
- let(:container_name) { 'container-1' }
- let(:search) { 'foo -bar' }
- let(:start_time) { '2019-01-02T12:13:14+02:00' }
- let(:end_time) { '2019-01-03T12:13:14+02:00' }
- let(:cursor) { '9999934,1572449784442' }
- let(:params) { {} }
- let(:expected_logs) do
- [
- { message: "Log 1", timestamp: "2019-12-13T14:04:22.123456Z" },
- { message: "Log 2", timestamp: "2019-12-13T14:04:23.123456Z" },
- { message: "Log 3", timestamp: "2019-12-13T14:04:24.123456Z" }
- ]
- end
-
- let(:raw_pods) do
- [
- {
- name: pod_name,
- container_names: [container_name, "#{container_name}-1"]
- }
- ]
- end
-
- subject { described_class.new(cluster, namespace, params: params) }
-
- describe '#get_raw_pods' do
- before do
- create(:clusters_integrations_elastic_stack, cluster: cluster)
- end
-
- it 'returns success with elasticsearch response' do
- allow_any_instance_of(::Clusters::Integrations::ElasticStack)
- .to receive(:elasticsearch_client)
- .and_return(Elasticsearch::Transport::Client.new)
- allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Pods)
- .to receive(:pods)
- .with(namespace)
- .and_return(raw_pods)
-
- result = subject.send(:get_raw_pods, {})
-
- expect(result[:status]).to eq(:success)
- expect(result[:raw_pods]).to eq(raw_pods)
- end
-
- it 'returns an error when ES is unreachable' do
- allow_any_instance_of(::Clusters::Integrations::ElasticStack)
- .to receive(:elasticsearch_client)
- .and_return(nil)
-
- result = subject.send(:get_raw_pods, {})
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Unable to connect to Elasticsearch')
- end
-
- it 'handles server errors from elasticsearch' do
- allow_any_instance_of(::Clusters::Integrations::ElasticStack)
- .to receive(:elasticsearch_client)
- .and_return(Elasticsearch::Transport::Client.new)
- allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Pods)
- .to receive(:pods)
- .and_raise(Elasticsearch::Transport::Transport::Errors::ServiceUnavailable.new)
-
- result = subject.send(:get_raw_pods, {})
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Elasticsearch returned status code: ServiceUnavailable')
- end
- end
-
- describe '#check_times' do
- context 'with start and end provided and valid' do
- let(:params) do
- {
- 'start_time' => start_time,
- 'end_time' => end_time
- }
- end
-
- it 'returns success with times' do
- result = subject.send(:check_times, {})
-
- expect(result[:status]).to eq(:success)
- expect(result[:start_time]).to eq(start_time)
- expect(result[:end_time]).to eq(end_time)
- end
- end
-
- context 'with start and end not provided' do
- let(:params) do
- {}
- end
-
- it 'returns success with nothing else' do
- result = subject.send(:check_times, {})
-
- expect(result.keys.length).to eq(1)
- expect(result[:status]).to eq(:success)
- end
- end
-
- context 'with start valid and end invalid' do
- let(:params) do
- {
- 'start_time' => start_time,
- 'end_time' => 'invalid date'
- }
- end
-
- it 'returns error' do
- result = subject.send(:check_times, {})
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Invalid start or end time format')
- end
- end
-
- context 'with start invalid and end valid' do
- let(:params) do
- {
- 'start_time' => 'invalid date',
- 'end_time' => end_time
- }
- end
-
- it 'returns error' do
- result = subject.send(:check_times, {})
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Invalid start or end time format')
- end
- end
- end
-
- describe '#check_search' do
- context 'with search provided and valid' do
- let(:params) do
- {
- 'search' => search
- }
- end
-
- it 'returns success with search' do
- result = subject.send(:check_search, {})
-
- expect(result[:status]).to eq(:success)
- expect(result[:search]).to eq(search)
- end
- end
-
- context 'with search provided and invalid' do
- let(:params) do
- {
- 'search' => { term: "foo-bar" }
- }
- end
-
- it 'returns error' do
- result = subject.send(:check_search, {})
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq("Invalid search parameter")
- end
- end
-
- context 'with search not provided' do
- let(:params) do
- {}
- end
-
- it 'returns success with nothing else' do
- result = subject.send(:check_search, {})
-
- expect(result.keys.length).to eq(1)
- expect(result[:status]).to eq(:success)
- end
- end
- end
-
- describe '#check_cursor' do
- context 'with cursor provided and valid' do
- let(:params) do
- {
- 'cursor' => cursor
- }
- end
-
- it 'returns success with cursor' do
- result = subject.send(:check_cursor, {})
-
- expect(result[:status]).to eq(:success)
- expect(result[:cursor]).to eq(cursor)
- end
- end
-
- context 'with cursor provided and invalid' do
- let(:params) do
- {
- 'cursor' => { term: "foo-bar" }
- }
- end
-
- it 'returns error' do
- result = subject.send(:check_cursor, {})
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq("Invalid cursor parameter")
- end
- end
-
- context 'with cursor not provided' do
- let(:params) do
- {}
- end
-
- it 'returns success with nothing else' do
- result = subject.send(:check_cursor, {})
-
- expect(result.keys.length).to eq(1)
- expect(result[:status]).to eq(:success)
- end
- end
- end
-
- describe '#pod_logs' do
- let(:result_arg) do
- {
- pod_name: pod_name,
- container_name: container_name,
- search: search,
- start_time: start_time,
- end_time: end_time,
- cursor: cursor
- }
- end
-
- let(:expected_cursor) { '9999934,1572449784442' }
-
- before do
- create(:clusters_integrations_elastic_stack, cluster: cluster)
- end
-
- it 'returns the logs' do
- allow_any_instance_of(::Clusters::Integrations::ElasticStack)
- .to receive(:elasticsearch_client)
- .and_return(Elasticsearch::Transport::Client.new)
- allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
- .to receive(:pod_logs)
- .with(namespace, pod_name: pod_name, container_name: container_name, search: search, start_time: start_time, end_time: end_time, cursor: cursor, chart_above_v2: true)
- .and_return({ logs: expected_logs, cursor: expected_cursor })
-
- result = subject.send(:pod_logs, result_arg)
-
- expect(result[:status]).to eq(:success)
- expect(result[:logs]).to eq(expected_logs)
- expect(result[:cursor]).to eq(expected_cursor)
- end
-
- it 'returns an error when ES is unreachable' do
- allow_any_instance_of(::Clusters::Integrations::ElasticStack)
- .to receive(:elasticsearch_client)
- .and_return(nil)
-
- result = subject.send(:pod_logs, result_arg)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Unable to connect to Elasticsearch')
- end
-
- it 'handles server errors from elasticsearch' do
- allow_any_instance_of(::Clusters::Integrations::ElasticStack)
- .to receive(:elasticsearch_client)
- .and_return(Elasticsearch::Transport::Client.new)
- allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
- .to receive(:pod_logs)
- .and_raise(Elasticsearch::Transport::Transport::Errors::ServiceUnavailable.new)
-
- result = subject.send(:pod_logs, result_arg)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Elasticsearch returned status code: ServiceUnavailable')
- end
-
- it 'handles cursor errors from elasticsearch' do
- allow_any_instance_of(::Clusters::Integrations::ElasticStack)
- .to receive(:elasticsearch_client)
- .and_return(Elasticsearch::Transport::Client.new)
- allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
- .to receive(:pod_logs)
- .and_raise(::Gitlab::Elasticsearch::Logs::Lines::InvalidCursor.new)
-
- result = subject.send(:pod_logs, result_arg)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Invalid cursor value provided')
- end
- end
-end
diff --git a/spec/services/pod_logs/kubernetes_service_spec.rb b/spec/services/pod_logs/kubernetes_service_spec.rb
deleted file mode 100644
index c06a87830ca..00000000000
--- a/spec/services/pod_logs/kubernetes_service_spec.rb
+++ /dev/null
@@ -1,310 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ::PodLogs::KubernetesService do
- include KubernetesHelpers
-
- let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
-
- let(:namespace) { 'autodevops-deploy-9-production' }
-
- let(:pod_name) { 'pod-1' }
- let(:pod_name_2) { 'pod-2' }
- let(:container_name) { 'container-0' }
- let(:container_name_2) { 'foo-0' }
- let(:params) { {} }
-
- let(:raw_logs) do
- "2019-12-13T14:04:22.123456Z Log 1\n2019-12-13T14:04:23.123456Z Log 2\n" \
- "2019-12-13T14:04:24.123456Z Log 3"
- end
-
- let(:raw_pods) do
- [
- {
- name: pod_name,
- container_names: [container_name, "#{container_name}-1"]
- },
- {
- name: pod_name_2,
- container_names: [container_name_2, "#{container_name_2}-1"]
- }
- ]
- end
-
- subject { described_class.new(cluster, namespace, params: params) }
-
- describe '#get_raw_pods' do
- let(:service) { create(:cluster_platform_kubernetes, :configured) }
-
- it 'returns success with passthrough k8s response' do
- stub_kubeclient_pods(namespace)
-
- result = subject.send(:get_raw_pods, {})
-
- expect(result[:status]).to eq(:success)
- expect(result[:raw_pods]).to eq([{
- name: 'kube-pod',
- container_names: %w(container-0 container-0-1)
- }])
- end
- end
-
- describe '#pod_logs' do
- let(:result_arg) do
- {
- pod_name: pod_name,
- container_name: container_name
- }
- end
-
- let(:expected_logs) { raw_logs }
- let(:service) { create(:cluster_platform_kubernetes, :configured) }
-
- it 'returns the logs' do
- stub_kubeclient_logs(pod_name, namespace, container: container_name)
-
- result = subject.send(:pod_logs, result_arg)
-
- expect(result[:status]).to eq(:success)
- expect(result[:logs]).to eq(expected_logs)
- end
-
- it 'handles Not Found errors from k8s' do
- allow_any_instance_of(Gitlab::Kubernetes::KubeClient)
- .to receive(:get_pod_log)
- .with(any_args)
- .and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not Found', {}))
-
- result = subject.send(:pod_logs, result_arg)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Pod not found')
- end
-
- it 'handles HTTP errors from k8s' do
- allow_any_instance_of(Gitlab::Kubernetes::KubeClient)
- .to receive(:get_pod_log)
- .with(any_args)
- .and_raise(Kubeclient::HttpError.new(500, 'Error', {}))
-
- result = subject.send(:pod_logs, result_arg)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Kubernetes API returned status code: 500')
- end
- end
-
- describe '#encode_logs_to_utf8', :aggregate_failures do
- let(:service) { create(:cluster_platform_kubernetes, :configured) }
- let(:expected_logs) { '2019-12-13T14:04:22.123456Z ✔ Started logging errors to Sentry' }
- let(:raw_logs) { expected_logs.dup.force_encoding(Encoding::ASCII_8BIT) }
- let(:result) { subject.send(:encode_logs_to_utf8, result_arg) }
-
- let(:result_arg) do
- {
- pod_name: pod_name,
- container_name: container_name,
- logs: raw_logs
- }
- end
-
- it 'converts logs to utf-8' do
- expect(result[:status]).to eq(:success)
- expect(result[:logs]).to eq(expected_logs)
- end
-
- it 'returns error if output of encoding helper is blank' do
- allow(Gitlab::EncodingHelper).to receive(:encode_utf8).and_return('')
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Unable to convert Kubernetes logs encoding to UTF-8')
- end
-
- it 'returns error if output of encoding helper is nil' do
- allow(Gitlab::EncodingHelper).to receive(:encode_utf8).and_return(nil)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Unable to convert Kubernetes logs encoding to UTF-8')
- end
-
- it 'returns error if output of encoding helper is not UTF-8' do
- allow(Gitlab::EncodingHelper).to receive(:encode_utf8)
- .and_return(expected_logs.encode(Encoding::UTF_16BE))
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Unable to convert Kubernetes logs encoding to UTF-8')
- end
-
- context 'when logs are nil' do
- let(:raw_logs) { nil }
- let(:expected_logs) { nil }
-
- it 'returns nil' do
- expect(result[:status]).to eq(:success)
- expect(result[:logs]).to eq(expected_logs)
- end
- end
-
- context 'when logs are blank' do
- let(:raw_logs) { (+'').force_encoding(Encoding::ASCII_8BIT) }
- let(:expected_logs) { '' }
-
- it 'returns blank string' do
- expect(result[:status]).to eq(:success)
- expect(result[:logs]).to eq(expected_logs)
- end
- end
-
- context 'when logs are already in utf-8' do
- let(:raw_logs) { expected_logs }
-
- it 'does not fail' do
- expect(result[:status]).to eq(:success)
- expect(result[:logs]).to eq(expected_logs)
- end
- end
- end
-
- describe '#split_logs' do
- let(:service) { create(:cluster_platform_kubernetes, :configured) }
-
- let(:expected_logs) do
- [
- { message: "Log 1", pod: 'pod-1', timestamp: "2019-12-13T14:04:22.123456Z" },
- { message: "Log 2", pod: 'pod-1', timestamp: "2019-12-13T14:04:23.123456Z" },
- { message: "Log 3", pod: 'pod-1', timestamp: "2019-12-13T14:04:24.123456Z" }
- ]
- end
-
- let(:result_arg) do
- {
- pod_name: pod_name,
- container_name: container_name,
- logs: raw_logs
- }
- end
-
- it 'returns the logs' do
- result = subject.send(:split_logs, result_arg)
-
- aggregate_failures do
- expect(result[:status]).to eq(:success)
- expect(result[:logs]).to eq(expected_logs)
- end
- end
- end
-
- describe '#check_pod_name' do
- it 'returns success if pod_name was specified' do
- result = subject.send(:check_pod_name, pod_name: pod_name, pods: [pod_name])
-
- expect(result[:status]).to eq(:success)
- expect(result[:pod_name]).to eq(pod_name)
- end
-
- it 'returns success if pod_name was not specified but there are pods' do
- result = subject.send(:check_pod_name, pod_name: nil, pods: [pod_name])
-
- expect(result[:status]).to eq(:success)
- expect(result[:pod_name]).to eq(pod_name)
- end
-
- it 'returns error if pod_name was not specified and there are no pods' do
- result = subject.send(:check_pod_name, pod_name: nil, pods: [])
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('No pods available')
- end
-
- it 'returns error if pod_name was specified but does not exist' do
- result = subject.send(:check_pod_name, pod_name: 'another-pod', pods: [pod_name])
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Pod does not exist')
- end
-
- it 'returns error if pod_name is too long' do
- result = subject.send(:check_pod_name, pod_name: "a very long string." * 15, pods: [pod_name])
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('pod_name cannot be larger than 253 chars')
- end
-
- it 'returns error if pod_name is in invalid format' do
- result = subject.send(:check_pod_name, pod_name: "Invalid_pod_name", pods: [pod_name])
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('pod_name can contain only lowercase letters, digits, \'-\', and \'.\' and must start and end with an alphanumeric character')
- end
- end
-
- describe '#check_container_name' do
- it 'returns success if container_name was specified' do
- result = subject.send(:check_container_name,
- container_name: container_name,
- pod_name: pod_name,
- raw_pods: raw_pods
- )
-
- expect(result[:status]).to eq(:success)
- expect(result[:container_name]).to eq(container_name)
- end
-
- it 'returns success if container_name was not specified and there are containers' do
- result = subject.send(:check_container_name,
- pod_name: pod_name_2,
- raw_pods: raw_pods
- )
-
- expect(result[:status]).to eq(:success)
- expect(result[:container_name]).to eq(container_name_2)
- end
-
- it 'returns error if container_name was not specified and there are no containers on the pod' do
- raw_pods.first[:container_names] = []
-
- result = subject.send(:check_container_name,
- pod_name: pod_name,
- raw_pods: raw_pods
- )
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('No containers available')
- end
-
- it 'returns error if container_name was specified but does not exist' do
- result = subject.send(:check_container_name,
- container_name: 'foo',
- pod_name: pod_name,
- raw_pods: raw_pods
- )
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Container does not exist')
- end
-
- it 'returns error if container_name is too long' do
- result = subject.send(:check_container_name,
- container_name: "a very long string." * 15,
- pod_name: pod_name,
- raw_pods: raw_pods
- )
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('container_name cannot be larger than 253 chars')
- end
-
- it 'returns error if container_name is in invalid format' do
- result = subject.send(:check_container_name,
- container_name: "Invalid_container_name",
- pod_name: pod_name,
- raw_pods: raw_pods
- )
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('container_name can contain only lowercase letters, digits, \'-\', and \'.\' and must start and end with an alphanumeric character')
- end
- end
-end
diff --git a/spec/services/preview_markdown_service_spec.rb b/spec/services/preview_markdown_service_spec.rb
index 53f8f5b7253..fe1ab6b1d58 100644
--- a/spec/services/preview_markdown_service_spec.rb
+++ b/spec/services/preview_markdown_service_spec.rb
@@ -172,4 +172,24 @@ RSpec.describe PreviewMarkdownService do
expect(result[:commands]).to eq 'Tags this commit to v1.2.3 with "Stable release".'
end
end
+
+ context 'note with multiple quick actions' do
+ let(:issue) { create(:issue, project: project) }
+ let(:params) do
+ {
+ text: "/confidential\n/due 2001-12-31\n/estimate 2y\n/assign #{user.to_reference}",
+ target_type: 'Issue',
+ target_id: issue.id
+ }
+ end
+
+ let(:service) { described_class.new(project, user, params) }
+
+ it 'renders quick actions on multiple lines' do
+ result = service.execute
+
+ expect(result[:commands]).to eq "Makes this issue confidential.<br>Sets the due date to Dec 31, 2001.<br>" \
+ "Sets time estimate to 2y.<br>Assigns #{user.to_reference}."
+ end
+ end
end
diff --git a/spec/services/projects/after_rename_service_spec.rb b/spec/services/projects/after_rename_service_spec.rb
index a9329f092fa..9dc15131bc5 100644
--- a/spec/services/projects/after_rename_service_spec.rb
+++ b/spec/services/projects/after_rename_service_spec.rb
@@ -59,22 +59,6 @@ RSpec.describe Projects::AfterRenameService do
end
end
- context 'gitlab pages' do
- before do
- allow(project_storage).to receive(:rename_repo) { true }
- end
-
- context 'when the project does not have pages deployed' do
- it 'does nothing with the pages directory' do
- allow(project).to receive(:pages_deployed?).and_return(false)
-
- expect(PagesTransferWorker).not_to receive(:perform_async)
-
- service_execute
- end
- end
- end
-
context 'attachments' do
before do
expect(project_storage).to receive(:rename_repo) { true }
@@ -204,6 +188,22 @@ RSpec.describe Projects::AfterRenameService do
)
end
end
+
+ context 'EventStore' do
+ let(:project) { create(:project, :repository, skip_disk_validation: true) }
+
+ it 'publishes a ProjectPathChangedEvent' do
+ expect { service_execute }
+ .to publish_event(Projects::ProjectPathChangedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace_id,
+ root_namespace_id: project.root_namespace.id,
+ old_path: full_path_before_rename,
+ new_path: full_path_after_rename
+ )
+ end
+ end
end
def service_execute
diff --git a/spec/services/projects/blame_service_spec.rb b/spec/services/projects/blame_service_spec.rb
index 40b2bc869dc..54c4315d242 100644
--- a/spec/services/projects/blame_service_spec.rb
+++ b/spec/services/projects/blame_service_spec.rb
@@ -98,31 +98,21 @@ RSpec.describe Projects::BlameService, :aggregate_failures do
end
end
- describe 'Current page' do
- subject { service.pagination.current_page }
-
- context 'with page = 1' do
- let(:page) { 1 }
-
- it { is_expected.to eq(1) }
- end
-
- context 'with page = 2' do
- let(:page) { 2 }
-
- it { is_expected.to eq(2) }
+ describe 'Pagination attributes' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:page, :current_page, :total_pages) do
+ 1 | 1 | 2
+ 2 | 2 | 2
+ 3 | 1 | 2 # Overlimit
+ 0 | 1 | 2 # Incorrect
end
- context 'with page = 3 (overlimit)' do
- let(:page) { 3 }
-
- it { is_expected.to eq(1) }
- end
-
- context 'with page = 0 (incorrect)' do
- let(:page) { 0 }
-
- it { is_expected.to eq(1) }
+ with_them do
+ it 'returns the correct pagination attributes' do
+ expect(subject.current_page).to eq(current_page)
+ expect(subject.total_pages).to eq(total_pages)
+ end
end
end
end
diff --git a/spec/services/projects/create_from_template_service_spec.rb b/spec/services/projects/create_from_template_service_spec.rb
index 7e23daabcd3..fba6225b87a 100644
--- a/spec/services/projects/create_from_template_service_spec.rb
+++ b/spec/services/projects/create_from_template_service_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe Projects::CreateFromTemplateService do
end
it 'is not scheduled' do
- expect(project.import_scheduled?).to be_nil
+ expect(project.import_scheduled?).to be(false)
end
it 'repository is empty' do
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index cd1e629e1d2..59dee209ff9 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -152,6 +152,20 @@ RSpec.describe Projects::CreateService, '#execute' do
create_project(user, opts)
end
+
+ it 'publishes a ProjectCreatedEvent' do
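+ # The :nested factory trait gives the group a parent, so the root namespace id asserted below is group.parent_id.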
+ group = create(:group, :nested).tap do |group|
+ group.add_owner(user)
+ end
+
+ expect { create_project(user, name: 'Project', path: 'project', namespace_id: group.id) }
+ .to publish_event(Projects::ProjectCreatedEvent)
+ .with(
+ project_id: kind_of(Numeric),
+ namespace_id: group.id,
+ root_namespace_id: group.parent_id
+ )
+ end
end
context "admin creates project with other user's namespace_id" do
@@ -543,15 +557,15 @@ RSpec.describe Projects::CreateService, '#execute' do
end
context 'with legacy storage' do
- let(:fake_repo_path) { File.join(TestEnv.repos_path, user.namespace.full_path, 'existing.git') }
+ let(:raw_fake_repo) { Gitlab::Git::Repository.new('default', File.join(user.namespace.full_path, 'existing.git'), nil, nil) }
before do
stub_application_setting(hashed_storage_enabled: false)
- TestEnv.create_bare_repository(fake_repo_path)
+ raw_fake_repo.create_repository
end
after do
- FileUtils.rm_rf(fake_repo_path)
+ raw_fake_repo.remove
end
it 'does not allow to create a project when path matches existing repository on disk' do
@@ -578,15 +592,15 @@ RSpec.describe Projects::CreateService, '#execute' do
context 'with hashed storage' do
let(:hash) { '6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b' }
let(:hashed_path) { '@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b' }
- let(:fake_repo_path) { File.join(TestEnv.repos_path, "#{hashed_path}.git") }
+ let(:raw_fake_repo) { Gitlab::Git::Repository.new('default', "#{hashed_path}.git", nil, nil) }
before do
allow(Digest::SHA2).to receive(:hexdigest) { hash }
- TestEnv.create_bare_repository(fake_repo_path)
+ raw_fake_repo.create_repository
end
after do
- FileUtils.rm_rf(fake_repo_path)
+ raw_fake_repo.remove
end
it 'does not allow to create a project when path matches existing repository on disk' do
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index c00438199fd..955384e518c 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -25,8 +25,12 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
end
- it 'publishes a ProjectDeleted event with project id and namespace id' do
- expected_data = { project_id: project.id, namespace_id: project.namespace_id }
+ it 'publishes a ProjectDeletedEvent' do
+ expected_data = {
+ project_id: project.id,
+ namespace_id: project.namespace_id,
+ root_namespace_id: project.root_namespace.id
+ }
expect { destroy_project(project, user, {}) }.to publish_event(Projects::ProjectDeletedEvent).with(expected_data)
end
@@ -119,14 +123,15 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
allow(project).to receive(:destroy!).and_return(true)
end
- it "deletes merge request and related records" do
- merge_request_diffs = merge_request.merge_request_diffs
- expect(merge_request_diffs.size).to eq(1)
+ [MergeRequestDiffCommit, MergeRequestDiffFile].each do |model|
+ it "deletes #{model} records of the merge request" do
+ merge_request_diffs = merge_request.merge_request_diffs
+ expect(merge_request_diffs.size).to eq(1)
- mrdc_count = MergeRequestDiffCommit.where(merge_request_diff_id: merge_request_diffs.first.id).count
+ records_count = model.where(merge_request_diff_id: merge_request_diffs.first.id).count
- expect { destroy_project(project, user, {}) }
- .to change { MergeRequestDiffCommit.count }.by(-mrdc_count)
+ expect { destroy_project(project, user, {}) }.to change { model.count }.by(-records_count)
+ end
end
end
@@ -220,7 +225,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
context 'when flushing caches fail due to Redis' do
before do
new_user = create(:user)
- project.team.add_user(new_user, Gitlab::Access::DEVELOPER)
+ project.team.add_member(new_user, Gitlab::Access::DEVELOPER)
allow_any_instance_of(described_class).to receive(:flush_caches).and_raise(::Redis::CannotConnectError)
end
@@ -454,10 +459,10 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
it 'deletes webhooks and logs related to project' do
expect_next_instance_of(WebHooks::DestroyService, user) do |instance|
- expect(instance).to receive(:sync_destroy).with(web_hook1).and_call_original
+ expect(instance).to receive(:execute).with(web_hook1).and_call_original
end
expect_next_instance_of(WebHooks::DestroyService, user) do |instance|
- expect(instance).to receive(:sync_destroy).with(web_hook2).and_call_original
+ expect(instance).to receive(:execute).with(web_hook2).and_call_original
end
expect do
@@ -468,7 +473,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
context 'when an error is raised deleting webhooks' do
before do
allow_next_instance_of(WebHooks::DestroyService) do |instance|
- allow(instance).to receive(:sync_destroy).and_return(message: 'foo', status: :error)
+ allow(instance).to receive(:execute).and_return(message: 'foo', status: :error)
end
end
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index ce30a20edf4..48756cf774b 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -32,14 +32,14 @@ RSpec.describe Projects::ForkService do
external_authorization_classification_label: 'classification-label')
@to_user = create(:user)
@to_namespace = @to_user.namespace
- @from_project.add_user(@to_user, :developer)
+ @from_project.add_member(@to_user, :developer)
end
context 'fork project' do
context 'when forker is a guest' do
before do
@guest = create(:user)
- @from_project.add_user(@guest, :guest)
+ @from_project.add_member(@guest, :guest)
end
subject { fork_project(@from_project, @guest, using_service: true) }
@@ -68,6 +68,9 @@ RSpec.describe Projects::ForkService do
it { expect(to_project.avatar.file).to be_exists }
it { expect(to_project.ci_config_path).to eq(@from_project.ci_config_path) }
it { expect(to_project.external_authorization_classification_label).to eq(@from_project.external_authorization_classification_label) }
+ it { expect(to_project.suggestion_commit_message).to eq(@from_project.suggestion_commit_message) }
+ it { expect(to_project.merge_commit_template).to eq(@from_project.merge_commit_template) }
+ it { expect(to_project.squash_commit_template).to eq(@from_project.squash_commit_template) }
# This test is here because we had a bug where the from-project lost its
# avatar after being forked.
@@ -156,16 +159,16 @@ RSpec.describe Projects::ForkService do
end
context 'repository in legacy storage already exists' do
- let(:fake_repo_path) { File.join(TestEnv.repos_path, @to_user.namespace.full_path, "#{@from_project.path}.git") }
+ let(:raw_fake_repo) { Gitlab::Git::Repository.new('default', File.join(@to_user.namespace.full_path, "#{@from_project.path}.git"), nil, nil) }
let(:params) { { namespace: @to_user.namespace, using_service: true } }
before do
stub_application_setting(hashed_storage_enabled: false)
- TestEnv.create_bare_repository(fake_repo_path)
+ raw_fake_repo.create_repository
end
after do
- FileUtils.rm_rf(fake_repo_path)
+ raw_fake_repo.remove
end
subject { fork_project(@from_project, @to_user, params) }
@@ -261,10 +264,10 @@ RSpec.describe Projects::ForkService do
description: 'Wow, such a cool project!',
ci_config_path: 'debian/salsa-ci.yml')
@group = create(:group)
- @group.add_user(@group_owner, GroupMember::OWNER)
- @group.add_user(@developer, GroupMember::DEVELOPER)
- @project.add_user(@developer, :developer)
- @project.add_user(@group_owner, :developer)
+ @group.add_member(@group_owner, GroupMember::OWNER)
+ @group.add_member(@developer, GroupMember::DEVELOPER)
+ @project.add_member(@developer, :developer)
+ @project.add_member(@group_owner, :developer)
@opts = { namespace: @group, using_service: true }
end
diff --git a/spec/services/projects/group_links/update_service_spec.rb b/spec/services/projects/group_links/update_service_spec.rb
index ff1618c3bbe..20616890ebd 100644
--- a/spec/services/projects/group_links/update_service_spec.rb
+++ b/spec/services/projects/group_links/update_service_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Projects::GroupLinks::UpdateService, '#execute' do
expires_at: expiry_date }
end
- subject { described_class.new(link).execute(group_link_params) }
+ subject { described_class.new(link, user).execute(group_link_params) }
before do
group.add_developer(user)
diff --git a/spec/services/projects/move_deploy_keys_projects_service_spec.rb b/spec/services/projects/move_deploy_keys_projects_service_spec.rb
index bd93b80f712..59674a3a4ef 100644
--- a/spec/services/projects/move_deploy_keys_projects_service_spec.rb
+++ b/spec/services/projects/move_deploy_keys_projects_service_spec.rb
@@ -56,5 +56,22 @@ RSpec.describe Projects::MoveDeployKeysProjectsService do
expect(project_with_deploy_keys.deploy_keys_projects.count).not_to eq 0
end
end
+
+ context 'when SHA256 fingerprint is missing' do
+ before do
+ create(:deploy_keys_project, project: target_project)
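+ # update_all skips callbacks and validations, simulating legacy keys stored without a SHA256 fingerprint.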
+ DeployKey.all.update_all(fingerprint_sha256: nil)
+ end
+
+ it 'moves the user\'s deploy keys from one project to another' do
+ combined_keys = project_with_deploy_keys.deploy_keys + target_project.deploy_keys
+
+ subject.execute(project_with_deploy_keys)
+
+ expect(project_with_deploy_keys.deploy_keys.reload).to be_empty
+ expect(target_project.deploy_keys.reload).to match_array(combined_keys)
+ expect(DeployKey.all.select(:fingerprint_sha256)).to all(be_present)
+ end
+ end
end
end
diff --git a/spec/services/projects/operations/update_service_spec.rb b/spec/services/projects/operations/update_service_spec.rb
index 3ee867ba6f2..bee91c358ce 100644
--- a/spec/services/projects/operations/update_service_spec.rb
+++ b/spec/services/projects/operations/update_service_spec.rb
@@ -462,93 +462,5 @@ RSpec.describe Projects::Operations::UpdateService do
end
end
end
-
- context 'tracing setting' do
- context 'with valid params' do
- let(:params) do
- {
- tracing_setting_attributes: {
- external_url: 'http://some-url.com'
- }
- }
- end
-
- context 'with an existing setting' do
- before do
- create(:project_tracing_setting, project: project)
- end
-
- shared_examples 'setting deletion' do
- let!(:original_params) { params.deep_dup }
-
- it 'deletes the setting' do
- expect(result[:status]).to eq(:success)
- expect(project.reload.tracing_setting).to be_nil
- end
-
- it 'does not modify original params' do
- subject.execute
-
- expect(params).to eq(original_params)
- end
- end
-
- it 'updates the setting' do
- expect(project.tracing_setting).not_to be_nil
-
- expect(result[:status]).to eq(:success)
- expect(project.reload.tracing_setting.external_url)
- .to eq('http://some-url.com')
- end
-
- context 'with missing external_url' do
- before do
- params[:tracing_setting_attributes].delete(:external_url)
- end
-
- it_behaves_like 'setting deletion'
- end
-
- context 'with empty external_url' do
- before do
- params[:tracing_setting_attributes][:external_url] = ''
- end
-
- it_behaves_like 'setting deletion'
- end
-
- context 'with blank external_url' do
- before do
- params[:tracing_setting_attributes][:external_url] = ' '
- end
-
- it_behaves_like 'setting deletion'
- end
- end
-
- context 'without an existing setting' do
- it 'creates a setting' do
- expect(project.tracing_setting).to be_nil
-
- expect(result[:status]).to eq(:success)
- expect(project.reload.tracing_setting.external_url)
- .to eq('http://some-url.com')
- end
- end
- end
-
- context 'with empty params' do
- let(:params) { {} }
-
- let!(:tracing_setting) do
- create(:project_tracing_setting, project: project)
- end
-
- it 'does nothing' do
- expect(result[:status]).to eq(:success)
- expect(project.reload.tracing_setting).to eq(tracing_setting)
- end
- end
- end
end
end
diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
index 0d0bb317df2..6f760e6dbfa 100644
--- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
@@ -228,12 +228,14 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
context 'when payload exceeds max amount of processable alerts' do
# We are defining 2 alerts in payload_raw above
let(:max_alerts) { 1 }
+ let(:fingerprint) { prometheus_alert_payload_fingerprint(alert_resolved) }
before do
stub_const("#{described_class}::PROCESS_MAX_ALERTS", max_alerts)
create(:prometheus_integration, project: project)
create(:project_alerting_setting, project: project, token: token)
+ create(:alert_management_alert, project: project, fingerprint: fingerprint)
allow(Gitlab::AppLogger).to receive(:warn)
end
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index bebe80b710b..ecf9f92d74f 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -372,16 +372,16 @@ RSpec.describe Projects::TransferService do
end
context 'namespace which contains orphan repository with same projects path name' do
- let(:fake_repo_path) { File.join(TestEnv.repos_path, group.full_path, "#{project.path}.git") }
+ let(:raw_fake_repo) { Gitlab::Git::Repository.new('default', File.join(group.full_path, "#{project.path}.git"), nil, nil) }
before do
group.add_owner(user)
- TestEnv.create_bare_repository(fake_repo_path)
+ raw_fake_repo.create_repository
end
after do
- FileUtils.rm_rf(fake_repo_path)
+ raw_fake_repo.remove
end
it 'does not allow the project transfer' do
@@ -715,20 +715,6 @@ RSpec.describe Projects::TransferService do
end
end
- context 'moving pages' do
- let_it_be(:project) { create(:project, namespace: user.namespace) }
-
- before do
- group.add_owner(user)
- end
-
- it 'does not schedule a job when no pages are deployed' do
- expect(PagesTransferWorker).not_to receive(:perform_async)
-
- execute_transfer
- end
- end
-
context 'handling issue contacts' do
let_it_be(:root_group) { create(:group) }
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index cbbed82aa0b..24b5e35e422 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -43,6 +43,16 @@ RSpec.describe Projects::UpdatePagesService do
expect(project.pages_deployed?).to be_truthy
end
+ it 'publishes a PageDeployedEvent with project, namespace, and root namespace ids' do
+ expected_data = {
+ project_id: project.id,
+ namespace_id: project.namespace_id,
+ root_namespace_id: project.root_namespace.id
+ }
+
+ expect { subject.execute }.to publish_event(Pages::PageDeployedEvent).with(expected_data)
+ end
+
it 'creates pages_deployment and saves it in the metadata' do
expect do
expect(execute).to eq(:success)
@@ -161,16 +171,6 @@ RSpec.describe Projects::UpdatePagesService do
end
end
- shared_examples 'fails with outdated reference message' do
- it 'fails' do
- expect(execute).not_to eq(:success)
- expect(project.reload.pages_metadatum).not_to be_deployed
-
- expect(deploy_status).to be_failed
- expect(deploy_status.description).to eq('build SHA is outdated for this ref')
- end
- end
-
shared_examples 'successfully deploys' do
it 'succeeds' do
expect do
@@ -202,27 +202,29 @@ RSpec.describe Projects::UpdatePagesService do
project.update_pages_deployment!(new_deployment)
end
- include_examples 'fails with outdated reference message'
+ it 'fails with outdated reference message' do
+ expect(execute).to eq(:error)
+ expect(project.reload.pages_metadatum).not_to be_deployed
+
+ expect(deploy_status).to be_failed
+ expect(deploy_status.description).to eq('build SHA is outdated for this ref')
+ end
end
end
- context 'when uploaded deployment size is wrong' do
- it 'raises an error' do
- allow_next_instance_of(PagesDeployment) do |deployment|
- allow(deployment)
- .to receive(:size)
- .and_return(file.size + 1)
- end
+ it 'fails when uploaded deployment size is wrong' do
+ allow_next_instance_of(PagesDeployment) do |deployment|
+ allow(deployment)
+ .to receive(:size)
+ .and_return(file.size + 1)
+ end
- expect do
- expect(execute).not_to eq(:success)
+ expect(execute).not_to eq(:success)
- expect(GenericCommitStatus.last.description).to eq("Error: The uploaded artifact size does not match the expected value.")
- project.pages_metadatum.reload
- expect(project.pages_metadatum).not_to be_deployed
- expect(project.pages_metadatum.pages_deployment).to be_nil
- end.to raise_error(Projects::UpdatePagesService::WrongUploadedDeploymentSizeError)
- end
+ expect(GenericCommitStatus.last.description).to eq('The uploaded artifact size does not match the expected value')
+ project.pages_metadatum.reload
+ expect(project.pages_metadatum).not_to be_deployed
+ expect(project.pages_metadatum.pages_deployment).to be_nil
end
end
end
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 7b5bf1db030..f019434a4fe 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -289,6 +289,42 @@ RSpec.describe Projects::UpdateService do
end
end
+ context 'when changing operations feature visibility' do
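+ # When split_operations_visibility_permissions is enabled, operations_access_level no longer cascades to the newer granular access levels.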
+ let(:feature_params) { { operations_access_level: ProjectFeature::DISABLED } }
+
+ it 'does not sync the changes to the related fields' do
+ result = update_project(project, user, project_feature_attributes: feature_params)
+
+ expect(result).to eq({ status: :success })
+ feature = project.project_feature
+
+ expect(feature.operations_access_level).to eq(ProjectFeature::DISABLED)
+ expect(feature.monitor_access_level).not_to eq(ProjectFeature::DISABLED)
+ expect(feature.infrastructure_access_level).not_to eq(ProjectFeature::DISABLED)
+ expect(feature.feature_flags_access_level).not_to eq(ProjectFeature::DISABLED)
+ expect(feature.environments_access_level).not_to eq(ProjectFeature::DISABLED)
+ end
+
+ context 'when split_operations_visibility_permissions feature is disabled' do
+ before do
+ stub_feature_flags(split_operations_visibility_permissions: false)
+ end
+
+ it 'syncs the changes to the related fields' do
+ result = update_project(project, user, project_feature_attributes: feature_params)
+
+ expect(result).to eq({ status: :success })
+ feature = project.project_feature
+
+ expect(feature.operations_access_level).to eq(ProjectFeature::DISABLED)
+ expect(feature.monitor_access_level).to eq(ProjectFeature::DISABLED)
+ expect(feature.infrastructure_access_level).to eq(ProjectFeature::DISABLED)
+ expect(feature.feature_flags_access_level).to eq(ProjectFeature::DISABLED)
+ expect(feature.environments_access_level).to eq(ProjectFeature::DISABLED)
+ end
+ end
+ end
+
context 'when updating a project that contains container images' do
before do
stub_container_registry_config(enabled: true)
@@ -312,17 +348,17 @@ RSpec.describe Projects::UpdateService do
end
context 'when renaming a project' do
- let(:fake_repo_path) { File.join(TestEnv.repos_path, user.namespace.full_path, 'existing.git') }
+ let(:raw_fake_repo) { Gitlab::Git::Repository.new('default', File.join(user.namespace.full_path, 'existing.git'), nil, nil) }
context 'with legacy storage' do
let(:project) { create(:project, :legacy_storage, :repository, creator: user, namespace: user.namespace) }
before do
- TestEnv.create_bare_repository(fake_repo_path)
+ raw_fake_repo.create_repository
end
after do
- FileUtils.rm_rf(fake_repo_path)
+ raw_fake_repo.remove
end
it 'does not allow renaming when new path matches existing repository on disk' do
@@ -388,7 +424,7 @@ RSpec.describe Projects::UpdateService do
it 'does not update when not project owner' do
maintainer = create(:user)
- project.add_user(maintainer, :maintainer)
+ project.add_member(maintainer, :maintainer)
expect { update_project(project, maintainer, emails_disabled: true) }
.not_to change { project.emails_disabled }
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index f7ed6006099..3f11eaa7e93 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe QuickActions::InterpretService do
+ include AfterNextHelpers
+
let_it_be(:group) { create(:group, :crm_enabled) }
let_it_be(:public_project) { create(:project, :public, group: group) }
let_it_be(:repository_project) { create(:project, :repository) }
@@ -17,8 +19,9 @@ RSpec.describe QuickActions::InterpretService do
let(:milestone) { create(:milestone, project: project, title: '9.10') }
let(:commit) { create(:commit, project: project) }
+ let(:current_user) { developer }
- subject(:service) { described_class.new(project, developer) }
+ subject(:service) { described_class.new(project, current_user) }
before_all do
public_project.add_developer(developer)
@@ -682,6 +685,58 @@ RSpec.describe QuickActions::InterpretService do
end
shared_examples 'assign command' do
+ it 'assigns to me' do
+ cmd = '/assign me'
+
+ _, updates, _ = service.execute(cmd, issuable)
+
+ expect(updates).to eq(assignee_ids: [current_user.id])
+ end
+
+ it 'does not assign to group members' do
+ grp = create(:group)
+ grp.add_developer(developer)
+ grp.add_developer(developer2)
+
+ cmd = "/assign #{grp.to_reference}"
+
+ _, updates, message = service.execute(cmd, issuable)
+
+ expect(updates).to be_blank
+ expect(message).to include('Failed to find users')
+ end
+
+ context 'when there are too many references' do
+ before do
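+ # Lower the limit to 2 so the four user references in the command below exceed it.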
+ stub_const('Gitlab::QuickActions::UsersExtractor::MAX_QUICK_ACTION_USERS', 2)
+ end
+
+ it 'says what went wrong' do
+ cmd = '/assign her and you, me and them'
+
+ _, updates, message = service.execute(cmd, issuable)
+
+ expect(updates).to be_blank
+ expect(message).to include('Too many references. Quick actions are limited to at most 2 user references')
+ end
+ end
+
+ context 'when the user extractor raises an unanticipated error' do
+ before do
+ allow_next(Gitlab::QuickActions::UsersExtractor)
+ .to receive(:execute).and_raise(Gitlab::QuickActions::UsersExtractor::Error)
+ end
+
+ it 'tracks the exception in dev, and reports a generic message in production' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).twice
+
+ _, updates, message = service.execute('/assign some text', issuable)
+
+ expect(updates).to be_blank
+ expect(message).to include('Something went wrong')
+ end
+ end
+
it 'assigns to users with escaped underscores' do
user = create(:user)
base = user.username
diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb
index 82546ae810b..3615747e191 100644
--- a/spec/services/repositories/changelog_service_spec.rb
+++ b/spec/services/repositories/changelog_service_spec.rb
@@ -194,6 +194,25 @@ RSpec.describe Repositories::ChangelogService do
end
end
end
+
+ context 'with specified changelog config file path' do
+ it 'return specified changelog content' do
+ config = Gitlab::Changelog::Config.from_hash(project, { 'template' => 'specified_changelog_content' }, creator)
+
+ allow(Gitlab::Changelog::Config)
+ .to receive(:from_git)
+ .with(project, creator, 'specified_changelog_config.yml')
+ .and_return(config)
+
+ described_class
+ .new(project, creator, version: '1.0.0', from: sha1, config_file: 'specified_changelog_config.yml')
+ .execute(commit_to_changelog: commit_to_changelog)
+
+ changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
+
+ expect(changelog).to include('specified_changelog_content')
+ end
+ end
end
describe '#start_of_commit_range' do
diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb
index d7a36ff370e..5edea13afa4 100644
--- a/spec/services/search_service_spec.rb
+++ b/spec/services/search_service_spec.rb
@@ -511,7 +511,7 @@ RSpec.describe SearchService do
end
context 'with :with_api_entity_associations' do
- it_behaves_like "redaction limits N+1 queries", limit: 13
+ it_behaves_like "redaction limits N+1 queries", limit: 14
end
end
@@ -599,25 +599,13 @@ RSpec.describe SearchService do
let(:search_service) { double(:search_service) }
before do
- stub_feature_flags(prevent_abusive_searches: should_detect_abuse)
expect(Gitlab::Search::Params).to receive(:new)
- .with(raw_params, detect_abuse: should_detect_abuse).and_call_original
+ .with(raw_params, detect_abuse: true).and_call_original
allow(subject).to receive(:search_service).and_return search_service
end
- context 'when abusive search but prevent_abusive_searches FF is disabled' do
- let(:should_detect_abuse) { false }
- let(:scope) { '1;drop%20table' }
-
- it 'executes search even if params are abusive' do
- expect(search_service).to receive(:execute)
- subject.search_results
- end
- end
-
context 'a search is abusive' do
- let(:should_detect_abuse) { true }
let(:scope) { '1;drop%20table' }
it 'does NOT execute search service' do
@@ -627,7 +615,6 @@ RSpec.describe SearchService do
end
context 'a search is NOT abusive' do
- let(:should_detect_abuse) { true }
let(:scope) { 'issues' }
it 'executes search service' do
diff --git a/spec/services/service_ping/submit_service_ping_service_spec.rb b/spec/services/service_ping/submit_service_ping_service_spec.rb
index 7a8bd1910fe..b863b2a46b0 100644
--- a/spec/services/service_ping/submit_service_ping_service_spec.rb
+++ b/spec/services/service_ping/submit_service_ping_service_spec.rb
@@ -377,12 +377,15 @@ RSpec.describe ServicePing::SubmitService do
stub_database_flavor_check
stub_application_setting(usage_ping_enabled: true)
stub_response(body: with_conv_index_params)
+ allow_next_instance_of(ServicePing::BuildPayload) do |service|
+ allow(service).to receive(:execute).and_return(payload)
+ end
end
- context 'with feature flag measure_service_ping_metric_collection turned on' do
- let(:metric_double) { instance_double(Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator, duration: 123) }
- let(:payload) do
- {
+ let(:metric_double) { instance_double(Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator, duration: 123) }
+ let(:payload) do
+ {
+ uuid: 'uuid',
metric_a: metric_double,
metric_group: {
metric_b: metric_double
@@ -390,49 +393,27 @@ RSpec.describe ServicePing::SubmitService do
metric_without_timing: "value",
recorded_at: Time.current
}
- end
+ end
- let(:metadata_payload) do
- {
- metadata: {
+ let(:metadata_payload) do
+ {
+ metadata: {
+ uuid: 'uuid',
metrics: [
{ name: 'metric_a', time_elapsed: 123 },
{ name: 'metric_group.metric_b', time_elapsed: 123 }
]
}
}
- end
-
- before do
- stub_feature_flags(measure_service_ping_metric_collection: true)
-
- allow_next_instance_of(ServicePing::BuildPayload) do |service|
- allow(service).to receive(:execute).and_return(payload)
- end
- end
-
- it 'submits metadata' do
- response = stub_full_request(service_ping_metadata_url, method: :post)
- .with(body: metadata_payload)
-
- subject.execute
-
- expect(response).to have_been_requested
- end
end
- context 'with feature flag measure_service_ping_metric_collection turned off' do
- before do
- stub_feature_flags(measure_service_ping_metric_collection: false)
- end
+ it 'submits metadata' do
+ response = stub_full_request(service_ping_metadata_url, method: :post)
+ .with(body: metadata_payload)
- it 'does NOT submit metadata' do
- response = stub_full_request(service_ping_metadata_url, method: :post)
-
- subject.execute
+ subject.execute
- expect(response).not_to have_been_requested
- end
+ expect(response).to have_been_requested
end
end
diff --git a/spec/services/suggestions/apply_service_spec.rb b/spec/services/suggestions/apply_service_spec.rb
index dc330a5546f..6052882813e 100644
--- a/spec/services/suggestions/apply_service_spec.rb
+++ b/spec/services/suggestions/apply_service_spec.rb
@@ -581,8 +581,7 @@ RSpec.describe Suggestions::ApplyService do
let(:project) { create(:project, :public, :repository) }
let(:forked_project) do
- fork_project_with_submodules(project,
- user, repository: project.repository)
+ fork_project_with_submodules(project, user)
end
let(:merge_request) do
diff --git a/spec/services/system_notes/incidents_service_spec.rb b/spec/services/system_notes/incidents_service_spec.rb
index d1b831e9c4c..6439f9fae93 100644
--- a/spec/services/system_notes/incidents_service_spec.rb
+++ b/spec/services/system_notes/incidents_service_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe SystemNotes::IncidentsService do
- include Gitlab::Routing
-
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:author) { create(:user) }
@@ -22,14 +20,12 @@ RSpec.describe SystemNotes::IncidentsService do
end
it 'posts the correct text to the system note' do
- path = project_issues_incident_path(project, incident, anchor: "timeline_event_#{timeline_event.id}")
- expect(subject.note).to match("added an [incident timeline event](#{path})")
+ expect(subject.note).to match("added an incident timeline event")
end
end
describe '#edit_timeline_event' do
let(:was_changed) { :unknown }
- let(:path) { project_issues_incident_path(project, incident, anchor: "timeline_event_#{timeline_event.id}") }
subject do
described_class.new(noteable: incident).edit_timeline_event(timeline_event, author, was_changed: was_changed)
@@ -44,7 +40,7 @@ RSpec.describe SystemNotes::IncidentsService do
let(:was_changed) { :occurred_at }
it 'posts the correct text to the system note' do
- expect(subject.note).to match("edited the event time/date on [incident timeline event](#{path})")
+ expect(subject.note).to match("edited the event time/date on incident timeline event")
end
end
@@ -52,7 +48,7 @@ RSpec.describe SystemNotes::IncidentsService do
let(:was_changed) { :note }
it 'posts the correct text to the system note' do
- expect(subject.note).to match("edited the text on [incident timeline event](#{path})")
+ expect(subject.note).to match("edited the text on incident timeline event")
end
end
@@ -60,7 +56,7 @@ RSpec.describe SystemNotes::IncidentsService do
let(:was_changed) { :occurred_at_and_note }
it 'posts the correct text to the system note' do
- expect(subject.note).to match("edited the event time/date and text on [incident timeline event](#{path})")
+ expect(subject.note).to match("edited the event time/date and text on incident timeline event")
end
end
@@ -68,7 +64,7 @@ RSpec.describe SystemNotes::IncidentsService do
let(:was_changed) { :unknown }
it 'posts the correct text to the system note' do
- expect(subject.note).to match("edited [incident timeline event](#{path})")
+ expect(subject.note).to match("edited incident timeline event")
end
end
end
diff --git a/spec/services/terraform/states/trigger_destroy_service_spec.rb b/spec/services/terraform/states/trigger_destroy_service_spec.rb
index 2e96331779c..459f4c3bdb9 100644
--- a/spec/services/terraform/states/trigger_destroy_service_spec.rb
+++ b/spec/services/terraform/states/trigger_destroy_service_spec.rb
@@ -9,7 +9,9 @@ RSpec.describe Terraform::States::TriggerDestroyService do
describe '#execute', :aggregate_failures do
let_it_be(:state) { create(:terraform_state, project: project) }
- subject { described_class.new(state, current_user: user).execute }
+ let(:service) { described_class.new(state, current_user: user) }
+
+ subject { service.execute }
it 'marks the state as deleted and schedules a cleanup worker' do
expect(Terraform::States::DestroyWorker).to receive(:perform_async).with(state.id).once
@@ -18,6 +20,15 @@ RSpec.describe Terraform::States::TriggerDestroyService do
expect(state.deleted_at).to be_like_time(Time.current)
end
+ context 'within a database transaction' do
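+ # state.with_lock opens a transaction; scheduling DestroyWorker inside it must not raise EnqueueFromTransactionError.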
+ subject { state.with_lock { service.execute } }
+
+ it 'does not raise an EnqueueFromTransactionError' do
+ expect { subject }.not_to raise_error
+ expect(state.deleted_at).to be_like_time(Time.current)
+ end
+ end
+
shared_examples 'unable to delete state' do
it 'does not modify the state' do
expect(Terraform::States::DestroyWorker).not_to receive(:perform_async)
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index e4582e19416..1cb44366457 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -186,8 +186,8 @@ RSpec.describe TodoService do
before do
group.add_owner(author)
- group.add_user(member, Gitlab::Access::DEVELOPER)
- group.add_user(john_doe, Gitlab::Access::DEVELOPER)
+ group.add_member(member, Gitlab::Access::DEVELOPER)
+ group.add_member(john_doe, Gitlab::Access::DEVELOPER)
service.new_issue(issue, author)
end
diff --git a/spec/services/users/activity_service_spec.rb b/spec/services/users/activity_service_spec.rb
index 092c5cd3e5e..47a4b943d83 100644
--- a/spec/services/users/activity_service_spec.rb
+++ b/spec/services/users/activity_service_spec.rb
@@ -34,6 +34,13 @@ RSpec.describe Users::ActivityService do
subject.execute
end
+
+ it 'tracks RedisHLL event' do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+ .with('unique_active_user', values: user.id)
+
+ subject.execute
+ end
end
context 'when a bad object is passed' do
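# Editor's sketch (assumed wiring, not the actual Users::ActivityService code):
# the new example above asserts exactly this call shape, a named RedisHLL event
# tracked against the acting user's id.
def track_unique_active_user(user)
  Gitlab::UsageDataCounters::HLLRedisCounter.track_event('unique_active_user', values: user.id)
end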
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index 068550ec234..339ffc44e4d 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -84,8 +84,74 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
Gitlab::WebHooks::RecursionDetection.set_request_uuid(uuid)
end
+ context 'when there is an interpolation error' do
+ let(:error) { ::WebHook::InterpolationError.new('boom') }
+
+ before do
+ stub_full_request(project_hook.url, method: :post)
+ allow(project_hook).to receive(:interpolated_url).and_raise(error)
+ end
+
+ it 'logs the error' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(error)
+
+ expect(service_instance).to receive(:log_execution).with(
+ execution_duration: (be > 0),
+ response: have_attributes(code: 200)
+ )
+
+ service_instance.execute
+ end
+ end
+
+ context 'when there are URL variables' do
+ before do
+ project_hook.update!(
+ url: 'http://example.com/{one}/{two}',
+ url_variables: { 'one' => 'a', 'two' => 'b' }
+ )
+ end
+
+ it 'POSTs to the interpolated URL, and logs the hook.url' do
+ stub_full_request(project_hook.interpolated_url, method: :post)
+
+ expect(service_instance).to receive(:queue_log_execution_with_retry).with(
+ include(url: project_hook.url),
+ :ok
+ )
+
+ service_instance.execute
+
+ expect(WebMock)
+ .to have_requested(:post, stubbed_hostname(project_hook.interpolated_url)).once
+ end
+
+ context 'there is userinfo' do
+ before do
+ project_hook.update!(url: 'http://{one}:{two}@example.com')
+ stub_full_request('http://example.com', method: :post)
+ end
+
+ it 'POSTs to the interpolated URL, and logs the hook.url' do
+ expect(service_instance).to receive(:queue_log_execution_with_retry).with(
+ include(url: project_hook.url),
+ :ok
+ )
+
+ service_instance.execute
+
+ expect(WebMock)
+ .to have_requested(:post, stubbed_hostname('http://example.com'))
+ .with(headers: headers.merge('Authorization' => 'Basic YTpi'))
+ .once
+ end
+ end
+ end
+
context 'when token is defined' do
- let_it_be(:project_hook) { create(:project_hook, :token) }
+ before do
+ project_hook.token = generate(:token)
+ end
it 'POSTs to the webhook URL' do
stub_full_request(project_hook.url, method: :post)
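# Editor's sketch, not GitLab's actual WebHook#interpolated_url implementation:
# the specs above rely on `{name}` placeholders in hook.url being replaced from
# `url_variables`, and on userinfo in the interpolated URL becoming a Basic
# Authorization header. A minimal illustration of both behaviours:
require 'base64'

def interpolate_url(url, url_variables)
  # Replace each `{name}` placeholder with its value from url_variables.
  url.gsub(/\{(\w+)\}/) { url_variables.fetch(Regexp.last_match(1)) }
end

interpolate_url('http://example.com/{one}/{two}', 'one' => 'a', 'two' => 'b')
# => "http://example.com/a/b"

# With userinfo, 'http://{one}:{two}@example.com' interpolates to 'http://a:b@example.com',
# and Base64-encoding "a:b" explains the header expected in the spec:
Base64.strict_encode64('a:b') # => "YTpi"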
diff --git a/spec/services/web_hooks/log_execution_service_spec.rb b/spec/services/web_hooks/log_execution_service_spec.rb
index 0ba0372b99d..873f6adc8dc 100644
--- a/spec/services/web_hooks/log_execution_service_spec.rb
+++ b/spec/services/web_hooks/log_execution_service_spec.rb
@@ -35,6 +35,12 @@ RSpec.describe WebHooks::LogExecutionService do
expect(WebHookLog.recent.first).to have_attributes(data)
end
+ it 'updates the last failure' do
+ expect(project_hook).to receive(:update_last_failure)
+
+ service.execute
+ end
+
context 'obtaining an exclusive lease' do
let(:lease_key) { "web_hooks:update_hook_failure_state:#{project_hook.id}" }
diff --git a/spec/services/work_items/create_and_link_service_spec.rb b/spec/services/work_items/create_and_link_service_spec.rb
index 93c029bdab1..81be15f9e2f 100644
--- a/spec/services/work_items/create_and_link_service_spec.rb
+++ b/spec/services/work_items/create_and_link_service_spec.rb
@@ -7,13 +7,16 @@ RSpec.describe WorkItems::CreateAndLinkService do
let_it_be(:project) { create(:project, group: group) }
let_it_be(:user) { create(:user) }
let_it_be(:related_work_item) { create(:work_item, project: project) }
+ let_it_be(:invalid_parent) { create(:work_item, :task, project: project) }
let(:spam_params) { double }
let(:link_params) { {} }
+
let(:params) do
{
title: 'Awesome work item',
- description: 'please fix'
+ description: 'please fix',
+ work_item_type_id: WorkItems::Type.default_by_type(:task).id
}
end
@@ -40,32 +43,32 @@ RSpec.describe WorkItems::CreateAndLinkService do
end
context 'when link params are valid' do
- let(:link_params) { { issuable_references: [related_work_item.to_reference] } }
+ let(:link_params) { { parent_work_item: related_work_item } }
it 'creates a work item successfully with links' do
expect do
service_result
end.to change(WorkItem, :count).by(1).and(
- change(IssueLink, :count).by(1)
+ change(WorkItems::ParentLink, :count).by(1)
)
end
end
- context 'when link params are invalid' do
- let(:link_params) { { issuable_references: ['invalid reference'] } }
+ context 'when link creation fails' do
+ let(:link_params) { { parent_work_item: invalid_parent } }
it { is_expected.to be_error }
it 'does not create a link and does not rollback transaction' do
expect do
service_result
- end.to not_change(IssueLink, :count).and(
+ end.to not_change(WorkItems::ParentLink, :count).and(
change(WorkItem, :count).by(1)
)
end
it 'returns a link creation error message' do
- expect(service_result.errors).to contain_exactly('No matching issue found. Make sure that you are adding a valid issue URL.')
+ expect(service_result.errors).to contain_exactly(/only Issue and Incident can be parent of Task./)
end
end
end
@@ -84,7 +87,7 @@ RSpec.describe WorkItems::CreateAndLinkService do
expect do
service_result
end.to not_change(WorkItem, :count).and(
- not_change(IssueLink, :count)
+ not_change(WorkItems::ParentLink, :count)
)
end
diff --git a/spec/services/work_items/create_from_task_service_spec.rb b/spec/services/work_items/create_from_task_service_spec.rb
index b4db925f053..7d2dab228b1 100644
--- a/spec/services/work_items/create_from_task_service_spec.rb
+++ b/spec/services/work_items/create_from_task_service_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe WorkItems::CreateFromTaskService do
expect do
service_result
end.to not_change(WorkItem, :count).and(
- not_change(IssueLink, :count)
+ not_change(WorkItems::ParentLink, :count)
)
end
end
@@ -47,12 +47,14 @@ RSpec.describe WorkItems::CreateFromTaskService do
context 'when work item params are valid' do
it { is_expected.to be_success }
- it 'creates a work item and links it to the original work item successfully' do
+ it 'creates a work item and creates parent link to the original work item' do
expect do
service_result
end.to change(WorkItem, :count).by(1).and(
- change(IssueLink, :count)
+ change(WorkItems::ParentLink, :count).by(1)
)
+
+ expect(work_item_to_update.reload.work_item_children).not_to be_empty
end
it 'replaces the original issue markdown description with new work item reference' do
@@ -73,7 +75,7 @@ RSpec.describe WorkItems::CreateFromTaskService do
expect do
service_result
end.to not_change(WorkItem, :count).and(
- not_change(IssueLink, :count)
+ not_change(WorkItems::ParentLink, :count)
)
end
diff --git a/spec/services/work_items/create_service_spec.rb b/spec/services/work_items/create_service_spec.rb
index f495e967b26..4009c85bacd 100644
--- a/spec/services/work_items/create_service_spec.rb
+++ b/spec/services/work_items/create_service_spec.rb
@@ -6,9 +6,12 @@ RSpec.describe WorkItems::CreateService do
include AfterNextHelpers
let_it_be_with_reload(:project) { create(:project) }
+ let_it_be(:parent) { create(:work_item, project: project) }
let_it_be(:guest) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
let_it_be(:user_with_no_access) { create(:user) }
+ let(:widget_params) { {} }
let(:spam_params) { double }
let(:current_user) { guest }
let(:opts) do
@@ -20,10 +23,21 @@ RSpec.describe WorkItems::CreateService do
before_all do
project.add_guest(guest)
+ project.add_reporter(reporter)
end
describe '#execute' do
- subject(:service_result) { described_class.new(project: project, current_user: current_user, params: opts, spam_params: spam_params).execute }
+ let(:service) do
+ described_class.new(
+ project: project,
+ current_user: current_user,
+ params: opts,
+ spam_params: spam_params,
+ widget_params: widget_params
+ )
+ end
+
+ subject(:service_result) { service.execute }
before do
stub_spam_services
@@ -61,6 +75,14 @@ RSpec.describe WorkItems::CreateService do
it 'returns validation errors' do
expect(service_result.errors).to contain_exactly("Title can't be blank")
end
+
+ it 'does not execute after-create transaction widgets' do
+ expect(service).to receive(:create).and_call_original
+ expect(service).not_to receive(:execute_widgets)
+ .with(callback: :after_create_in_transaction, widget_params: widget_params)
+
+ service_result
+ end
end
context 'checking spam' do
@@ -80,5 +102,104 @@ RSpec.describe WorkItems::CreateService do
service_result
end
end
+
+ it_behaves_like 'work item widgetable service' do
+ let(:widget_params) do
+ {
+ hierarchy_widget: { parent: parent }
+ }
+ end
+
+ let(:service) do
+ described_class.new(
+ project: project,
+ current_user: current_user,
+ params: opts,
+ spam_params: spam_params,
+ widget_params: widget_params
+ )
+ end
+
+ let(:service_execute) { service.execute }
+
+ let(:supported_widgets) do
+ [
+ {
+ klass: WorkItems::Widgets::HierarchyService::CreateService,
+ callback: :after_create_in_transaction,
+ params: { parent: parent }
+ }
+ ]
+ end
+ end
+
+ describe 'hierarchy widget' do
+ let(:widget_params) { { hierarchy_widget: { parent: parent } } }
+
+ shared_examples 'fails creating work item and returns errors' do
+ it 'does not create new work item if parent can not be set' do
+ expect { service_result }.not_to change(WorkItem, :count)
+
+ expect(service_result[:status]).to be(:error)
+ expect(service_result[:message]).to match(error_message)
+ end
+ end
+
+ context 'when user can admin parent link' do
+ let(:current_user) { reporter }
+
+ context 'when parent is valid work item' do
+ let(:opts) do
+ {
+ title: 'Awesome work_item',
+ description: 'please fix',
+ work_item_type: create(:work_item_type, :task)
+ }
+ end
+
+ it 'creates new work item and sets parent reference' do
+ expect { service_result }.to change(
+ WorkItem, :count).by(1).and(change(
+ WorkItems::ParentLink, :count).by(1))
+
+ expect(service_result[:status]).to be(:success)
+ end
+ end
+
+ context 'when parent type is invalid' do
+ let_it_be(:parent) { create(:work_item, :task, project: project) }
+
+ it_behaves_like 'fails creating work item and returns errors' do
+ let(:error_message) { 'only Issue and Incident can be parent of Task.'}
+ end
+ end
+
+ context 'when hierarchy feature flag is disabled' do
+ before do
+ stub_feature_flags(work_items_hierarchy: false)
+ end
+
+ it_behaves_like 'fails creating work item and returns errors' do
+ let(:error_message) { '`work_items_hierarchy` feature flag disabled for this project' }
+ end
+ end
+ end
+
+ context 'when user cannot admin parent link' do
+ let(:current_user) { guest }
+
+ let(:opts) do
+ {
+ title: 'Awesome work_item',
+ description: 'please fix',
+ work_item_type: create(:work_item_type, :task)
+ }
+ end
+
+ it_behaves_like 'fails creating work item and returns errors' do
+ let(:error_message) { 'No matching task found. Make sure that you are adding a valid task ID.'}
+ end
+ end
+ end
end
end
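# Editor's sketch of the hierarchy rule behind the error messages asserted above.
# Constant and method names are illustrative only; the real validation lives in
# the WorkItems hierarchy code, not in this spec. The rule: a Task can only be
# nested under an Issue or an Incident.
ALLOWED_PARENT_TYPES_FOR_TASK = %w[issue incident].freeze

def valid_parent_for_task?(parent_type)
  ALLOWED_PARENT_TYPES_FOR_TASK.include?(parent_type.to_s)
end

valid_parent_for_task?(:issue)    # => true
valid_parent_for_task?(:incident) # => true
valid_parent_for_task?(:task)     # => false ("only Issue and Incident can be parent of Task.")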
diff --git a/spec/services/work_items/delete_task_service_spec.rb b/spec/services/work_items/delete_task_service_spec.rb
index 04944645c9b..07a0d8d6c1a 100644
--- a/spec/services/work_items/delete_task_service_spec.rb
+++ b/spec/services/work_items/delete_task_service_spec.rb
@@ -67,7 +67,7 @@ RSpec.describe WorkItems::DeleteTaskService do
it 'removes the task list item with the work item reference' do
expect do
service_result
- end.to change(list_work_item, :description).from(list_work_item.description).to('')
+ end.to change(list_work_item, :description).from(list_work_item.description).to("- [ ] #{task.title}")
end
end
diff --git a/spec/services/work_items/parent_links/create_service_spec.rb b/spec/services/work_items/parent_links/create_service_spec.rb
new file mode 100644
index 00000000000..85b0ee040cd
--- /dev/null
+++ b/spec/services/work_items/parent_links/create_service_spec.rb
@@ -0,0 +1,173 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::ParentLinks::CreateService do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:work_item) { create(:work_item, project: project) }
+ let_it_be(:task) { create(:work_item, :task, project: project) }
+ let_it_be(:task1) { create(:work_item, :task, project: project) }
+ let_it_be(:task2) { create(:work_item, :task, project: project) }
+ let_it_be(:guest_task) { create(:work_item, :task) }
+ let_it_be(:invalid_task) { build_stubbed(:work_item, :task, id: non_existing_record_id)}
+ let_it_be(:another_project) { create(:project) }
+ let_it_be(:other_project_task) { create(:work_item, :task, iid: 100, project: another_project) }
+ let_it_be(:existing_parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item)}
+
+ let(:parent_link_class) { WorkItems::ParentLink }
+ let(:issuable_type) { :task }
+ let(:params) { {} }
+
+ before do
+ project.add_reporter(user)
+ project.add_guest(guest)
+ guest_task.project.add_guest(user)
+ another_project.add_reporter(user)
+ end
+
+ shared_examples 'returns not found error' do
+ it 'returns error' do
+ error = "No matching #{issuable_type} found. Make sure that you are adding a valid #{issuable_type} ID."
+
+ is_expected.to eq(service_error(error))
+ end
+
+ it 'no relationship is created' do
+ expect { subject }.not_to change(parent_link_class, :count)
+ end
+ end
+
+ subject { described_class.new(work_item, user, params).execute }
+
+ context 'when the reference list is empty' do
+ let(:params) { { issuable_references: [] } }
+
+ it_behaves_like 'returns not found error'
+ end
+
+ context 'when work item not found' do
+ let(:params) { { issuable_references: [invalid_task] } }
+
+ it_behaves_like 'returns not found error'
+ end
+
+ context 'when user has no permission to link work items' do
+ let(:params) { { issuable_references: [guest_task] } }
+
+ it_behaves_like 'returns not found error'
+ end
+
+ context 'child and parent are the same work item' do
+ let(:params) { { issuable_references: [work_item] } }
+
+ it 'no relationship is created' do
+ expect { subject }.not_to change(parent_link_class, :count)
+ end
+ end
+
+ context 'when there are tasks to relate' do
+ let(:params) { { issuable_references: [task1, task2] } }
+
+ it 'creates relationships', :aggregate_failures do
+ expect { subject }.to change(parent_link_class, :count).by(2)
+
+ tasks_parent = parent_link_class.where(work_item: [task1, task2]).map(&:work_item_parent).uniq
+ expect(tasks_parent).to match_array([work_item])
+ end
+
+ it 'returns success status and created links', :aggregate_failures do
+ expect(subject.keys).to match_array([:status, :created_references])
+ expect(subject[:status]).to eq(:success)
+ expect(subject[:created_references].map(&:work_item_id)).to match_array([task1.id, task2.id])
+ end
+
+ context 'when task is already assigned' do
+ let(:params) { { issuable_references: [task, task2] } }
+
+ it 'creates links only for non related tasks' do
+ expect { subject }.to change(parent_link_class, :count).by(1)
+
+ expect(subject[:created_references].map(&:work_item_id)).to match_array([task2.id])
+ end
+ end
+
+ context 'when there are invalid children' do
+ let_it_be(:issue) { create(:work_item, project: project) }
+
+ let(:params) { { issuable_references: [task1, issue, other_project_task] } }
+
+ it 'creates links only for valid children' do
+ expect { subject }.to change { parent_link_class.count }.by(1)
+ end
+
+ it 'returns error status' do
+ error = "#{issue.to_reference} cannot be added: only Task can be assigned as a child in hierarchy.. " \
+ "#{other_project_task.to_reference} cannot be added: parent must be in the same project as child."
+
+ is_expected.to eq(service_error(error, http_status: 422))
+ end
+ end
+
+ context 'when parent type is invalid' do
+ let(:work_item) { create :work_item, :task, project: project }
+
+ let(:params) { { target_issuable: task1 } }
+
+ it 'returns error status' do
+ error = "#{task1.to_reference} cannot be added: only Issue and Incident can be parent of Task."
+
+ is_expected.to eq(service_error(error, http_status: 422))
+ end
+ end
+
+ context 'when max depth is reached' do
+ let(:params) { { issuable_references: [task2] } }
+
+ before do
+ stub_const("#{parent_link_class}::MAX_CHILDREN", 1)
+ end
+
+ it 'returns error status' do
+ error = "#{task2.to_reference} cannot be added: parent already has maximum number of children."
+
+ is_expected.to eq(service_error(error, http_status: 422))
+ end
+ end
+
+ context 'when params include invalid ids' do
+ let(:params) { { issuable_references: [task1, invalid_task] } }
+
+ it 'creates links only for valid IDs' do
+ expect { subject }.to change(parent_link_class, :count).by(1)
+ end
+ end
+
+ context 'when user is a guest' do
+ let(:user) { guest }
+
+ it_behaves_like 'returns not found error'
+ end
+
+ context 'when user is a guest assigned to the work item' do
+ let(:user) { guest }
+
+ before do
+ work_item.assignees = [guest]
+ end
+
+ it_behaves_like 'returns not found error'
+ end
+ end
+ end
+
+ def service_error(message, http_status: 404)
+ {
+ message: message,
+ status: :error,
+ http_status: http_status
+ }
+ end
+end
diff --git a/spec/services/work_items/parent_links/destroy_service_spec.rb b/spec/services/work_items/parent_links/destroy_service_spec.rb
new file mode 100644
index 00000000000..574b70af397
--- /dev/null
+++ b/spec/services/work_items/parent_links/destroy_service_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::ParentLinks::DestroyService do
+ describe '#execute' do
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:work_item) { create(:work_item, project: project) }
+ let_it_be(:task) { create(:work_item, :task, project: project) }
+ let_it_be(:parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item)}
+
+ let(:parent_link_class) { WorkItems::ParentLink }
+
+ subject { described_class.new(parent_link, user).execute }
+
+ before do
+ project.add_reporter(reporter)
+ project.add_guest(guest)
+ end
+
+ context 'when user has permissions to update work items' do
+ let(:user) { reporter }
+
+ it 'removes relation' do
+ expect { subject }.to change(parent_link_class, :count).by(-1)
+ end
+
+ it 'returns success message' do
+ is_expected.to eq(message: 'Relation was removed', status: :success)
+ end
+ end
+
+ context 'when user has insufficient permissions' do
+ let(:user) { guest }
+
+ it 'does not remove relation' do
+ expect { subject }.not_to change(parent_link_class, :count).from(1)
+ end
+
+ it 'returns error message' do
+ is_expected.to eq(message: 'No Work Item Link found', status: :error, http_status: 404)
+ end
+ end
+ end
+end
diff --git a/spec/services/work_items/task_list_reference_removal_service_spec.rb b/spec/services/work_items/task_list_reference_removal_service_spec.rb
index bca72da0efa..91b7814ae92 100644
--- a/spec/services/work_items/task_list_reference_removal_service_spec.rb
+++ b/spec/services/work_items/task_list_reference_removal_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe WorkItems::TaskListReferenceRemovalService do
let_it_be(:developer) { create(:user) }
let_it_be(:project) { create(:project, :repository).tap { |project| project.add_developer(developer) } }
- let_it_be(:task) { create(:work_item, project: project) }
+ let_it_be(:task) { create(:work_item, project: project, title: 'Task title') }
let_it_be(:single_line_work_item, refind: true) do
create(:work_item, project: project, description: "- [ ] #{task.to_reference}+ single line")
end
@@ -82,7 +82,7 @@ RSpec.describe WorkItems::TaskListReferenceRemovalService do
let(:line_number_end) { 1 }
let(:work_item) { single_line_work_item }
- it_behaves_like 'successful work item task reference removal service', ''
+ it_behaves_like 'successful work item task reference removal service', '- [ ] Task title single line'
context 'when description does not contain a task' do
let_it_be(:no_matching_work_item) { create(:work_item, project: project, description: 'no matching task') }
@@ -102,7 +102,8 @@ RSpec.describe WorkItems::TaskListReferenceRemovalService do
end
context 'when task markdown spans multiple lines' do
- it_behaves_like 'successful work item task reference removal service', "Any text\n\n* [x] task\n\nMore text"
+ it_behaves_like 'successful work item task reference removal service',
+ "Any text\n\n* [ ] Item to be converted\n Task title second line\n third line\n* [x] task\n\nMore text"
end
context 'when updating the work item fails' do
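# Editor's sketch of the behaviour change asserted above (illustrative only):
# instead of deleting the whole task-list line, the removal service now swaps
# the work item reference (e.g. "#42+") back to the referenced task's title.
def remove_task_reference(line, reference, title)
  line.sub("#{reference}+", title)
end

remove_task_reference('- [ ] #42+ single line', '#42', 'Task title')
# => "- [ ] Task title single line"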
diff --git a/spec/services/work_items/task_list_reference_replacement_service_spec.rb b/spec/services/work_items/task_list_reference_replacement_service_spec.rb
index e7914eb4a92..965c5f1d554 100644
--- a/spec/services/work_items/task_list_reference_replacement_service_spec.rb
+++ b/spec/services/work_items/task_list_reference_replacement_service_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe WorkItems::TaskListReferenceReplacementService do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:project) { create(:project, :repository).tap { |project| project.add_developer(developer) } }
let_it_be(:single_line_work_item, refind: true) { create(:work_item, project: project, description: '- [ ] single line', lock_version: 3) }
let_it_be(:multiple_line_work_item, refind: true) { create(:work_item, project: project, description: "Any text\n\n* [ ] Item to be converted\n second line\n third line", lock_version: 3) }
@@ -37,6 +38,7 @@ RSpec.describe WorkItems::TaskListReferenceReplacementService do
subject(:result) do
described_class.new(
work_item: work_item,
+ current_user: developer,
work_item_reference: reference,
line_number_start: line_number_start,
line_number_end: line_number_end,
@@ -52,6 +54,12 @@ RSpec.describe WorkItems::TaskListReferenceReplacementService do
let(:task_prefix) { '- [ ]' }
it_behaves_like 'successful work item task reference replacement service'
+
+ it 'creates description version note' do
+ expect { result }.to change(Note, :count).by(1)
+ expect(work_item.notes.last.note).to eq('changed the description')
+ expect(work_item.saved_description_version.id).to eq(work_item.notes.last.system_note_metadata.description_version_id)
+ end
end
context 'when task markdown spans multiple lines' do
diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb
index 9030326dadb..b17c9ffb4fb 100644
--- a/spec/services/work_items/update_service_spec.rb
+++ b/spec/services/work_items/update_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe WorkItems::UpdateService do
let_it_be(:developer) { create(:user) }
let_it_be(:project) { create(:project).tap { |proj| proj.add_developer(developer) } }
+ let_it_be(:parent) { create(:work_item, project: project) }
let_it_be_with_reload(:work_item) { create(:work_item, project: project, assignees: [developer]) }
let(:spam_params) { double }
@@ -13,7 +14,15 @@ RSpec.describe WorkItems::UpdateService do
let(:current_user) { developer }
describe '#execute' do
- subject(:update_work_item) { described_class.new(project: project, current_user: current_user, params: opts, spam_params: spam_params, widget_params: widget_params).execute(work_item) }
+ subject(:update_work_item) do
+ described_class.new(
+ project: project,
+ current_user: current_user,
+ params: opts,
+ spam_params: spam_params,
+ widget_params: widget_params
+ ).execute(work_item)
+ end
before do
stub_spam_services
@@ -27,8 +36,7 @@ RSpec.describe WorkItems::UpdateService do
expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).to receive(:track_work_item_title_changed_action).with(author: current_user)
# During the work item transition we also want to track work items as issues
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_title_changed_action)
-
- update_work_item
+ expect(update_work_item[:status]).to eq(:success)
end
end
@@ -38,8 +46,7 @@ RSpec.describe WorkItems::UpdateService do
it 'does not trigger issuable_title_updated graphql subscription' do
expect(GraphqlTriggers).not_to receive(:issuable_title_updated)
expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).not_to receive(:track_work_item_title_changed_action)
-
- update_work_item
+ expect(update_work_item[:status]).to eq(:success)
end
end
@@ -71,16 +78,104 @@ RSpec.describe WorkItems::UpdateService do
end
end
+ it_behaves_like 'work item widgetable service' do
+ let(:widget_params) do
+ {
+ hierarchy_widget: { parent: parent },
+ description_widget: { description: 'foo' },
+ weight_widget: { weight: 1 }
+ }
+ end
+
+ let(:service) do
+ described_class.new(
+ project: project,
+ current_user: current_user,
+ params: opts,
+ spam_params: spam_params,
+ widget_params: widget_params
+ )
+ end
+
+ let(:service_execute) { service.execute(work_item) }
+
+ let(:supported_widgets) do
+ [
+ { klass: WorkItems::Widgets::DescriptionService::UpdateService, callback: :update, params: { description: 'foo' } },
+ { klass: WorkItems::Widgets::WeightService::UpdateService, callback: :update, params: { weight: 1 } },
+ { klass: WorkItems::Widgets::HierarchyService::UpdateService, callback: :before_update_in_transaction, params: { parent: parent } }
+ ]
+ end
+ end
+
context 'when updating widgets' do
- context 'for the description widget' do
- let(:widget_params) { { description_widget: { description: 'changed' } } }
+ let(:widget_service_class) { WorkItems::Widgets::DescriptionService::UpdateService }
+ let(:widget_params) { { description_widget: { description: 'changed' } } }
+
+ context 'when widget service is not present' do
+ before do
+ allow(widget_service_class).to receive(:new).and_return(nil)
+ end
+
+ it 'ignores widget param' do
+ expect { update_work_item }.not_to change(work_item, :description)
+ end
+ end
+ context 'when the widget does not support update callback' do
+ before do
+ allow_next_instance_of(widget_service_class) do |instance|
+ allow(instance)
+ .to receive(:update)
+ .with(params: { description: 'changed' }).and_return(nil)
+ end
+ end
+
+ it 'ignores widget param' do
+ expect { update_work_item }.not_to change(work_item, :description)
+ end
+ end
+
+ context 'for the description widget' do
it 'updates the description of the work item' do
update_work_item
expect(work_item.description).to eq('changed')
end
end
+
+ context 'for the hierarchy widget' do
+ let(:opts) { { title: 'changed' } }
+ let_it_be(:child_work_item) { create(:work_item, :task, project: project) }
+
+ let(:widget_params) { { hierarchy_widget: { children: [child_work_item] } } }
+
+ it 'updates the children of the work item' do
+ expect do
+ update_work_item
+ work_item.reload
+ end.to change(WorkItems::ParentLink, :count).by(1)
+
+ expect(work_item.work_item_children).to include(child_work_item)
+ end
+
+ context 'when child type is invalid' do
+ let_it_be(:child_work_item) { create(:work_item, project: project) }
+
+ it 'returns error status' do
+ expect(subject[:status]).to be(:error)
+ expect(subject[:message])
+ .to match("#{child_work_item.to_reference} cannot be added: only Task can be assigned as a child in hierarchy.")
+ end
+
+ it 'does not update work item attributes' do
+ expect do
+ update_work_item
+ work_item.reload
+ end.to not_change(WorkItems::ParentLink, :count).and(not_change(work_item, :title))
+ end
+ end
+ end
end
end
end
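# Editor's sketch of the callback-based widget dispatch that `supported_widgets`
# above describes. Names are illustrative; in the real service each widget key
# maps to a Widgets::*Service class, and only services implementing the requested
# callback (:update, :before_update_in_transaction, ...) are invoked.
WIDGET_HANDLERS = {
  description_widget: { update: ->(params) { "description -> #{params[:description]}" } },
  weight_widget:      { update: ->(params) { "weight -> #{params[:weight]}" } },
  hierarchy_widget:   { before_update_in_transaction: ->(params) { "parent -> #{params[:parent]}" } }
}.freeze

def execute_widgets(callback:, widget_params:)
  widget_params.filter_map do |key, params|
    handler = WIDGET_HANDLERS.dig(key, callback)
    handler&.call(params)
  end
end

execute_widgets(callback: :update,
                widget_params: { description_widget: { description: 'foo' }, weight_widget: { weight: 1 } })
# => ["description -> foo", "weight -> 1"]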
diff --git a/spec/services/work_items/widgets/description_service/update_service_spec.rb b/spec/services/work_items/widgets/description_service/update_service_spec.rb
new file mode 100644
index 00000000000..a2eceb97f09
--- /dev/null
+++ b/spec/services/work_items/widgets/description_service/update_service_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:work_item) { create(:work_item, project: project, description: 'old description') }
+
+ let(:widget) { work_item.widgets.find {|widget| widget.is_a?(WorkItems::Widgets::Description) } }
+
+ describe '#update' do
+ subject { described_class.new(widget: widget, current_user: user).update(params: params) } # rubocop:disable Rails/SaveBang
+
+ context 'when description param is present' do
+ let(:params) { { description: 'updated description' } }
+
+ it 'correctly sets work item description value' do
+ subject
+
+ expect(work_item.description).to eq('updated description')
+ end
+ end
+
+ context 'when description param is not present' do
+ let(:params) { {} }
+
+ it 'does not change work item description value' do
+ subject
+
+ expect(work_item.description).to eq('old description')
+ end
+ end
+ end
+end
diff --git a/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb b/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb
new file mode 100644
index 00000000000..4f6ff1b8676
--- /dev/null
+++ b/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb
@@ -0,0 +1,160 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ let_it_be(:work_item) { create(:work_item, project: project) }
+ let_it_be(:parent_work_item) { create(:work_item, project: project) }
+ let_it_be(:child_work_item) { create(:work_item, :task, project: project) }
+ let_it_be(:existing_link) { create(:parent_link, work_item: child_work_item, work_item_parent: work_item) }
+
+ let(:widget) { work_item.widgets.find {|widget| widget.is_a?(WorkItems::Widgets::Hierarchy) } }
+ let(:not_found_error) { 'No matching task found. Make sure that you are adding a valid task ID.' }
+
+ shared_examples 'raises a WidgetError' do
+ it { expect { subject }.to raise_error(described_class::WidgetError, message) }
+ end
+
+ describe '#update' do
+ subject { described_class.new(widget: widget, current_user: user).before_update_in_transaction(params: params) }
+
+ context 'when parent and children params are present' do
+ let(:params) { { parent: parent_work_item, children: [child_work_item] } }
+
+ it_behaves_like 'raises a WidgetError' do
+ let(:message) { 'A Work Item can be a parent or a child, but not both.' }
+ end
+ end
+
+ context 'when updating children' do
+ let_it_be(:child_work_item2) { create(:work_item, :task, project: project) }
+ let_it_be(:child_work_item3) { create(:work_item, :task, project: project) }
+ let_it_be(:child_work_item4) { create(:work_item, :task, project: project) }
+
+ context 'when work_items_hierarchy feature flag is disabled' do
+ let(:params) { { children: [child_work_item4] }}
+
+ before do
+ stub_feature_flags(work_items_hierarchy: false)
+ end
+
+ it_behaves_like 'raises a WidgetError' do
+ let(:message) { '`work_items_hierarchy` feature flag disabled for this project' }
+ end
+ end
+
+ context 'when user has insufficient permissions to link work items' do
+ let(:params) { { children: [child_work_item4] }}
+
+ it_behaves_like 'raises a WidgetError' do
+ let(:message) { not_found_error }
+ end
+ end
+
+ context 'when user has sufficient permissions to link work item' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'with valid params' do
+ let(:params) { { children: [child_work_item2, child_work_item3] }}
+
+ it 'correctly sets work item parent' do
+ subject
+
+ expect(work_item.reload.work_item_children)
+ .to contain_exactly(child_work_item, child_work_item2, child_work_item3)
+ end
+ end
+
+ context 'when child is already assigned' do
+ let(:params) { { children: [child_work_item] }}
+
+ it_behaves_like 'raises a WidgetError' do
+ let(:message) { 'Task(s) already assigned' }
+ end
+ end
+
+ context 'when child type is invalid' do
+ let_it_be(:child_issue) { create(:work_item, project: project) }
+
+ let(:params) { { children: [child_issue] }}
+
+ it_behaves_like 'raises a WidgetError' do
+ let(:message) do
+ "#{child_issue.to_reference} cannot be added: only Task can be assigned as a child in hierarchy."
+ end
+ end
+ end
+ end
+ end
+
+ context 'when updating parent' do
+ let_it_be(:work_item) { create(:work_item, :task, project: project) }
+
+ let(:params) {{ parent: parent_work_item } }
+
+ context 'when work_items_hierarchy feature flag is disabled' do
+ before do
+ stub_feature_flags(work_items_hierarchy: false)
+ end
+
+ it_behaves_like 'raises a WidgetError' do
+ let(:message) { '`work_items_hierarchy` feature flag disabled for this project' }
+ end
+ end
+
+ context 'when user has insufficient permissions to link work items' do
+ it_behaves_like 'raises a WidgetError' do
+ let(:message) { not_found_error }
+ end
+ end
+
+ context 'when user has sufficient permissions to link work item' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'correctly sets new parent' do
+ expect(subject[:status]).to eq(:success)
+ expect(work_item.work_item_parent).to eq(parent_work_item)
+ end
+
+ context 'when parent is nil' do
+ let(:params) { { parent: nil } }
+
+ it 'removes the work item parent if present' do
+ work_item.update!(work_item_parent: parent_work_item)
+
+ expect do
+ subject
+ work_item.reload
+ end.to change(work_item, :work_item_parent).from(parent_work_item).to(nil)
+ end
+
+ it 'returns success status if parent not present', :aggregate_failures do
+ work_item.update!(work_item_parent: nil)
+
+ expect(subject[:status]).to eq(:success)
+ expect(work_item.reload.work_item_parent).to be_nil
+ end
+ end
+
+ context 'when type is invalid' do
+ let_it_be(:parent_task) { create(:work_item, :task, project: project)}
+
+ let(:params) {{ parent: parent_task } }
+
+ it_behaves_like 'raises a WidgetError' do
+ let(:message) do
+ "#{work_item.to_reference} cannot be added: only Issue and Incident can be parent of Task."
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/work_items/widgets/weight_service/update_service_spec.rb b/spec/services/work_items/widgets/weight_service/update_service_spec.rb
new file mode 100644
index 00000000000..97e17f1c526
--- /dev/null
+++ b/spec/services/work_items/widgets/weight_service/update_service_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::WeightService::UpdateService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:work_item) { create(:work_item, project: project, weight: 1) }
+
+ let(:widget) { work_item.widgets.find {|widget| widget.is_a?(WorkItems::Widgets::Weight) } }
+
+ describe '#update' do
+ subject { described_class.new(widget: widget, current_user: user).update(params: params) } # rubocop:disable Rails/SaveBang
+
+ context 'when weight param is present' do
+ let(:params) { { weight: 2 } }
+
+ it 'correctly sets work item weight value' do
+ subject
+
+ expect(work_item.weight).to eq(2)
+ end
+ end
+
+ context 'when weight param is not present' do
+ let(:params) { {} }
+
+ it 'does not change work item weight value', :aggregate_failures do
+ expect { subject }
+ .to not_change { work_item.weight }
+
+ expect(work_item.weight).to eq(1)
+ end
+ end
+ end
+end
diff --git a/spec/simplecov_env.rb b/spec/simplecov_env.rb
index da4a0e8da80..dbaecc6a233 100644
--- a/spec/simplecov_env.rb
+++ b/spec/simplecov_env.rb
@@ -9,7 +9,7 @@ module SimpleCovEnv
extend self
def start!
- return unless ENV['SIMPLECOV']
+ return if !ENV.key?('SIMPLECOV') || ENV['SIMPLECOV'] == '0'
configure_profile
configure_job
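# Editor's sketch of the new guard's behaviour (illustration only; the condition
# itself is shown in the hunk above):
def simplecov_enabled?(env)
  !(!env.key?('SIMPLECOV') || env['SIMPLECOV'] == '0')
end

simplecov_enabled?({})                 # => false (unset: coverage off, as before)
simplecov_enabled?('SIMPLECOV' => '0') # => false (new: explicit opt-out honoured)
simplecov_enabled?('SIMPLECOV' => '1') # => true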
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index b39153e79fc..47cd78873f8 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -201,10 +201,13 @@ RSpec.configure do |config|
config.include SidekiqMiddleware
config.include StubActionCableConnection, type: :channel
config.include StubSpamServices
+ config.include SnowplowHelpers
config.include RenderedHelpers
config.include RSpec::Benchmark::Matchers, type: :benchmark
+ config.include DetailedErrorHelpers
include StubFeatureFlags
+ include StubSnowplow
if ENV['CI'] || ENV['RETRIES']
# This includes the first try, i.e. tests will be run 4 times before failing.
@@ -273,6 +276,9 @@ RSpec.configure do |config|
# (ie. ApplicationSetting#auto_devops_enabled)
stub_feature_flags(force_autodevops_on_by_default: false)
+ # The survey popover can block the diffs causing specs to fail
+ stub_feature_flags(mr_experience_survey: false)
+
# Merge request widget GraphQL requests are disabled in the tests
# for now whilst we migrate as much as we can over the GraphQL
# stub_feature_flags(merge_request_widget_graphql: false)
@@ -289,6 +295,10 @@ RSpec.configure do |config|
stub_feature_flags(ci_queueing_disaster_recovery_disable_fair_scheduling: false)
stub_feature_flags(ci_queueing_disaster_recovery_disable_quota: false)
+ # It's disabled in specs because we don't support certain features which
+ # cause spec failures.
+ stub_feature_flags(use_click_house_database_for_error_tracking: false)
+
enable_rugged = example.metadata[:enable_rugged].present?
# Disable Rugged features by default
@@ -296,10 +306,6 @@ RSpec.configure do |config|
stub_feature_flags(flag => enable_rugged)
end
- # Disable the usage of file_identifier_hash by default until it is ready
- # See https://gitlab.com/gitlab-org/gitlab/-/issues/33867
- stub_feature_flags(file_identifier_hash: false)
-
# Disable `main_branch_over_master` as we migrate
# from `master` to `main` across our codebase.
# It's done in order to preserve the consistency in tests
@@ -320,6 +326,14 @@ RSpec.configure do |config|
# most cases. We do test the CAPTCHA flow in the appropriate specs.
stub_feature_flags(arkose_labs_login_challenge: false)
+ # Specs should not require email verification by default, this makes the sign-in flow simpler in
+ # most cases. We do test the email verification flow in the appropriate specs.
+ stub_feature_flags(require_email_verification: false)
+
+ # This feature flag is for selectively disabling by actor, therefore we don't enable it by default.
+ # See https://docs.gitlab.com/ee/development/feature_flags/#selectively-disable-by-actor
+ stub_feature_flags(legacy_merge_request_state_check_for_merged_result_pipelines: false)
+
allow(Gitlab::GitalyClient).to receive(:can_use_disk?).and_return(enable_rugged)
else
unstub_all_feature_flags
@@ -363,6 +377,9 @@ RSpec.configure do |config|
stub_application_setting(admin_mode: true) unless example.metadata[:do_not_mock_admin_mode_setting]
allow(Gitlab::CurrentSettings).to receive(:current_application_settings?).and_return(false)
+
+ # Ensure that Snowplow is enabled by default unless forced to the opposite
+ stub_snowplow unless example.metadata[:do_not_stub_snowplow_by_default]
end
config.around(:example, :quarantine) do |example|
diff --git a/spec/support/finder_collection.rb b/spec/support/finder_collection.rb
new file mode 100644
index 00000000000..494dd4bdca1
--- /dev/null
+++ b/spec/support/finder_collection.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'set'
+
+module Support
+ # Ensure that finders' `execute` method always returns
+ # `ActiveRecord::Relation`.
+ #
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/298771
+ module FinderCollection
+ def self.install_check(finder_class)
+ return unless check?(finder_class)
+
+ finder_class.prepend CheckResult
+ end
+
+ ALLOWLIST_YAML = File.join(__dir__, 'finder_collection_allowlist.yml')
+
+ def self.check?(finder_class)
+ @allowlist ||= YAML.load_file(ALLOWLIST_YAML).to_set
+
+ @allowlist.exclude?(finder_class.name)
+ end
+
+ module CheckResult
+ def execute(...)
+ result = super
+
+ unless result.is_a?(ActiveRecord::Relation)
+ raise <<~MESSAGE
+ #{self.class}#execute returned `#{result.class}` instead of `ActiveRecord::Relation`.
+ All finder classes are expected to return `ActiveRecord::Relation`.
+
+ Read more at https://docs.gitlab.com/ee/development/reusing_abstractions.html#finders
+ MESSAGE
+ end
+
+ result
+ end
+ end
+ end
+end
+
+RSpec.configure do |config|
+ config.before(:all, type: :finder) do
+ Support::FinderCollection.install_check(described_class)
+ end
+end
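# Editor's sketch of the contract this hook enforces, using throwaway finder
# classes (illustrative only; real finders live under app/finders, and in specs
# the check is installed automatically for examples tagged type: :finder):
class RelationFinder
  def execute
    Project.none # any ActiveRecord::Relation satisfies the check
  end
end

class ArrayFinder
  def execute
    [] # with CheckResult prepended, this raises: not an ActiveRecord::Relation
  end
end

Support::FinderCollection.install_check(RelationFinder)
Support::FinderCollection.install_check(ArrayFinder)

RelationFinder.new.execute # => #<ActiveRecord::Relation []>
ArrayFinder.new.execute    # raises with the message template shown above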
diff --git a/spec/support/finder_collection_allowlist.yml b/spec/support/finder_collection_allowlist.yml
new file mode 100644
index 00000000000..8f09153afec
--- /dev/null
+++ b/spec/support/finder_collection_allowlist.yml
@@ -0,0 +1,66 @@
+# Allow list for spec/support/finder_collection.rb
+
+# Permanent excludes
+# For example:
+# FooFinder # Reason: It uses a memory backend
+
+# Temporary excludes (aka TODOs)
+# For example:
+# BarFinder # See <ISSUE_URL>
+- AccessRequestsFinder
+- Admin::PlansFinder
+- Analytics::CycleAnalytics::StageFinder
+- ApplicationsFinder
+- Autocomplete::GroupFinder
+- Autocomplete::ProjectFinder
+- Autocomplete::UsersFinder
+- BilledUsersFinder
+- Boards::BoardsFinder
+- Boards::VisitsFinder
+- BranchesFinder
+- Ci::AuthJobFinder
+- Ci::CommitStatusesFinder
+- Ci::DailyBuildGroupReportResultsFinder
+- ClusterAncestorsFinder
+- Clusters::AgentAuthorizationsFinder
+- Clusters::KubernetesNamespaceFinder
+- ComplianceManagement::MergeRequests::ComplianceViolationsFinder
+- ContainerRepositoriesFinder
+- ContextCommitsFinder
+- Environments::EnvironmentNamesFinder
+- Environments::EnvironmentsByDeploymentsFinder
+- EventsFinder
+- GroupDescendantsFinder
+- Groups::ProjectsRequiringAuthorizationsRefresh::OnDirectMembershipFinder
+- Groups::ProjectsRequiringAuthorizationsRefresh::OnTransferFinder
+- KeysFinder
+- LfsPointersFinder
+- LicenseTemplateFinder
+- MergeRequests::OldestPerCommitFinder
+- NotesFinder
+- Packages::BuildInfosFinder
+- Packages::Conan::PackageFileFinder
+- Packages::Go::ModuleFinder
+- Packages::Go::PackageFinder
+- Packages::Go::VersionFinder
+- Packages::PackageFileFinder
+- Packages::PackageFinder
+- Packages::Pypi::PackageFinder
+- Projects::Integrations::Jira::ByIdsFinder
+- Projects::Integrations::Jira::IssuesFinder
+- Releases::EvidencePipelineFinder
+- Repositories::BranchNamesFinder
+- Repositories::ChangelogTagFinder
+- Repositories::TreeFinder
+- Security::FindingsFinder
+- Security::PipelineVulnerabilitiesFinder
+- Security::ScanExecutionPoliciesFinder
+- Security::TrainingProviders::BaseUrlFinder
+- Security::TrainingUrlsFinder
+- SentryIssueFinder
+- ServerlessDomainFinder
+- TagsFinder
+- TemplateFinder
+- UploaderFinder
+- UserGroupNotificationSettingsFinder
+- UserGroupsCounter
diff --git a/spec/support/gitlab_experiment.rb b/spec/support/gitlab_experiment.rb
index 823aab0436e..4236091ca6c 100644
--- a/spec/support/gitlab_experiment.rb
+++ b/spec/support/gitlab_experiment.rb
@@ -2,7 +2,6 @@
# Require the provided spec helper and matchers.
require 'gitlab/experiment/rspec'
-require_relative 'stub_snowplow'
RSpec.configure do |config|
config.include StubSnowplow, :experiment
diff --git a/spec/support/gitlab_stubs/gitlab_ci_dast_includes.yml b/spec/support/gitlab_stubs/gitlab_ci_dast_includes.yml
new file mode 100644
index 00000000000..583d44c452e
--- /dev/null
+++ b/spec/support/gitlab_stubs/gitlab_ci_dast_includes.yml
@@ -0,0 +1,10 @@
+dast:
+ stage: dast
+ image:
+ name: "$SECURE_ANALYZERS_PREFIX/dast:$DAST_VERSION"
+ variables:
+ GIT_STRATEGY: none
+ allow_failure: true
+ dast_configuration:
+ site_profile: "site_profile_name_included"
+ scanner_profile: "scanner_profile_name_included" \ No newline at end of file
diff --git a/spec/support/graphql/arguments.rb b/spec/support/graphql/arguments.rb
index a5bb01c31a3..478a460a0f6 100644
--- a/spec/support/graphql/arguments.rb
+++ b/spec/support/graphql/arguments.rb
@@ -5,7 +5,7 @@ module Graphql
delegate :blank?, :empty?, to: :to_h
def initialize(values)
- @values = values.compact
+ @values = values
end
def to_h
@@ -42,7 +42,7 @@ module Graphql
when Integer, Float, Symbol then value.to_s
when String, GlobalID then "\"#{value.to_s.gsub(/"/, '\\"')}\""
when Time, Date then "\"#{value.iso8601}\""
- when nil then 'null'
+ when NilClass then 'null'
when true then 'true'
when false then 'false'
else
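# Editor's note on the two changes above, as a standalone sketch: with `compact`
# removed, nil values stay in @values, and the `when NilClass` branch renders them
# as GraphQL `null` instead of the argument being dropped. The surrounding
# serializer method's name is not shown in this hunk, so the name here is assumed:
def as_graphql_literal(value)
  case value
  when NilClass then 'null'
  when true then 'true'
  when false then 'false'
  else value.to_s
  end
end

as_graphql_literal(nil) # => "null"  (previously such arguments were compacted away)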
diff --git a/spec/support/helpers/database/database_helpers.rb b/spec/support/helpers/database/database_helpers.rb
index db093bcef85..f3b2a2a6147 100644
--- a/spec/support/helpers/database/database_helpers.rb
+++ b/spec/support/helpers/database/database_helpers.rb
@@ -4,8 +4,10 @@ module Database
module DatabaseHelpers
# In order to directly work with views using factories,
# we can swap out the view for a table of identical structure.
- def swapout_view_for_table(view)
- ActiveRecord::Base.connection.execute(<<~SQL.squish)
+ def swapout_view_for_table(view, connection: nil)
+ connection ||= ActiveRecord::Base.connection
+
+ connection.execute(<<~SQL.squish)
CREATE TABLE #{view}_copy (LIKE #{view});
DROP VIEW #{view};
ALTER TABLE #{view}_copy RENAME TO #{view};
diff --git a/spec/support/helpers/detailed_error_helpers.rb b/spec/support/helpers/detailed_error_helpers.rb
new file mode 100644
index 00000000000..2da53a6bffd
--- /dev/null
+++ b/spec/support/helpers/detailed_error_helpers.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'google/rpc/status_pb'
+require 'google/protobuf/well_known_types'
+
+module DetailedErrorHelpers
+ def new_detailed_error(error_code, error_message, details)
+ status_error = Google::Rpc::Status.new(
+ code: error_code,
+ message: error_message,
+ details: [Google::Protobuf::Any.pack(details)]
+ )
+
+ GRPC::BadStatus.new(
+ error_code,
+ error_message,
+ { "grpc-status-details-bin" => Google::Rpc::Status.encode(status_error) })
+ end
+end
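# Editor's sketch of how the helper above might be used in a spec. The protobuf
# details message is an assumption for illustration; any packable message works.
error = new_detailed_error(
  GRPC::Core::StatusCodes::INVALID_ARGUMENT,
  'invalid ref',
  Gitaly::InvalidRefFormatError.new # assumed detail type, purely illustrative
)

error.code    # => 3 (INVALID_ARGUMENT)
error.details # => "invalid ref"
# error.metadata carries the encoded Google::Rpc::Status under "grpc-status-details-bin"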
diff --git a/spec/support/helpers/features/invite_members_modal_helper.rb b/spec/support/helpers/features/invite_members_modal_helper.rb
index 7ed64615020..b56ac5b32c6 100644
--- a/spec/support/helpers/features/invite_members_modal_helper.rb
+++ b/spec/support/helpers/features/invite_members_modal_helper.rb
@@ -9,18 +9,28 @@ module Spec
click_on 'Invite members'
page.within invite_modal_selector do
- Array.wrap(names).each do |name|
- find(member_dropdown_selector).set(name)
+ select_members(names)
+ choose_options(role, expires_at)
+ click_button 'Invite'
+ end
- wait_for_requests
- click_button name
- end
+ page.refresh if refresh
+ end
- choose_options(role, expires_at)
+ def input_invites(names)
+ click_on 'Invite members'
- click_button 'Invite'
+ page.within invite_modal_selector do
+ select_members(names)
+ end
+ end
+
+ def select_members(names)
+ Array.wrap(names).each do |name|
+ find(member_dropdown_selector).set(name)
- page.refresh if refresh
+ wait_for_requests
+ click_button name
end
end
@@ -64,6 +74,24 @@ module Spec
'[data-testid="invite-modal"]'
end
+ def member_token_error_selector(id)
+ "[data-testid='error-icon-#{id}']"
+ end
+
+ def member_token_avatar_selector
+ "[data-testid='token-avatar']"
+ end
+
+ def member_token_selector(id)
+ "[data-token-id='#{id}']"
+ end
+
+ def remove_token(id)
+ page.within member_token_selector(id) do
+ find('[data-testid="close-icon"]').click
+ end
+ end
+
def expect_to_have_group(group)
expect(page).to have_selector("[entity-id='#{group.id}']")
end
diff --git a/spec/support/helpers/features/source_editor_spec_helpers.rb b/spec/support/helpers/features/source_editor_spec_helpers.rb
index 57057b47fbb..cdc59f9cbe1 100644
--- a/spec/support/helpers/features/source_editor_spec_helpers.rb
+++ b/spec/support/helpers/features/source_editor_spec_helpers.rb
@@ -15,13 +15,6 @@ module Spec
execute_script("monaco.editor.getModel('#{uri}').setValue('#{escape_javascript(value)}')")
end
-
- def editor_get_value
- editor = find('.monaco-editor')
- uri = editor['data-uri']
-
- evaluate_script("monaco.editor.getModel('#{uri}').getValue()")
- end
end
end
end
diff --git a/spec/support/helpers/features/web_ide_spec_helpers.rb b/spec/support/helpers/features/web_ide_spec_helpers.rb
index 358bfacce05..70dedc3ac50 100644
--- a/spec/support/helpers/features/web_ide_spec_helpers.rb
+++ b/spec/support/helpers/features/web_ide_spec_helpers.rb
@@ -12,7 +12,7 @@
# ide_commit
#
module WebIdeSpecHelpers
- include ActionView::Helpers::JavaScriptHelper
+ include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
def ide_visit(project)
visit project_path(project)
@@ -97,17 +97,7 @@ module WebIdeSpecHelpers
end
def ide_set_editor_value(value)
- editor = find('.monaco-editor')
- uri = editor['data-uri']
-
- execute_script("monaco.editor.getModel('#{uri}').setValue('#{escape_javascript(value)}')")
- end
-
- def ide_editor_value
- editor = find('.monaco-editor')
- uri = editor['data-uri']
-
- evaluate_script("monaco.editor.getModel('#{uri}').getValue()")
+ editor_set_value(value)
end
def ide_commit_tab_selector
diff --git a/spec/support/helpers/harbor_helper.rb b/spec/support/helpers/harbor_helper.rb
new file mode 100644
index 00000000000..3f13710ede6
--- /dev/null
+++ b/spec/support/helpers/harbor_helper.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module HarborHelper
+ def harbor_repository_url(container, *args)
+ if container.is_a?(Project)
+ project_harbor_repositories_path(container, *args)
+ else
+ group_harbor_repositories_path(container, *args)
+ end
+ end
+
+ def harbor_artifact_url(container, *args)
+ if container.is_a?(Project)
+ project_harbor_repository_artifacts_path(container, *args)
+ else
+ group_harbor_repository_artifacts_path(container, *args)
+ end
+ end
+
+ def harbor_tag_url(container, *args)
+ if container.is_a?(Project)
+ project_harbor_repository_artifact_tags_path(container, *args)
+ else
+ group_harbor_repository_artifact_tags_path(container, *args)
+ end
+ end
+end
diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb
index 29064f01913..dd210f02ae7 100644
--- a/spec/support/helpers/kubernetes_helpers.rb
+++ b/spec/support/helpers/kubernetes_helpers.rb
@@ -126,24 +126,6 @@ module KubernetesHelpers
WebMock.stub_request(:get, pod_url).to_return(response || kube_pod_response)
end
- def stub_kubeclient_logs(pod_name, namespace, container: nil, status: nil, message: nil)
- stub_kubeclient_discover(service.api_url)
-
- if container
- container_query_param = "container=#{container}&"
- end
-
- logs_url = service.api_url + "/api/v1/namespaces/#{namespace}/pods/#{pod_name}" \
- "/log?#{container_query_param}tailLines=#{::PodLogs::KubernetesService::LOGS_LIMIT}&timestamps=true"
-
- if status
- response = { status: status }
- response[:body] = { message: message }.to_json if message
- end
-
- WebMock.stub_request(:get, logs_url).to_return(response || kube_logs_response)
- end
-
def stub_kubeclient_deployments(namespace, status: nil)
stub_kubeclient_discover(service.api_url)
deployments_url = service.api_url + "/apis/apps/v1/namespaces/#{namespace}/deployments"
diff --git a/spec/support/helpers/project_forks_helper.rb b/spec/support/helpers/project_forks_helper.rb
index 84b5dbc1d23..1504625bb00 100644
--- a/spec/support/helpers/project_forks_helper.rb
+++ b/spec/support/helpers/project_forks_helper.rb
@@ -70,12 +70,9 @@ module ProjectForksHelper
def fork_project_with_submodules(project, user = nil, params = {})
Gitlab::GitalyClient.allow_n_plus_1_calls do
forked_project = fork_project_direct(project, user, params)
- TestEnv.copy_repo(
- forked_project,
- bare_repo: TestEnv.forked_repo_path_bare,
- refs: TestEnv::FORKED_BRANCH_SHA
- )
- forked_project.repository.expire_content_cache
+ repo = Gitlab::GlRepository::PROJECT.repository_for(forked_project)
+ repo.create_from_bundle(TestEnv.forked_repo_bundle_path)
+ repo.expire_content_cache
forked_project
end
diff --git a/spec/support/helpers/project_helpers.rb b/spec/support/helpers/project_helpers.rb
index ef8947ab340..2427ed2bcc9 100644
--- a/spec/support/helpers/project_helpers.rb
+++ b/spec/support/helpers/project_helpers.rb
@@ -13,7 +13,7 @@ module ProjectHelpers
when :admin
create(:user, :admin, name: 'admin')
else
- create(:user, name: membership).tap { |u| target.add_user(u, membership) }
+ create(:user, name: membership).tap { |u| target.add_member(u, membership) }
end
end
diff --git a/spec/support/helpers/prometheus_helpers.rb b/spec/support/helpers/prometheus_helpers.rb
index d49abbf3f19..e1f5e6dee14 100644
--- a/spec/support/helpers/prometheus_helpers.rb
+++ b/spec/support/helpers/prometheus_helpers.rb
@@ -267,6 +267,13 @@ module PrometheusHelpers
}
end
+ def prometheus_alert_payload_fingerprint(prometheus_alert)
+ # timestamp is hard-coded in #prometheus_map_alert_payload
+ fingerprint = "#{prometheus_alert.prometheus_metric_id}/2018-09-24T08:57:31.095725221Z"
+
+ Gitlab::AlertManagement::Fingerprint.generate(fingerprint)
+ end
+
private
def prometheus_map_alert_payload(status, alert)
diff --git a/spec/support/helpers/repo_helpers.rb b/spec/support/helpers/repo_helpers.rb
index f275be39dc4..e76a1dd5a74 100644
--- a/spec/support/helpers/repo_helpers.rb
+++ b/spec/support/helpers/repo_helpers.rb
@@ -137,80 +137,4 @@ eos
file_content: content
).execute
end
-
- def commit_options(repo, index, target, ref, message)
- options = {}
- options[:tree] = index.write_tree(repo)
- options[:author] = {
- email: "test@example.com",
- name: "Test Author",
- time: Time.gm(2014, "mar", 3, 20, 15, 1)
- }
- options[:committer] = {
- email: "test@example.com",
- name: "Test Author",
- time: Time.gm(2014, "mar", 3, 20, 15, 1)
- }
- options[:message] ||= message
- options[:parents] = repo.empty? ? [] : [target].compact
- options[:update_ref] = ref
-
- options
- end
-
- # Writes a new commit to the repo and returns a Rugged::Commit. Replaces the
- # contents of CHANGELOG with a single new line of text.
- def new_commit_edit_old_file(repo)
- oid = repo.write("I replaced the changelog with this text", :blob)
- index = repo.index
- index.read_tree(repo.head.target.tree)
- index.add(path: "CHANGELOG", oid: oid, mode: 0100644)
-
- options = commit_options(
- repo,
- index,
- repo.head.target,
- "HEAD",
- "Edit CHANGELOG in its original location"
- )
-
- sha = Rugged::Commit.create(repo, options)
- repo.lookup(sha)
- end
-
- # Writes a new commit to the repo and returns a Rugged::Commit. Replaces the
- # contents of the specified file_path with new text.
- def new_commit_edit_new_file(repo, file_path, commit_message, text, branch = repo.head)
- oid = repo.write(text, :blob)
- index = repo.index
- index.read_tree(branch.target.tree)
- index.add(path: file_path, oid: oid, mode: 0100644)
- options = commit_options(repo, index, branch.target, branch.canonical_name, commit_message)
- sha = Rugged::Commit.create(repo, options)
- repo.lookup(sha)
- end
-
- # Writes a new commit to the repo and returns a Rugged::Commit. Replaces the
- # contents of encoding/CHANGELOG with new text.
- def new_commit_edit_new_file_on_branch(repo, file_path, branch_name, commit_message, text)
- branch = repo.branches[branch_name]
- new_commit_edit_new_file(repo, file_path, commit_message, text, branch)
- end
-
- # Writes a new commit to the repo and returns a Rugged::Commit. Moves the
- # CHANGELOG file to the encoding/ directory.
- def new_commit_move_file(repo)
- blob_oid = repo.head.target.tree.detect { |i| i[:name] == "CHANGELOG" }[:oid]
- file_content = repo.lookup(blob_oid).content
- oid = repo.write(file_content, :blob)
- index = repo.index
- index.read_tree(repo.head.target.tree)
- index.add(path: "encoding/CHANGELOG", oid: oid, mode: 0100644)
- index.remove("CHANGELOG")
-
- options = commit_options(repo, index, repo.head.target, "HEAD", "Move CHANGELOG to encoding/")
-
- sha = Rugged::Commit.create(repo, options)
- repo.lookup(sha)
- end
end
diff --git a/spec/support/helpers/seed_helper.rb b/spec/support/helpers/seed_helper.rb
index f65993efa05..59723583cbc 100644
--- a/spec/support/helpers/seed_helper.rb
+++ b/spec/support/helpers/seed_helper.rb
@@ -9,7 +9,6 @@ TEST_REPO_PATH = 'gitlab-git-test.git'
TEST_NORMAL_REPO_PATH = 'not-bare-repo.git'
TEST_MUTABLE_REPO_PATH = 'mutable-repo.git'
TEST_BROKEN_REPO_PATH = 'broken-repo.git'
-TEST_GITATTRIBUTES_REPO_PATH = 'with-git-attributes.git'
module SeedHelper
GITLAB_GIT_TEST_REPO_URL = File.expand_path('../gitlab-git-test.git', __dir__)
@@ -25,8 +24,6 @@ module SeedHelper
create_normal_seeds
create_mutable_seeds
create_broken_seeds
- create_git_attributes
- create_invalid_git_attributes
end
def create_bare_seeds
@@ -67,48 +64,4 @@ module SeedHelper
FileUtils.rm_r(refs_path)
end
-
- def create_git_attributes
- system(git_env, *%W(#{Gitlab.config.git.bin_path} clone --bare #{TEST_REPO_PATH} #{TEST_GITATTRIBUTES_REPO_PATH}),
- chdir: SEED_STORAGE_PATH,
- out: '/dev/null',
- err: '/dev/null')
-
- dir = File.join(SEED_STORAGE_PATH, 'with-git-attributes.git', 'info')
-
- FileUtils.mkdir_p(dir)
-
- File.open(File.join(dir, 'attributes'), 'w') do |handle|
- handle.write <<-EOF.strip
-# This is a comment, it should be ignored.
-
-*.txt text
-*.jpg -text
-*.sh eol=lf gitlab-language=shell
-*.haml.* gitlab-language=haml
-foo/bar.* foo
-*.cgi key=value?p1=v1&p2=v2
-/*.png gitlab-language=png
-*.binary binary
-/custom-highlighting/*.gitlab-custom gitlab-language=ruby
-/custom-highlighting/*.gitlab-cgi gitlab-language=erb?parent=json
-
-# This uses a tab instead of spaces to ensure the parser also supports this.
-*.md\tgitlab-language=markdown
-bla/bla.txt
- EOF
- end
- end
-
- def create_invalid_git_attributes
- dir = File.join(SEED_STORAGE_PATH, 'with-invalid-git-attributes.git', 'info')
-
- FileUtils.mkdir_p(dir)
-
- enc = Encoding::UTF_16
-
- File.open(File.join(dir, 'attributes'), 'w', encoding: enc) do |handle|
- handle.write('# hello'.encode(enc))
- end
- end
end
diff --git a/spec/support/stub_snowplow.rb b/spec/support/helpers/stub_snowplow.rb
index c6e3b40972f..85c605efea3 100644
--- a/spec/support/stub_snowplow.rb
+++ b/spec/support/helpers/stub_snowplow.rb
@@ -13,7 +13,7 @@ module StubSnowplow
.and_return(SnowplowTracker::Emitter.new(host, buffer_size: buffer_size))
# rubocop:enable RSpec/AnyInstanceOf
- stub_application_setting(snowplow_enabled: true)
+ stub_application_setting(snowplow_enabled: true, snowplow_collector_hostname: host)
allow(SnowplowTracker::SelfDescribingJson).to receive(:new).and_call_original
allow(Gitlab::Tracking).to receive(:event).and_call_original # rubocop:disable RSpec/ExpectGitlabTracking
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index 7c865dd7e11..03e9ad1a08e 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -82,7 +82,13 @@ module TestEnv
'trailers' => 'f0a5ed6',
'add_commit_with_5mb_subject' => '8cf8e80',
'blame-on-renamed' => '32c33da',
- 'with-executables' => '6b8dc4a'
+ 'with-executables' => '6b8dc4a',
+ 'spooky-stuff' => 'ba3343b',
+ 'few-commits' => '0031876',
+ 'two-commits' => '304d257',
+ 'utf-16' => 'f05a987',
+ 'gitaly-rename-test' => '94bb47c',
+ 'smime-signed-commits' => 'ed775cc'
}.freeze
# gitlab-test-fork is a fork of gitlab-fork, but we don't necessarily
@@ -259,43 +265,35 @@ module TestEnv
# Create repository for FactoryBot.create(:project)
def setup_factory_repo
- setup_repo(factory_repo_path, factory_repo_path_bare, factory_repo_name, BRANCH_SHA)
+ setup_repo(factory_repo_path, factory_repo_bundle_path, factory_repo_name, BRANCH_SHA)
end
# Create repository for FactoryBot.create(:forked_project_with_submodules)
# This repo has a submodule commit that is not present in the main test
# repository.
def setup_forked_repo
- setup_repo(forked_repo_path, forked_repo_path_bare, forked_repo_name, FORKED_BRANCH_SHA)
+ setup_repo(forked_repo_path, forked_repo_bundle_path, forked_repo_name, FORKED_BRANCH_SHA)
end
- def setup_repo(repo_path, repo_path_bare, repo_name, refs)
+ def setup_repo(repo_path, repo_bundle_path, repo_name, refs)
clone_url = "https://gitlab.com/gitlab-org/#{repo_name}.git"
unless File.directory?(repo_path)
start = Time.now
system(*%W(#{Gitlab.config.git.bin_path} clone --quiet -- #{clone_url} #{repo_path}))
+ system(*%W(#{Gitlab.config.git.bin_path} -C #{repo_path} remote remove origin))
puts "==> #{repo_path} set up in #{Time.now - start} seconds...\n"
end
set_repo_refs(repo_path, refs)
- unless File.directory?(repo_path_bare)
+ unless File.file?(repo_bundle_path)
start = Time.now
- # We must copy bare repositories because we will push to them.
- system(git_env, *%W(#{Gitlab.config.git.bin_path} clone --quiet --bare -- #{repo_path} #{repo_path_bare}))
- puts "==> #{repo_path_bare} set up in #{Time.now - start} seconds...\n"
+ system(git_env, *%W(#{Gitlab.config.git.bin_path} -C #{repo_path} bundle create #{repo_bundle_path} --all))
+ puts "==> #{repo_bundle_path} generated in #{Time.now - start} seconds...\n"
end
end
- def copy_repo(subject, bare_repo:, refs:)
- target_repo_path = File.expand_path(repos_path + "/#{subject.disk_path}.git")
-
- FileUtils.mkdir_p(target_repo_path)
- FileUtils.cp_r("#{File.expand_path(bare_repo)}/.", target_repo_path)
- FileUtils.chmod_R 0755, target_repo_path
- end
-
def rm_storage_dir(storage, dir)
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
target_repo_refs_path = File.join(GitalySetup.repos_path(storage), dir)
@@ -310,14 +308,6 @@ module TestEnv
end
end
- def create_bare_repository(path)
- FileUtils.mkdir_p(path)
-
- system(git_env, *%W(#{Gitlab.config.git.bin_path} -C #{path} init --bare),
- out: '/dev/null',
- err: '/dev/null')
- end
-
def repos_path
@repos_path ||= GitalySetup.repos_path
end
@@ -357,20 +347,12 @@ module TestEnv
Capybara.current_session.visit '/'
end
- def factory_repo_path_bare
- "#{factory_repo_path}_bare"
- end
-
- def forked_repo_path_bare
- "#{forked_repo_path}_bare"
+ def factory_repo_bundle_path
+ "#{factory_repo_path}.bundle"
end
- def with_empty_bare_repository(name = nil)
- path = Rails.root.join('tmp/tests', name || 'empty-bare-repository').to_s
-
- yield(Rugged::Repository.init_at(path, :bare))
- ensure
- FileUtils.rm_rf(path)
+ def forked_repo_bundle_path
+ "#{forked_repo_path}.bundle"
end
def seed_db
@@ -386,9 +368,9 @@ module TestEnv
gitaly
gitlab-shell
gitlab-test
- gitlab-test_bare
+ gitlab-test.bundle
gitlab-test-fork
- gitlab-test-fork_bare
+ gitlab-test-fork.bundle
gitlab-workhorse
gitlab_workhorse_secret
]
diff --git a/spec/support/helpers/usage_data_helpers.rb b/spec/support/helpers/usage_data_helpers.rb
index 50d1b14cf56..2a9144614d0 100644
--- a/spec/support/helpers/usage_data_helpers.rb
+++ b/spec/support/helpers/usage_data_helpers.rb
@@ -53,7 +53,6 @@ module UsageDataHelpers
clusters_platforms_eks
clusters_platforms_gke
clusters_platforms_user
- clusters_integrations_elastic_stack
clusters_integrations_prometheus
clusters_management_project
in_review_folder
@@ -91,7 +90,6 @@ module UsageDataHelpers
projects_with_repositories_enabled
projects_with_error_tracking_enabled
projects_with_enabled_alert_integrations
- projects_with_tracing_enabled
projects_with_expiration_policy_enabled
projects_with_expiration_policy_disabled
projects_with_expiration_policy_enabled_with_keep_n_unset
diff --git a/spec/support/matchers/background_migrations_matchers.rb b/spec/support/matchers/background_migrations_matchers.rb
index c5b3e140585..9f39f576b95 100644
--- a/spec/support/matchers/background_migrations_matchers.rb
+++ b/spec/support/matchers/background_migrations_matchers.rb
@@ -74,6 +74,13 @@ RSpec::Matchers.define :have_scheduled_batched_migration do |gitlab_schema: :git
.for_configuration(gitlab_schema, migration, table_name, column_name, job_arguments)
expect(batched_migrations.count).to be(1)
+
+ # The :batch_min_value and :batch_max_value arguments are persisted in the
+ # :min_value and :max_value database columns. Rename those attributes here so
+ # the have_attributes matcher below compares against the column names.
+ attributes[:min_value] = attributes.delete :batch_min_value if attributes.include?(:batch_min_value)
+ attributes[:max_value] = attributes.delete :batch_max_value if attributes.include?(:batch_max_value)
+
expect(batched_migrations).to all(have_attributes(attributes)) if attributes.present?
end
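For reference, a minimal usage sketch of the renamed attributes, assuming a hypothetical migration spec: the caller keeps the :batch_min_value / :batch_max_value names it used when queueing the migration, and the matcher now compares them against the :min_value / :max_value columns. The migration name, table, and values below are illustrative only.

    it 'schedules a batched background migration' do
      migrate!

      expect('BackfillExampleColumn').to have_scheduled_batched_migration(
        table_name: :projects,
        column_name: :id,
        batch_min_value: 1,      # matched against the :min_value column after the rename
        batch_max_value: 10_000  # matched against the :max_value column after the rename
      )
    end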
diff --git a/spec/support/matchers/event_store.rb b/spec/support/matchers/event_store.rb
index eb5b37f39e5..14f6a42d7f4 100644
--- a/spec/support/matchers/event_store.rb
+++ b/spec/support/matchers/event_store.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
RSpec::Matchers.define :publish_event do |expected_event_class|
+ include RSpec::Matchers::Composable
+
supports_block_expectations
match do |proc|
@@ -15,10 +17,17 @@ RSpec::Matchers.define :publish_event do |expected_event_class|
proc.call
@events.any? do |event|
- event.instance_of?(expected_event_class) && event.data == @expected_data
+ event.instance_of?(expected_event_class) && match_data?(event.data, @expected_data)
end
end
+ def match_data?(actual, expected)
+ values_match?(actual.keys, expected.keys) &&
+ actual.keys.all? do |key|
+ values_match?(actual[key], expected[key])
+ end
+ end
+
chain :with do |expected_data|
@expected_data = expected_data
end
diff --git a/spec/support/matchers/match_file.rb b/spec/support/matchers/match_file.rb
index 4e522b52912..0c1f95d45f5 100644
--- a/spec/support/matchers/match_file.rb
+++ b/spec/support/matchers/match_file.rb
@@ -2,6 +2,6 @@
RSpec::Matchers.define :match_file do |expected|
match do |actual|
- expect(Digest::MD5.hexdigest(actual)).to eq(Digest::MD5.hexdigest(File.read(expected)))
+ expect(Digest::SHA256.hexdigest(actual)).to eq(Digest::SHA256.hexdigest(File.read(expected)))
end
end
diff --git a/spec/support/services/issuable_import_csv_service_shared_examples.rb b/spec/support/services/issuable_import_csv_service_shared_examples.rb
index 07118198969..0dea6cfb729 100644
--- a/spec/support/services/issuable_import_csv_service_shared_examples.rb
+++ b/spec/support/services/issuable_import_csv_service_shared_examples.rb
@@ -37,6 +37,10 @@ RSpec.shared_examples 'issuable import csv service' do |issuable_type|
end
describe '#execute' do
+ before do
+ project.add_developer(user)
+ end
+
context 'invalid file extension' do
let(:file) { fixture_file_upload('spec/fixtures/banana_sample.gif') }
diff --git a/spec/support/shared_contexts/controllers/ldap_omniauth_callbacks_controller_shared_context.rb b/spec/support/shared_contexts/controllers/ldap_omniauth_callbacks_controller_shared_context.rb
index 8635c9a8ff9..b31fe9ee0d1 100644
--- a/spec/support/shared_contexts/controllers/ldap_omniauth_callbacks_controller_shared_context.rb
+++ b/spec/support/shared_contexts/controllers/ldap_omniauth_callbacks_controller_shared_context.rb
@@ -14,6 +14,8 @@ RSpec.shared_context 'Ldap::OmniauthCallbacksController' do
{ main: ldap_config_defaults(:main) }
end
+ let(:multiple_ldap_servers_license_available) { true }
+
def ldap_config_defaults(key, hash = {})
{
provider_name: "ldap#{key}",
@@ -23,6 +25,7 @@ RSpec.shared_context 'Ldap::OmniauthCallbacksController' do
end
before do
+ stub_licensed_features(multiple_ldap_servers: multiple_ldap_servers_license_available)
stub_ldap_setting(ldap_settings)
described_class.define_providers!
Rails.application.reload_routes!
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
index aa8bc6fa79f..a3c688bb69e 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
@@ -16,9 +16,6 @@ RSpec.shared_context 'structured_logger' do
"created_at" => created_at.to_f,
"enqueued_at" => created_at.to_f,
"correlation_id" => 'cid',
- "error_message" => "wrong number of arguments (2 for 3)",
- "error_class" => "ArgumentError",
- "error_backtrace" => [],
"exception.message" => "wrong number of arguments (2 for 3)",
"exception.class" => "ArgumentError",
"exception.backtrace" => []
@@ -32,7 +29,6 @@ RSpec.shared_context 'structured_logger' do
let(:clock_thread_cputime_end) { 1.333333799 }
let(:start_payload) do
job.except(
- 'error_message', 'error_class', 'error_backtrace',
'exception.backtrace', 'exception.class', 'exception.message'
).merge(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: start',
@@ -73,9 +69,6 @@ RSpec.shared_context 'structured_logger' do
end_payload.merge(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: fail: 0.0 sec',
'job_status' => 'fail',
- 'error_class' => 'ArgumentError',
- 'error_message' => 'Something went wrong',
- 'error_backtrace' => be_a(Array).and(be_present),
'exception.class' => 'ArgumentError',
'exception.message' => 'Something went wrong',
'exception.backtrace' => be_a(Array).and(be_present)
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
index 0d992f33c61..449db59e35d 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
@@ -10,6 +10,7 @@ RSpec.shared_context 'server metrics with mocked prometheus' do
let(:gitaly_seconds_metric) { double('gitaly seconds metric') }
let(:failed_total_metric) { double('failed total metric') }
let(:retried_total_metric) { double('retried total metric') }
+ let(:interrupted_total_metric) { double('interrupted total metric') }
let(:redis_requests_total) { double('redis calls total metric') }
let(:running_jobs_metric) { double('running jobs metric') }
let(:redis_seconds_metric) { double('redis seconds metric') }
@@ -30,6 +31,7 @@ RSpec.shared_context 'server metrics with mocked prometheus' do
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_elasticsearch_requests_duration_seconds, anything, anything, anything).and_return(elasticsearch_seconds_metric)
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_failed_total, anything).and_return(failed_total_metric)
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_retried_total, anything).and_return(retried_total_metric)
+ allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_interrupted_total, anything).and_return(interrupted_total_metric)
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_redis_requests_total, anything).and_return(redis_requests_total)
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_elasticsearch_requests_total, anything).and_return(elasticsearch_requests_total)
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_load_balancing_count, anything).and_return(load_balancing_metric)
diff --git a/spec/support/shared_contexts/markdown_snapshot_shared_examples.rb b/spec/support/shared_contexts/markdown_snapshot_shared_examples.rb
index de52b58982e..a90fe9e1723 100644
--- a/spec/support/shared_contexts/markdown_snapshot_shared_examples.rb
+++ b/spec/support/shared_contexts/markdown_snapshot_shared_examples.rb
@@ -5,12 +5,12 @@ require 'spec_helper'
# See https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/#markdown-snapshot-testing
# for documentation on this spec.
# rubocop:disable Layout/LineLength
-RSpec.shared_context 'with API::Markdown Snapshot shared context' do |glfm_specification_dir, glfm_example_snapshots_dir|
+RSpec.shared_context 'with API::Markdown Snapshot shared context' do |glfm_specification_dir|
# rubocop:enable Layout/LineLength
include ApiHelpers
markdown_examples, html_examples = %w[markdown.yml html.yml].map do |file_name|
- yaml = File.read("#{glfm_example_snapshots_dir}/#{file_name}")
+ yaml = File.read("#{glfm_specification_dir}/example_snapshots/#{file_name}")
YAML.safe_load(yaml, symbolize_names: true, aliases: true)
end
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index d277a45584d..6c2ed79b343 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -83,8 +83,6 @@ RSpec.shared_context 'project navbar structure' do
nav_item: _('Monitor'),
nav_sub_items: [
_('Metrics'),
- _('Logs'),
- _('Tracing'),
_('Error Tracking'),
_('Alerts'),
_('Incidents'),
@@ -112,6 +110,7 @@ RSpec.shared_context 'project navbar structure' do
_('Access Tokens'),
_('Repository'),
_('CI/CD'),
+ _('Packages & Registries'),
_('Monitor'),
s_('UsageQuota|Usage Quotas')
]
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index 483bca07ba6..eec6e92c5fe 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -31,6 +31,7 @@ RSpec.shared_context 'GroupPolicy context' do
admin_milestone
admin_issue_board
read_container_image
+ read_harbor_registry
read_metrics_dashboard_annotation
read_prometheus
read_crm_contact
diff --git a/spec/support/shared_contexts/policies/project_policy_shared_context.rb b/spec/support/shared_contexts/policies/project_policy_shared_context.rb
index 7396643823c..789b385c435 100644
--- a/spec/support/shared_contexts/policies/project_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/project_policy_shared_context.rb
@@ -12,6 +12,7 @@ RSpec.shared_context 'ProjectPolicy context' do
let_it_be_with_refind(:private_project) { create(:project, :private, namespace: owner.namespace) }
let_it_be_with_refind(:internal_project) { create(:project, :internal, namespace: owner.namespace) }
let_it_be_with_refind(:public_project) { create(:project, :public, namespace: owner.namespace) }
+ let_it_be_with_refind(:public_project_in_group) { create(:project, :public, namespace: create(:group, :public)) }
let(:base_guest_permissions) do
%i[
@@ -29,7 +30,7 @@ RSpec.shared_context 'ProjectPolicy context' do
create_snippet create_incident daily_statistics create_merge_request_in download_code
download_wiki_code fork_project metrics_dashboard read_build
read_commit_status read_confidential_issues read_container_image
- read_deployment read_environment read_merge_request
+ read_harbor_registry read_deployment read_environment read_merge_request
read_metrics_dashboard_annotation read_pipeline read_prometheus
read_sentry_issue update_issue create_merge_request_in
]
@@ -93,7 +94,7 @@ RSpec.shared_context 'ProjectPolicy context' do
let(:owner_permissions) { base_owner_permissions + additional_owner_permissions }
before_all do
- [private_project, internal_project, public_project].each do |project|
+ [private_project, internal_project, public_project, public_project_in_group].each do |project|
project.add_guest(guest)
project.add_reporter(reporter)
project.add_developer(developer)
diff --git a/spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb b/spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb
new file mode 100644
index 00000000000..98fc52add51
--- /dev/null
+++ b/spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+#
+# Requires a context containing:
+# - subject
+# - project
+# - feature_flag_name
+# - category
+# - action
+# - namespace
+# - user
+
+shared_examples 'Snowplow event tracking' do
+ let(:label) { nil }
+
+ it 'is not emitted if FF is disabled' do
+ stub_feature_flags(feature_flag_name => false)
+
+ subject
+
+ expect_no_snowplow_event
+ end
+
+ it 'is emitted' do
+ params = {
+ category: category,
+ action: action,
+ namespace: namespace,
+ user: user,
+ project: project,
+ label: label
+ }.compact
+
+ subject
+
+ expect_snowplow_event(**params)
+ end
+end
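As the header comment lists, an including spec must define subject, project, feature_flag_name, category, action, namespace, and user. A minimal usage sketch, with hypothetical flag and action names:

    it_behaves_like 'Snowplow event tracking' do
      subject(:request) { get :index }  # whatever request triggers the tracked event

      let_it_be(:project) { create(:project) }
      let_it_be(:user)    { create(:user) }

      let(:namespace)         { project.namespace }
      let(:category)          { described_class.name }
      let(:action)            { 'perform_example_action' }  # hypothetical action name
      let(:feature_flag_name) { :example_tracking_flag }     # hypothetical flag name
    end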
diff --git a/spec/support/shared_examples/csp.rb b/spec/support/shared_examples/csp.rb
index 9143d0f4720..91242ae9f37 100644
--- a/spec/support/shared_examples/csp.rb
+++ b/spec/support/shared_examples/csp.rb
@@ -15,64 +15,66 @@ RSpec.shared_examples 'setting CSP' do |rule_name|
end
end
- context 'when no CSP config' do
- include_context 'csp config', nil
+ context 'csp config and feature toggle', :do_not_stub_snowplow_by_default do
+ context 'when no CSP config' do
+ include_context 'csp config', nil
- it 'does not add CSP directives' do
- is_expected.to be_blank
+ it 'does not add CSP directives' do
+ is_expected.to be_blank
+ end
end
- end
- describe "when a CSP config exists for #{rule_name}" do
- include_context 'csp config', rule_name.parameterize.underscore.to_sym
+ describe "when a CSP config exists for #{rule_name}" do
+ include_context 'csp config', rule_name.parameterize.underscore.to_sym
- context 'when feature is enabled' do
- it "appends to #{rule_name}" do
- is_expected.to eql("#{rule_name} #{default_csp_values} #{allowlisted_url}")
+ context 'when feature is enabled' do
+ it "appends to #{rule_name}" do
+ is_expected.to eql("#{rule_name} #{default_csp_values} #{allowlisted_url}")
+ end
end
- end
- context 'when feature is disabled' do
- include_context 'disable feature'
+ context 'when feature is disabled' do
+ include_context 'disable feature'
- it "keeps original #{rule_name}" do
- is_expected.to eql("#{rule_name} #{default_csp_values}")
+ it "keeps original #{rule_name}" do
+ is_expected.to eql("#{rule_name} #{default_csp_values}")
+ end
end
end
- end
- describe "when a CSP config exists for default-src but not #{rule_name}" do
- include_context 'csp config', :default_src
+ describe "when a CSP config exists for default-src but not #{rule_name}" do
+ include_context 'csp config', :default_src
- context 'when feature is enabled' do
- it "uses default-src values in #{rule_name}" do
- is_expected.to eql("default-src #{default_csp_values}; #{rule_name} #{default_csp_values} #{allowlisted_url}")
+ context 'when feature is enabled' do
+ it "uses default-src values in #{rule_name}" do
+ is_expected.to eql("default-src #{default_csp_values}; #{rule_name} #{default_csp_values} #{allowlisted_url}")
+ end
end
- end
- context 'when feature is disabled' do
- include_context 'disable feature'
+ context 'when feature is disabled' do
+ include_context 'disable feature'
- it "does not add #{rule_name}" do
- is_expected.to eql("default-src #{default_csp_values}")
+ it "does not add #{rule_name}" do
+ is_expected.to eql("default-src #{default_csp_values}")
+ end
end
end
- end
- describe "when a CSP config exists for font-src but not #{rule_name}" do
- include_context 'csp config', :font_src
+ describe "when a CSP config exists for font-src but not #{rule_name}" do
+ include_context 'csp config', :font_src
- context 'when feature is enabled' do
- it "uses default-src values in #{rule_name}" do
- is_expected.to eql("font-src #{default_csp_values}; #{rule_name} #{allowlisted_url}")
+ context 'when feature is enabled' do
+ it "uses default-src values in #{rule_name}" do
+ is_expected.to eql("font-src #{default_csp_values}; #{rule_name} #{allowlisted_url}")
+ end
end
- end
- context 'when feature is disabled' do
- include_context 'disable feature'
+ context 'when feature is disabled' do
+ include_context 'disable feature'
- it "does not add #{rule_name}" do
- is_expected.to eql("font-src #{default_csp_values}")
+ it "does not add #{rule_name}" do
+ is_expected.to eql("font-src #{default_csp_values}")
+ end
end
end
end
diff --git a/spec/support/shared_examples/features/content_editor_shared_examples.rb b/spec/support/shared_examples/features/content_editor_shared_examples.rb
index 591f7973454..0ea82f37db0 100644
--- a/spec/support/shared_examples/features/content_editor_shared_examples.rb
+++ b/spec/support/shared_examples/features/content_editor_shared_examples.rb
@@ -31,8 +31,6 @@ RSpec.shared_examples 'edits content using the content editor' do
page.go_back
refresh
-
- click_button 'Edit rich text'
end
it 'applies theme classes to code blocks' do
diff --git a/spec/support/shared_examples/features/discussion_comments_shared_example.rb b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
index 6c06cbf9082..24dc4bcfc59 100644
--- a/spec/support/shared_examples/features/discussion_comments_shared_example.rb
+++ b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
@@ -293,7 +293,7 @@ RSpec.shared_examples 'thread comments for issue, epic and merge request' do |re
it 'can be collapsed' do
submit_reply('another text')
- find('.js-collapse-replies').click
+ click_button s_('Notes|Collapse replies'), match: :first
expect(page).to have_css('.discussion-notes .note', count: 1)
expect(page).to have_content '1 reply'
end
diff --git a/spec/support/shared_examples/features/inviting_members_shared_examples.rb b/spec/support/shared_examples/features/inviting_members_shared_examples.rb
index 58357b262f5..bca0e02fcdd 100644
--- a/spec/support/shared_examples/features/inviting_members_shared_examples.rb
+++ b/spec/support/shared_examples/features/inviting_members_shared_examples.rb
@@ -23,6 +23,22 @@ RSpec.shared_examples 'inviting members' do |snowplow_invite_label|
)
end
+ it 'displays the user\'s avatar in the member input token', :js do
+ visit members_page_path
+
+ input_invites(user2.name)
+
+ expect(page).to have_selector(member_token_avatar_selector)
+ end
+
+ it 'does not display an avatar in the member input token for an email address', :js do
+ visit members_page_path
+
+ input_invites('test@example.com')
+
+ expect(page).not_to have_selector(member_token_avatar_selector)
+ end
+
it 'invites user by email', :js, :snowplow, :aggregate_failures do
visit members_page_path
@@ -78,22 +94,23 @@ RSpec.shared_examples 'inviting members' do |snowplow_invite_label|
end
context 'when member is already a member by email' do
- it 'fails with an error', :js do
+ it 'updates the member for that email', :js do
+ email = 'test@example.com'
+
visit members_page_path
- invite_member('test@example.com', role: 'Developer')
+ invite_member(email, role: 'Developer')
- invite_member('test@example.com', role: 'Reporter', refresh: false)
+ invite_member(email, role: 'Reporter', refresh: false)
- expect(page).to have_selector(invite_modal_selector)
- expect(page).to have_content("The member's email address has already been taken")
+ expect(page).not_to have_selector(invite_modal_selector)
page.refresh
click_link 'Invited'
- page.within find_invited_member_row('test@example.com') do
- expect(page).to have_button('Developer')
+ page.within find_invited_member_row(email) do
+ expect(page).to have_button('Reporter')
end
end
end
@@ -131,8 +148,8 @@ RSpec.shared_examples 'inviting members' do |snowplow_invite_label|
invite_member(user2.name, role: role, refresh: false)
expect(page).to have_selector(invite_modal_selector)
- expect(page).to have_content "Access level should be greater than or equal to Developer inherited membership " \
- "from group #{group.name}"
+ expect(page).to have_content "#{user2.name}: Access level should be greater than or equal to Developer " \
+ "inherited membership from group #{group.name}"
page.refresh
@@ -149,13 +166,31 @@ RSpec.shared_examples 'inviting members' do |snowplow_invite_label|
group.add_maintainer(user3)
end
- it 'only shows the first user error', :js do
+ it 'shows the user errors and then removes them from the form', :js do
visit subentity_members_page_path
invite_member([user2.name, user3.name], role: role, refresh: false)
expect(page).to have_selector(invite_modal_selector)
- expect(page).to have_text("Access level should be greater than or equal to", count: 1)
+ expect(page).to have_selector(member_token_error_selector(user2.id))
+ expect(page).to have_selector(member_token_error_selector(user3.id))
+ expect(page).to have_text("The following 2 members couldn't be invited")
+ expect(page).to have_text("#{user2.name}: Access level should be greater than or equal to")
+ expect(page).to have_text("#{user3.name}: Access level should be greater than or equal to")
+
+ remove_token(user2.id)
+
+ expect(page).not_to have_selector(member_token_error_selector(user2.id))
+ expect(page).to have_selector(member_token_error_selector(user3.id))
+ expect(page).to have_text("The following member couldn't be invited")
+ expect(page).not_to have_text("#{user2.name}: Access level should be greater than or equal to")
+
+ remove_token(user3.id)
+
+ expect(page).not_to have_selector(member_token_error_selector(user3.id))
+ expect(page).not_to have_text("The following member couldn't be invited")
+ expect(page).not_to have_text("Review the invite errors and try again")
+ expect(page).not_to have_text("#{user3.name}: Access level should be greater than or equal to")
page.refresh
@@ -169,6 +204,19 @@ RSpec.shared_examples 'inviting members' do |snowplow_invite_label|
expect(page).not_to have_button('Maintainer')
end
end
+
+ it 'only shows the error for an invalidly formatted email and does not display other member errors', :js do
+ visit subentity_members_page_path
+
+ invite_member([user2.name, user3.name, 'bad@email'], role: role, refresh: false)
+
+ expect(page).to have_selector(invite_modal_selector)
+ expect(page).to have_text('email contains an invalid email address')
+ expect(page).not_to have_text("The following 2 members couldn't be invited")
+ expect(page).not_to have_text("Review the invite errors and try again")
+ expect(page).not_to have_text("#{user2.name}: Access level should be greater than or equal to")
+ expect(page).not_to have_text("#{user3.name}: Access level should be greater than or equal to")
+ end
end
end
end
diff --git a/spec/support/shared_examples/features/multiple_assignees_mr_shared_examples.rb b/spec/support/shared_examples/features/multiple_assignees_mr_shared_examples.rb
index 4565108b5e4..9d023d9514a 100644
--- a/spec/support/shared_examples/features/multiple_assignees_mr_shared_examples.rb
+++ b/spec/support/shared_examples/features/multiple_assignees_mr_shared_examples.rb
@@ -4,7 +4,7 @@ RSpec.shared_examples 'multiple assignees merge request' do |action, save_button
it "#{action} a MR with multiple assignees", :js do
find('.js-assignee-search').click
page.within '.dropdown-menu-user' do
- click_link user.name unless action == 'creates'
+ click_link user.name
click_link user2.name
end
diff --git a/spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb b/spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb
index a44a699c878..bbde448a1a1 100644
--- a/spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb
+++ b/spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb
@@ -4,7 +4,7 @@ RSpec.shared_examples 'multiple assignees widget merge request' do |action, save
it "#{action} a MR with multiple assignees", :js do
find('.js-assignee-search').click
page.within '.dropdown-menu-user' do
- click_link user.name unless action == 'creates'
+ click_link user.name
click_link user2.name
end
diff --git a/spec/support/shared_examples/features/wiki/autocomplete_shared_examples.rb b/spec/support/shared_examples/features/wiki/autocomplete_shared_examples.rb
new file mode 100644
index 00000000000..79de2aedf3b
--- /dev/null
+++ b/spec/support/shared_examples/features/wiki/autocomplete_shared_examples.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'autocompletes items' do
+ before do
+ if defined?(project)
+ create(:issue, project: project, title: 'My Cool Linked Issue')
+ create(:merge_request, source_project: project, title: 'My Cool Merge Request')
+ create(:label, project: project, title: 'My Cool Label')
+ create(:milestone, project: project, title: 'My Cool Milestone')
+
+ project.add_maintainer(create(:user, name: 'JohnDoe123'))
+ else # group wikis
+ project = create(:project, group: group)
+
+ create(:issue, project: project, title: 'My Cool Linked Issue')
+ create(:merge_request, source_project: project, title: 'My Cool Merge Request')
+ create(:group_label, group: group, title: 'My Cool Label')
+ create(:milestone, group: group, title: 'My Cool Milestone')
+
+ project.add_maintainer(create(:user, name: 'JohnDoe123'))
+ end
+ end
+
+ it 'works well for issues, labels, MRs, members, etc' do
+ fill_in :wiki_content, with: "#"
+ expect(page).to have_text 'My Cool Linked Issue'
+
+ fill_in :wiki_content, with: "~"
+ expect(page).to have_text 'My Cool Label'
+
+ fill_in :wiki_content, with: "!"
+ expect(page).to have_text 'My Cool Merge Request'
+
+ fill_in :wiki_content, with: "%"
+ expect(page).to have_text 'My Cool Milestone'
+
+ fill_in :wiki_content, with: "@"
+ expect(page).to have_text 'JohnDoe123'
+
+ fill_in :wiki_content, with: ':smil'
+ expect(page).to have_text 'smile_cat'
+ end
+end
diff --git a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
index 12a4c6d7583..79c7c1891ac 100644
--- a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
@@ -146,6 +146,8 @@ RSpec.shared_examples 'User updates wiki page' do
it_behaves_like 'edits content using the content editor'
end
end
+
+ it_behaves_like 'autocompletes items'
end
context 'when the page is in a subdir', :js do
diff --git a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
index 622a88e8323..9d8f37a3e64 100644
--- a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
+++ b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
@@ -269,6 +269,17 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context
it 'returns items not assigned to that milestone' do
expect(items).to contain_exactly(item2, item3, item4, item5)
end
+
+ context 'with multiple milestones' do
+ let(:milestone2) { create(:milestone, project: project2) }
+ let(:params) { { not: { milestone_title: [milestone.title, milestone2.title] } } }
+
+ it 'returns items not assigned to both milestones' do
+ item2.update!(milestone: milestone2)
+
+ expect(items).to contain_exactly(item3, item4, item5)
+ end
+ end
end
context 'filtering by group milestone' do
@@ -962,7 +973,7 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context
group = create(:group)
project = create(:project, group: group)
item = create(factory, project: project)
- group.add_user(user, :owner)
+ group.add_member(user, :owner)
expect(items).to include(item)
end
diff --git a/spec/support/shared_examples/graphql/mutations/work_items/update_description_widget_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/work_items/update_description_widget_shared_examples.rb
new file mode 100644
index 00000000000..56c2ca22e15
--- /dev/null
+++ b/spec/support/shared_examples/graphql/mutations/work_items/update_description_widget_shared_examples.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'update work item description widget' do
+ it 'updates the description widget' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to change(work_item, :description).from(nil).to(new_description)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['workItem']['widgets']).to include(
+ {
+ 'description' => new_description,
+ 'type' => 'DESCRIPTION'
+ }
+ )
+ end
+
+ context 'when the updated work item is not valid' do
+ it 'returns validation errors without the work item' do
+ errors = ActiveModel::Errors.new(work_item).tap { |e| e.add(:description, 'error message') }
+
+ allow_next_found_instance_of(::WorkItem) do |instance|
+ allow(instance).to receive(:valid?).and_return(false)
+ allow(instance).to receive(:errors).and_return(errors)
+ end
+
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['workItem']).to be_nil
+ expect(mutation_response['errors']).to match_array(['Description error message'])
+ end
+ end
+end
diff --git a/spec/support/shared_examples/graphql/mutations/work_items/update_weight_widget_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/work_items/update_weight_widget_shared_examples.rb
new file mode 100644
index 00000000000..3c32b7e0310
--- /dev/null
+++ b/spec/support/shared_examples/graphql/mutations/work_items/update_weight_widget_shared_examples.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'update work item weight widget' do
+ it 'updates the weight widget' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to change(work_item, :weight).from(nil).to(new_weight)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['workItem']['widgets']).to include(
+ {
+ 'weight' => new_weight,
+ 'type' => 'WEIGHT'
+ }
+ )
+ end
+
+ context 'when the updated work item is not valid' do
+ it 'returns validation errors without the work item' do
+ errors = ActiveModel::Errors.new(work_item).tap { |e| e.add(:weight, 'error message') }
+
+ allow_next_found_instance_of(::WorkItem) do |instance|
+ allow(instance).to receive(:valid?).and_return(false)
+ allow(instance).to receive(:errors).and_return(errors)
+ end
+
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['workItem']).to be_nil
+ expect(mutation_response['errors']).to match_array(['Weight error message'])
+ end
+ end
+end
diff --git a/spec/support/shared_examples/graphql/sorted_paginated_query_shared_examples.rb b/spec/support/shared_examples/graphql/sorted_paginated_query_shared_examples.rb
index 6d6e7b761f6..59927fa1cc9 100644
--- a/spec/support/shared_examples/graphql/sorted_paginated_query_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/sorted_paginated_query_shared_examples.rb
@@ -44,19 +44,25 @@
# end
# end
#
+
+# Include this context if your field does not accept a sort argument
+RSpec.shared_context 'no sort argument' do
+ let(:sort_argument) { graphql_args }
+end
+
RSpec.shared_examples 'sorted paginated query' do |conditions = {}|
# Provided as a convenience when constructing queries using string concatenation
let(:page_info) { 'pageInfo { startCursor endCursor }' }
# Convenience for using default implementation of pagination_results_data
let(:node_path) { ['id'] }
+ let(:sort_argument) { graphql_args(sort: sort_param) }
it_behaves_like 'requires variables' do
- let(:required_variables) { [:sort_param, :first_param, :all_records, :data_path, :current_user] }
+ let(:required_variables) { [:first_param, :all_records, :data_path, :current_user] }
end
describe do
- let(:sort_argument) { graphql_args(sort: sort_param) }
- let(:params) { sort_argument }
+ let(:params) { sort_argument }
# Convenience helper for the large number of queries defined as a projection
# from some root value indexed by full_path to a collection of objects with IID
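A hedged sketch of combining the new context with the existing shared examples for a field that takes no sort argument; the field path, expected records, and surrounding lets (environments, user) are illustrative:

    it_behaves_like 'sorted paginated query' do
      include_context 'no sort argument'  # the field under test does not accept :sort

      let(:first_param)  { 2 }
      let(:data_path)    { [:project, :environments] }  # hypothetical field path
      let(:all_records)  { environments.map(&:name) }   # hypothetical expected order
      let(:current_user) { user }
    end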
diff --git a/spec/support/shared_examples/harbor/artifacts_controller_shared_examples.rb b/spec/support/shared_examples/harbor/artifacts_controller_shared_examples.rb
new file mode 100644
index 00000000000..85fcd426e3d
--- /dev/null
+++ b/spec/support/shared_examples/harbor/artifacts_controller_shared_examples.rb
@@ -0,0 +1,162 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a harbor artifacts controller' do |args|
+ include HarborHelper
+ let_it_be(:user) { create(:user) }
+ let_it_be(:unauthorized_user) { create(:user) }
+ let_it_be(:json_header) { { accept: 'application/json' } }
+
+ let(:mock_artifacts) do
+ [
+ {
+ "digest": "sha256:661e8e44e5d7290fbd42d0495ab4ff6fdf1ad251a9f358969b3264a22107c14d",
+ "icon": "sha256:0048162a053eef4d4ce3fe7518615bef084403614f8bca43b40ae2e762e11e06",
+ "id": 1,
+ "project_id": 1,
+ "pull_time": "0001-01-01T00:00:00.000Z",
+ "push_time": "2022-04-23T08:04:08.901Z",
+ "repository_id": 1,
+ "size": 126745886,
+ "tags": [
+ {
+ "artifact_id": 1,
+ "id": 1,
+ "immutable": false,
+ "name": "2",
+ "pull_time": "0001-01-01T00:00:00.000Z",
+ "push_time": "2022-04-23T08:04:08.920Z",
+ "repository_id": 1,
+ "signed": false
+ }
+ ],
+ "type": "IMAGE"
+ }
+ ]
+ end
+
+ let(:repository_id) { 'test' }
+
+ shared_examples 'responds with 404 status' do
+ it 'returns 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ shared_examples 'responds with 200 status with json' do
+ it 'returns 200 and does not render the index template' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).not_to render_template(:index)
+ end
+ end
+
+ shared_examples 'responds with 302 status' do
+ it 'returns 302' do
+ subject
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+
+ shared_examples 'responds with 422 status with json' do
+ it 'returns 422' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+
+ before do
+ stub_request(:get,
+ "https://demo.goharbor.io/api/v2.0/projects/testproject/repositories/test/artifacts"\
+ "?page=1&page_size=10&with_tag=true")
+ .with(
+ headers: {
+ 'Authorization': 'Basic aGFyYm9ydXNlcm5hbWU6aGFyYm9ycGFzc3dvcmQ=',
+ 'Content-Type': 'application/json'
+ }).to_return(status: 200, body: mock_artifacts.to_json, headers: { "x-total-count": 2 })
+ container.add_reporter(user)
+ sign_in(user)
+ end
+
+ describe 'GET #index.json' do
+ subject do
+ get harbor_artifact_url(container, repository_id), headers: json_header
+ end
+
+ context 'with harbor registry feature flag enabled' do
+ it_behaves_like 'responds with 200 status with json'
+ end
+
+ context 'with harbor registry feature flag disabled' do
+ before do
+ stub_feature_flags(harbor_registry_integration: false)
+ end
+
+ it_behaves_like 'responds with 404 status'
+ end
+
+ context 'with anonymous user' do
+ before do
+ sign_out(user)
+ end
+
+ it_behaves_like "responds with #{args[:anonymous_status_code]} status"
+ end
+
+ context 'with unauthorized user' do
+ before do
+ sign_in(unauthorized_user)
+ end
+
+ it_behaves_like 'responds with 404 status'
+ end
+
+ context 'with valid params' do
+ context 'with valid repository' do
+ subject do
+ get harbor_artifact_url(container, repository_id), headers: json_header
+ end
+
+ it_behaves_like 'responds with 200 status with json'
+ end
+
+ context 'with valid page' do
+ subject do
+ get harbor_artifact_url(container, repository_id, page: '1'), headers: json_header
+ end
+
+ it_behaves_like 'responds with 200 status with json'
+ end
+
+ context 'with valid limit' do
+ subject do
+ get harbor_artifact_url(container, repository_id, limit: '10'), headers: json_header
+ end
+
+ it_behaves_like 'responds with 200 status with json'
+ end
+ end
+
+ context 'with invalid params' do
+ context 'with invalid page' do
+ subject do
+ get harbor_artifact_url(container, repository_id, page: 'aaa'), headers: json_header
+ end
+
+ it_behaves_like 'responds with 422 status with json'
+ end
+
+ context 'with invalid limit' do
+ subject do
+ get harbor_artifact_url(container, repository_id, limit: 'aaa'), headers: json_header
+ end
+
+ it_behaves_like 'responds with 422 status with json'
+ end
+ end
+ end
+end
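Because the nested example name interpolates args[:anonymous_status_code], an including spec has to pass that key when it includes the shared examples. A hedged usage sketch; the container and status value are illustrative:

    # e.g. in a project-level Harbor controller spec, where the container is the project
    it_behaves_like 'a harbor artifacts controller', anonymous_status_code: '302' do
      # The container is assumed to have a Harbor integration matching the stubbed demo.goharbor.io project.
      let_it_be(:container) { create(:project) }
    end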
diff --git a/spec/support/shared_examples/harbor/container_shared_examples.rb b/spec/support/shared_examples/harbor/container_shared_examples.rb
new file mode 100644
index 00000000000..57274e0b457
--- /dev/null
+++ b/spec/support/shared_examples/harbor/container_shared_examples.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'raises NotImplementedError when calling #container' do
+ describe '#container' do
+ it 'raises NotImplementedError' do
+ expect { controller.send(:container) }.to raise_error(NotImplementedError)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/harbor/repositories_controller_shared_examples.rb b/spec/support/shared_examples/harbor/repositories_controller_shared_examples.rb
new file mode 100644
index 00000000000..b35595a10b2
--- /dev/null
+++ b/spec/support/shared_examples/harbor/repositories_controller_shared_examples.rb
@@ -0,0 +1,172 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a harbor repositories controller' do |args|
+ include HarborHelper
+ let_it_be(:user) { create(:user) }
+ let_it_be(:unauthorized_user) { create(:user) }
+ let_it_be(:json_header) { { accept: 'application/json' } }
+
+ let(:mock_repositories) do
+ [
+ {
+ "artifact_count": 6,
+ "creation_time": "2022-04-24T10:59:02.719Z",
+ "id": 33,
+ "name": "test/photon",
+ "project_id": 3,
+ "pull_count": 12,
+ "update_time": "2022-04-24T11:06:27.678Z"
+ },
+ {
+ "artifact_count": 1,
+ "creation_time": "2022-04-23T08:04:08.880Z",
+ "id": 1,
+ "name": "test/gemnasium",
+ "project_id": 3,
+ "pull_count": 0,
+ "update_time": "2022-04-23T08:04:08.880Z"
+ }
+ ]
+ end
+
+ shared_examples 'responds with 404 status' do
+ it 'returns 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ shared_examples 'responds with 200 status with html' do
+ it 'renders the index template' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:index)
+ end
+ end
+
+ shared_examples 'responds with 302 status' do
+ it 'returns 302' do
+ subject
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+
+ shared_examples 'responds with 200 status with json' do
+ it 'returns 200 and does not render the index template' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).not_to render_template(:index)
+ end
+ end
+
+ shared_examples 'responds with 422 status with json' do
+ it 'returns 422' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+
+ before do
+ stub_request(:get, "https://demo.goharbor.io/api/v2.0/projects/testproject/repositories?page=1&page_size=10")
+ .with(
+ headers: {
+ 'Authorization': 'Basic aGFyYm9ydXNlcm5hbWU6aGFyYm9ycGFzc3dvcmQ=',
+ 'Content-Type': 'application/json'
+ }).to_return(status: 200, body: mock_repositories.to_json, headers: { "x-total-count": 2 })
+ container.add_reporter(user)
+ sign_in(user)
+ end
+
+ describe 'GET #index.html' do
+ subject do
+ get harbor_repository_url(container)
+ end
+
+ context 'with harbor registry feature flag enabled' do
+ it_behaves_like 'responds with 200 status with html'
+ end
+
+ context 'with harbor registry feature flag disabled' do
+ before do
+ stub_feature_flags(harbor_registry_integration: false)
+ end
+
+ it_behaves_like 'responds with 404 status'
+ end
+
+ context 'with anonymous user' do
+ before do
+ sign_out(user)
+ end
+
+ it_behaves_like "responds with #{args[:anonymous_status_code]} status"
+ end
+
+ context 'with unauthorized user' do
+ before do
+ sign_in(unauthorized_user)
+ end
+
+ it_behaves_like 'responds with 404 status'
+ end
+ end
+
+ describe 'GET #index.json' do
+ subject do
+ get harbor_repository_url(container), headers: json_header
+ end
+
+ context 'with harbor registry feature flag enabled' do
+ it_behaves_like 'responds with 200 status with json'
+ end
+
+ context 'with harbor registry feature flag disabled' do
+ before do
+ stub_feature_flags(harbor_registry_integration: false)
+ end
+
+ it_behaves_like 'responds with 404 status'
+ end
+
+ context 'with valid params' do
+ context 'with valid page params' do
+ subject do
+ get harbor_repository_url(container, page: '1'), headers: json_header
+ end
+
+ it_behaves_like 'responds with 200 status with json'
+ end
+
+ context 'with valid limit params' do
+ subject do
+ get harbor_repository_url(container, limit: '10'), headers: json_header
+ end
+
+ it_behaves_like 'responds with 200 status with json'
+ end
+ end
+
+ context 'with invalid params' do
+ context 'with invalid page params' do
+ subject do
+ get harbor_repository_url(container, page: 'aaa'), headers: json_header
+ end
+
+ it_behaves_like 'responds with 422 status with json'
+ end
+
+ context 'with invalid limit params' do
+ subject do
+ get harbor_repository_url(container, limit: 'aaa'), headers: json_header
+ end
+
+ it_behaves_like 'responds with 422 status with json'
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/harbor/tags_controller_shared_examples.rb b/spec/support/shared_examples/harbor/tags_controller_shared_examples.rb
new file mode 100644
index 00000000000..46fea7fdff6
--- /dev/null
+++ b/spec/support/shared_examples/harbor/tags_controller_shared_examples.rb
@@ -0,0 +1,155 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a harbor tags controller' do |args|
+ include HarborHelper
+ let_it_be(:user) { create(:user) }
+ let_it_be(:unauthorized_user) { create(:user) }
+ let_it_be(:json_header) { { accept: 'application/json' } }
+
+ let(:mock_artifacts) do
+ [
+ {
+ "artifact_id": 1,
+ "id": 1,
+ "immutable": false,
+ "name": "2",
+ "pull_time": "0001-01-01T00:00:00.000Z",
+ "push_time": "2022-04-23T08:04:08.920Z",
+ "repository_id": 1,
+ "signed": false
+ }
+ ]
+ end
+
+ let(:repository_id) { 'test' }
+ let(:artifact_id) { '1' }
+
+ shared_examples 'responds with 404 status' do
+ it 'returns 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ shared_examples 'responds with 200 status with json' do
+ it 'returns 200 and does not render the index template' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).not_to render_template(:index)
+ end
+ end
+
+ shared_examples 'responds with 302 status' do
+ it 'returns 302' do
+ subject
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+
+ shared_examples 'responds with 422 status with json' do
+ it 'returns 422' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+
+ before do
+ stub_request(:get,
+ "https://demo.goharbor.io/api/v2.0/projects/testproject/repositories/test/artifacts/1/tags"\
+ "?page=1&page_size=10")
+ .with(
+ headers: {
+ 'Authorization': 'Basic aGFyYm9ydXNlcm5hbWU6aGFyYm9ycGFzc3dvcmQ=',
+ 'Content-Type': 'application/json'
+ }).to_return(status: 200, body: mock_artifacts.to_json, headers: { "x-total-count": 2 })
+ container.add_reporter(user)
+ sign_in(user)
+ end
+
+ describe 'GET #index.json' do
+ subject do
+ get(harbor_tag_url(container, repository_id, artifact_id),
+ headers: json_header)
+ end
+
+ context 'with harbor registry feature flag enabled' do
+ it_behaves_like 'responds with 200 status with json'
+ end
+
+ context 'with harbor registry feature flag disabled' do
+ before do
+ stub_feature_flags(harbor_registry_integration: false)
+ end
+
+ it_behaves_like 'responds with 404 status'
+ end
+
+ context 'with anonymous user' do
+ before do
+ sign_out(user)
+ end
+
+ it_behaves_like "responds with #{args[:anonymous_status_code]} status"
+ end
+
+ context 'with unauthorized user' do
+ before do
+ sign_in(unauthorized_user)
+ end
+
+ it_behaves_like 'responds with 404 status'
+ end
+
+ context 'with valid params' do
+ context 'with valid repository' do
+ subject do
+ get harbor_tag_url(container, repository_id, artifact_id), headers: json_header
+ end
+
+ it_behaves_like 'responds with 200 status with json'
+ end
+
+ context 'with valid page' do
+ subject do
+ get(harbor_tag_url(container, repository_id, artifact_id, page: '1'),
+ headers: json_header)
+ end
+
+ it_behaves_like 'responds with 200 status with json'
+ end
+
+ context 'with valid limit' do
+ subject do
+ get(harbor_tag_url(container, repository_id, artifact_id, limit: '10'),
+ headers: json_header)
+ end
+
+ it_behaves_like 'responds with 200 status with json'
+ end
+ end
+
+ context 'with invalid params' do
+ context 'with invalid page' do
+ subject do
+ get(harbor_tag_url(container, repository_id, artifact_id, page: 'aaa'),
+ headers: json_header)
+ end
+
+ it_behaves_like 'responds with 422 status with json'
+ end
+
+ context 'with invalid limit' do
+ subject do
+ get(harbor_tag_url(container, repository_id, artifact_id, limit: 'aaa'),
+ headers: json_header)
+ end
+
+ it_behaves_like 'responds with 422 status with json'
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/integrations/integration_settings_form.rb b/spec/support/shared_examples/integrations/integration_settings_form.rb
index dfe5a071f91..5041ac4a660 100644
--- a/spec/support/shared_examples/integrations/integration_settings_form.rb
+++ b/spec/support/shared_examples/integrations/integration_settings_form.rb
@@ -20,6 +20,11 @@ RSpec.shared_examples 'integration settings form' do
"#{integration.title} field #{field_name} not present"
end
+ api_only_fields = integration.fields.select { _1[:api_only] }
+ api_only_fields.each do |field|
+ expect(page).not_to have_field("service[#{field.name}]", wait: 0)
+ end
+
sections = integration.sections
events = parse_json(trigger_events_for_integration(integration))
diff --git a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
index 284c129221b..b786d7e5527 100644
--- a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
@@ -265,10 +265,9 @@ RSpec.shared_examples 'common trace features' do
end
context 'build token' do
- let(:token) { 'my_secret_token' }
+ let(:token) { build.token }
before do
- build.update!(token: token)
trace.append(token, 0)
end
diff --git a/spec/support/shared_examples/lib/gitlab/position_formatters_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/position_formatters_shared_examples.rb
index 326800e6dc2..c9300aff3e6 100644
--- a/spec/support/shared_examples/lib/gitlab/position_formatters_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/position_formatters_shared_examples.rb
@@ -32,21 +32,7 @@ RSpec.shared_examples "position formatter" do
subject { formatter.to_h }
- context 'when file_identifier_hash is disabled' do
- before do
- stub_feature_flags(file_identifier_hash: false)
- end
-
- it { is_expected.to eq(formatter_hash.except(:file_identifier_hash)) }
- end
-
- context 'when file_identifier_hash is enabled' do
- before do
- stub_feature_flags(file_identifier_hash: true)
- end
-
- it { is_expected.to eq(formatter_hash) }
- end
+ it { is_expected.to eq(formatter_hash) }
end
describe '#==' do
diff --git a/spec/support/shared_examples/lib/gitlab/search_language_filter_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/search_language_filter_shared_examples.rb
new file mode 100644
index 00000000000..a3e4379f4d3
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/search_language_filter_shared_examples.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'search results filtered by language' do
+ let(:scope) { 'blobs' }
+ let(:filters) { { language: %w[Ruby Markdown] } }
+ let(:query) { 'def | popen | test' }
+
+ before do
+ project.repository.index_commits_and_blobs
+
+ ensure_elasticsearch_index!
+ end
+
+ subject(:blob_results) { results.objects('blobs') }
+
+ it 'filters by language', :sidekiq_inline, :aggregate_failures do
+ expected_paths = %w[
+ files/ruby/popen.rb
+ files/markdown/ruby-style-guide.md
+ files/ruby/regex.rb
+ files/ruby/version_info.rb
+ CONTRIBUTING.md
+ ]
+
+ paths = blob_results.map { |blob| blob.binary_path }
+ expect(blob_results.size).to eq(5)
+ expect(paths).to match_array(expected_paths)
+ end
+
+ context 'when the search_blobs_language_aggregation feature flag is disabled' do
+ before do
+ stub_feature_flags(search_blobs_language_aggregation: false)
+ end
+
+ it 'does not filter by language', :sidekiq_inline, :aggregate_failures do
+ expected_paths = %w[
+ CHANGELOG
+ CONTRIBUTING.md
+ bar/branch-test.txt
+ custom-highlighting/test.gitlab-custom
+ files/ruby/popen.rb
+ files/ruby/regex.rb
+ files/ruby/version_info.rb
+ files/whitespace
+ encoding/test.txt
+ files/markdown/ruby-style-guide.md
+ ]
+
+ paths = blob_results.map { |blob| blob.binary_path }
+ expect(blob_results.size).to eq(10)
+ expect(paths).to match_array(expected_paths)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb
index b5d93aec1bf..9d280d9404a 100644
--- a/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb
@@ -32,3 +32,45 @@ RSpec.shared_examples 'does not track when feature flag is disabled' do |feature
end
end
end
+
+RSpec.shared_examples 'a daily tracked issuable snowplow and service ping events' do
+ before do
+ stub_application_setting(usage_ping_enabled: true)
+ end
+
+ def count_unique(date_from: 1.minute.ago, date_to: 1.minute.from_now)
+ Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(event_names: action, start_date: date_from, end_date: date_to)
+ end
+
+ specify do
+ aggregate_failures do
+ expect(track_action(author: user1, project: project)).to be_truthy
+ expect(track_action(author: user1, project: project)).to be_truthy
+ expect(track_action(author: user2, project: project)).to be_truthy
+ expect(count_unique).to eq(2)
+ end
+ end
+
+ it 'does not track edit actions if author is not present' do
+ expect(track_action(author: nil, project: project)).to be_nil
+ end
+
+ it 'emits snowplow event' do
+ track_action(author: user1, project: project)
+
+ expect_snowplow_event(category: 'issues_edit', action: action, user: user1,
+ namespace: project.namespace, project: project)
+ end
+
+ context 'with route_hll_to_snowplow_phase2 disabled' do
+ before do
+ stub_feature_flags(route_hll_to_snowplow_phase2: false)
+ end
+
+ it 'does not emit snowplow event' do
+ track_action(author: user1, project: project)
+
+ expect_no_snowplow_event
+ end
+ end
+end
diff --git a/spec/support/shared_examples/merge_request_author_auto_assign_shared_examples.rb b/spec/support/shared_examples/merge_request_author_auto_assign_shared_examples.rb
deleted file mode 100644
index d4986975f03..00000000000
--- a/spec/support/shared_examples/merge_request_author_auto_assign_shared_examples.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'merge request author auto assign' do
- it 'populates merge request author as assignee' do
- expect(find('.js-assignee-search')).to have_content(user.name)
- expect(page).not_to have_content 'Assign yourself'
- end
-end
diff --git a/spec/support/shared_examples/models/chat_integration_shared_examples.rb b/spec/support/shared_examples/models/chat_integration_shared_examples.rb
index fa10b03fa90..d189e91effd 100644
--- a/spec/support/shared_examples/models/chat_integration_shared_examples.rb
+++ b/spec/support/shared_examples/models/chat_integration_shared_examples.rb
@@ -357,7 +357,8 @@ RSpec.shared_examples "chat integration" do |integration_name|
end
context 'deployment events' do
- let(:sample_data) { Gitlab::DataBuilder::Deployment.build(create(:deployment), Time.now) }
+ let(:deployment) { create(:deployment) }
+ let(:sample_data) { Gitlab::DataBuilder::Deployment.build(deployment, deployment.status, Time.now) }
it_behaves_like "untriggered #{integration_name} integration"
end
diff --git a/spec/support/shared_examples/models/clusters/elastic_stack_client_shared.rb b/spec/support/shared_examples/models/clusters/elastic_stack_client_shared.rb
deleted file mode 100644
index 744262d79ea..00000000000
--- a/spec/support/shared_examples/models/clusters/elastic_stack_client_shared.rb
+++ /dev/null
@@ -1,82 +0,0 @@
-# frozen_string_literal: true
-
-# Input
-# - factory: [:clusters_applications_elastic_stack, :clusters_integrations_elastic_stack]
-RSpec.shared_examples 'cluster-based #elasticsearch_client' do |factory|
- describe '#elasticsearch_client' do
- context 'cluster is nil' do
- subject { build(factory, cluster: nil) }
-
- it 'returns nil' do
- expect(subject.cluster).to be_nil
- expect(subject.elasticsearch_client).to be_nil
- end
- end
-
- context "cluster doesn't have kubeclient" do
- let(:cluster) { create(:cluster) }
-
- subject { create(factory, cluster: cluster) }
-
- it 'returns nil' do
- expect(subject.elasticsearch_client).to be_nil
- end
- end
-
- context 'cluster has kubeclient' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:kubernetes_url) { subject.cluster.platform_kubernetes.api_url }
- let(:kube_client) { subject.cluster.kubeclient.core_client }
-
- subject { create(factory, cluster: cluster) }
-
- before do
- subject.cluster.platform_kubernetes.namespace = 'a-namespace'
- stub_kubeclient_discover(cluster.platform_kubernetes.api_url)
-
- create(:cluster_kubernetes_namespace,
- cluster: cluster,
- cluster_project: cluster.cluster_project,
- project: cluster.cluster_project.project)
- end
-
- it 'creates proxy elasticsearch_client' do
- expect(subject.elasticsearch_client).to be_instance_of(Elasticsearch::Transport::Client)
- end
-
- it 'copies proxy_url, options and headers from kube client to elasticsearch_client' do
- expect(Elasticsearch::Client)
- .to(receive(:new))
- .with(url: a_valid_url, adapter: :net_http)
- .and_call_original
-
- client = subject.elasticsearch_client
- faraday_connection = client.transport.connections.first.connection
-
- expect(faraday_connection.headers["Authorization"]).to eq(kube_client.headers[:Authorization])
- expect(faraday_connection.ssl.cert_store).to be_instance_of(OpenSSL::X509::Store)
- expect(faraday_connection.ssl.verify).to eq(1)
- expect(faraday_connection.options.timeout).to be_nil
- end
-
- context 'when cluster is not reachable' do
- before do
- allow(kube_client).to receive(:proxy_url).and_raise(Kubeclient::HttpError.new(401, 'Unauthorized', nil))
- end
-
- it 'returns nil' do
- expect(subject.elasticsearch_client).to be_nil
- end
- end
-
- context 'when timeout is provided' do
- it 'sets timeout in elasticsearch_client' do
- client = subject.elasticsearch_client(timeout: 123)
- faraday_connection = client.transport.connections.first.connection
-
- expect(faraday_connection.options.timeout).to eq(123)
- end
- end
- end
- end
-end
diff --git a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
index 2e062cda4e9..d80be5be3b3 100644
--- a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
@@ -230,7 +230,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name
context 'deployment events' do
let_it_be(:deployment) { create(:deployment) }
- let(:data) { Gitlab::DataBuilder::Deployment.build(deployment, Time.current) }
+ let(:data) { Gitlab::DataBuilder::Deployment.build(deployment, 'created', Time.current) }
it_behaves_like 'calls the integration API with the event message', /Deploy to (.*?) created/
end
@@ -677,7 +677,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name
create(:deployment, :success, project: project, sha: project.commit.sha, ref: project.default_branch)
end
- let(:data) { Gitlab::DataBuilder::Deployment.build(deployment, Time.now) }
+ let(:data) { Gitlab::DataBuilder::Deployment.build(deployment, deployment.status, Time.now) }
before do
allow(chat_integration).to receive_messages(
diff --git a/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb b/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb
index a2b4cdc33d0..d06e8391a9a 100644
--- a/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb
@@ -82,7 +82,7 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
it { is_expected.to belong_to(:group) }
it { is_expected.to have_many(:issues) }
it { is_expected.to have_many(:merge_requests) }
- it { is_expected.to have_many(:labels) }
+ it { is_expected.to have_many(:labels).through(:issues) }
end
describe '#timebox_name' do
diff --git a/spec/support/shared_examples/models/issuable_participants_shared_examples.rb b/spec/support/shared_examples/models/issuable_participants_shared_examples.rb
new file mode 100644
index 00000000000..c3eaae0ace2
--- /dev/null
+++ b/spec/support/shared_examples/models/issuable_participants_shared_examples.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
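+# Expects the including spec to define `issuable`, `issuable_parent`, and `params` (attributes used to build notes).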
+RSpec.shared_examples 'issuable participants' do
+ context 'when resource parent is public' do
+ context 'and users are referenced on notes' do
+ let_it_be(:notes_author) { create(:user) }
+
+ let(:note_params) { params.merge(author: notes_author) }
+
+ before do
+ create(:note, note_params)
+ end
+
+ it 'includes the issue author' do
+ expect(issuable.participants).to include(issuable.author)
+ end
+
+ it 'includes the authors of the notes' do
+ expect(issuable.participants).to include(notes_author)
+ end
+
+ context 'and note is confidential' do
+ context 'and mentions users' do
+ let_it_be(:guest_1) { create(:user) }
+ let_it_be(:guest_2) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+
+ before do
+ issuable_parent.add_guest(guest_1)
+ issuable_parent.add_guest(guest_2)
+ issuable_parent.add_reporter(reporter)
+
+ confidential_note_params =
+ note_params.merge(
+ confidential: true,
+ note: "mentions #{guest_1.to_reference} and #{guest_2.to_reference} and #{reporter.to_reference}"
+ )
+
+ regular_note_params =
+ note_params.merge(note: "Mentions #{guest_2.to_reference}")
+
+ create(:note, confidential_note_params)
+ create(:note, regular_note_params)
+ end
+
+ it 'only includes users that can read the note as participants' do
+ expect(issuable.participants).to contain_exactly(issuable.author, notes_author, reporter, guest_2)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/member_shared_examples.rb b/spec/support/shared_examples/models/member_shared_examples.rb
index 75fff11cecd..aa40a2c7135 100644
--- a/spec/support/shared_examples/models/member_shared_examples.rb
+++ b/spec/support/shared_examples/models/member_shared_examples.rb
@@ -80,7 +80,7 @@ RSpec.shared_examples_for "member creation" do
let_it_be(:admin) { create(:admin) }
it 'returns a Member object', :aggregate_failures do
- member = described_class.add_user(source, user, :maintainer)
+ member = described_class.add_member(source, user, :maintainer)
expect(member).to be_a member_type
expect(member).to be_persisted
@@ -99,7 +99,7 @@ RSpec.shared_examples_for "member creation" do
end
it 'does not update the member' do
- member = described_class.add_user(source, project_bot, :maintainer, current_user: user)
+ member = described_class.add_member(source, project_bot, :maintainer, current_user: user)
expect(source.users.reload).to include(project_bot)
expect(member).to be_persisted
@@ -110,7 +110,7 @@ RSpec.shared_examples_for "member creation" do
context 'when project_bot is not already a member' do
it 'adds the member' do
- member = described_class.add_user(source, project_bot, :maintainer, current_user: user)
+ member = described_class.add_member(source, project_bot, :maintainer, current_user: user)
expect(source.users.reload).to include(project_bot)
expect(member).to be_persisted
@@ -120,7 +120,7 @@ RSpec.shared_examples_for "member creation" do
context 'when admin mode is enabled', :enable_admin_mode, :aggregate_failures do
it 'sets members.created_by to the given admin current_user' do
- member = described_class.add_user(source, user, :maintainer, current_user: admin)
+ member = described_class.add_member(source, user, :maintainer, current_user: admin)
expect(member).to be_persisted
expect(source.users.reload).to include(user)
@@ -130,7 +130,7 @@ RSpec.shared_examples_for "member creation" do
context 'when admin mode is disabled' do
it 'rejects setting members.created_by to the given admin current_user', :aggregate_failures do
- member = described_class.add_user(source, user, :maintainer, current_user: admin)
+ member = described_class.add_member(source, user, :maintainer, current_user: admin)
expect(member).not_to be_persisted
expect(source.users.reload).not_to include(user)
@@ -139,7 +139,7 @@ RSpec.shared_examples_for "member creation" do
end
it 'sets members.expires_at to the given expires_at' do
- member = described_class.add_user(source, user, :maintainer, expires_at: Date.new(2016, 9, 22))
+ member = described_class.add_member(source, user, :maintainer, expires_at: Date.new(2016, 9, 22))
expect(member.expires_at).to eq(Date.new(2016, 9, 22))
end
@@ -148,7 +148,7 @@ RSpec.shared_examples_for "member creation" do
it "accepts the :#{sym_key} symbol as access level", :aggregate_failures do
expect(source.users).not_to include(user)
- member = described_class.add_user(source, user.id, sym_key)
+ member = described_class.add_member(source, user.id, sym_key)
expect(member.access_level).to eq(int_access_level)
expect(source.users.reload).to include(user)
@@ -157,7 +157,7 @@ RSpec.shared_examples_for "member creation" do
it "accepts the #{int_access_level} integer as access level", :aggregate_failures do
expect(source.users).not_to include(user)
- member = described_class.add_user(source, user.id, int_access_level)
+ member = described_class.add_member(source, user.id, int_access_level)
expect(member.access_level).to eq(int_access_level)
expect(source.users.reload).to include(user)
@@ -169,7 +169,7 @@ RSpec.shared_examples_for "member creation" do
it 'adds the user as a member' do
expect(source.users).not_to include(user)
- described_class.add_user(source, user.id, :maintainer)
+ described_class.add_member(source, user.id, :maintainer)
expect(source.users.reload).to include(user)
end
@@ -179,7 +179,7 @@ RSpec.shared_examples_for "member creation" do
it 'does not add the user as a member' do
expect(source.users).not_to include(user)
- described_class.add_user(source, non_existing_record_id, :maintainer)
+ described_class.add_member(source, non_existing_record_id, :maintainer)
expect(source.users.reload).not_to include(user)
end
@@ -189,7 +189,7 @@ RSpec.shared_examples_for "member creation" do
it 'adds the user as a member' do
expect(source.users).not_to include(user)
- described_class.add_user(source, user, :maintainer)
+ described_class.add_member(source, user, :maintainer)
expect(source.users.reload).to include(user)
end
@@ -205,7 +205,7 @@ RSpec.shared_examples_for "member creation" do
expect(source.requesters.exists?(user_id: user)).to be_truthy
expect do
- described_class.add_user(source, user, :maintainer)
+ described_class.add_member(source, user, :maintainer)
end.to raise_error(Gitlab::Access::AccessDeniedError)
expect(source.users.reload).not_to include(user)
@@ -217,7 +217,7 @@ RSpec.shared_examples_for "member creation" do
it 'adds the user as a member' do
expect(source.users).not_to include(user)
- described_class.add_user(source, user.email, :maintainer)
+ described_class.add_member(source, user.email, :maintainer)
expect(source.users.reload).to include(user)
end
@@ -227,7 +227,7 @@ RSpec.shared_examples_for "member creation" do
it 'creates an invited member' do
expect(source.users).not_to include(user)
- described_class.add_user(source, 'user@example.com', :maintainer)
+ described_class.add_member(source, 'user@example.com', :maintainer)
expect(source.members.invite.pluck(:invite_email)).to include('user@example.com')
end
@@ -237,7 +237,7 @@ RSpec.shared_examples_for "member creation" do
it 'creates an invited member', :aggregate_failures do
email_starting_with_number = "#{user.id}_email@example.com"
- described_class.add_user(source, email_starting_with_number, :maintainer)
+ described_class.add_member(source, email_starting_with_number, :maintainer)
expect(source.members.invite.pluck(:invite_email)).to include(email_starting_with_number)
expect(source.users.reload).not_to include(user)
@@ -249,7 +249,7 @@ RSpec.shared_examples_for "member creation" do
it 'creates the member' do
expect(source.users).not_to include(user)
- described_class.add_user(source, user, :maintainer, current_user: admin)
+ described_class.add_member(source, user, :maintainer, current_user: admin)
expect(source.users.reload).to include(user)
end
@@ -263,7 +263,7 @@ RSpec.shared_examples_for "member creation" do
expect(source.users).not_to include(user)
expect(source.requesters.exists?(user_id: user)).to be_truthy
- described_class.add_user(source, user, :maintainer, current_user: admin)
+ described_class.add_member(source, user, :maintainer, current_user: admin)
expect(source.users.reload).to include(user)
expect(source.requesters.reload.exists?(user_id: user)).to be_falsy
@@ -275,7 +275,7 @@ RSpec.shared_examples_for "member creation" do
it 'does not create the member', :aggregate_failures do
expect(source.users).not_to include(user)
- member = described_class.add_user(source, user, :maintainer, current_user: user)
+ member = described_class.add_member(source, user, :maintainer, current_user: user)
expect(source.users.reload).not_to include(user)
expect(member).not_to be_persisted
@@ -290,7 +290,7 @@ RSpec.shared_examples_for "member creation" do
expect(source.users).not_to include(user)
expect(source.requesters.exists?(user_id: user)).to be_truthy
- described_class.add_user(source, user, :maintainer, current_user: user)
+ described_class.add_member(source, user, :maintainer, current_user: user)
expect(source.users.reload).not_to include(user)
expect(source.requesters.exists?(user_id: user)).to be_truthy
@@ -299,37 +299,51 @@ RSpec.shared_examples_for "member creation" do
end
context 'when member already exists' do
- before do
- source.add_user(user, :developer)
- end
+ context 'when member is a user' do
+ before do
+ source.add_member(user, :developer)
+ end
- context 'with no current_user' do
- it 'updates the member' do
- expect(source.users).to include(user)
+ context 'with no current_user' do
+ it 'updates the member' do
+ expect(source.users).to include(user)
- described_class.add_user(source, user, :maintainer)
+ described_class.add_member(source, user, :maintainer)
- expect(source.members.find_by(user_id: user).access_level).to eq(Gitlab::Access::MAINTAINER)
+ expect(source.members.find_by(user_id: user).access_level).to eq(Gitlab::Access::MAINTAINER)
+ end
end
- end
- context 'when current_user can update member', :enable_admin_mode do
- it 'updates the member' do
- expect(source.users).to include(user)
+ context 'when current_user can update member', :enable_admin_mode do
+ it 'updates the member' do
+ expect(source.users).to include(user)
- described_class.add_user(source, user, :maintainer, current_user: admin)
+ described_class.add_member(source, user, :maintainer, current_user: admin)
- expect(source.members.find_by(user_id: user).access_level).to eq(Gitlab::Access::MAINTAINER)
+ expect(source.members.find_by(user_id: user).access_level).to eq(Gitlab::Access::MAINTAINER)
+ end
end
- end
- context 'when current_user cannot update member' do
- it 'does not update the member' do
- expect(source.users).to include(user)
+ context 'when current_user cannot update member' do
+ it 'does not update the member' do
+ expect(source.users).to include(user)
+
+ described_class.add_member(source, user, :maintainer, current_user: user)
+
+ expect(source.members.find_by(user_id: user).access_level).to eq(Gitlab::Access::DEVELOPER)
+ end
+ end
+ end
- described_class.add_user(source, user, :maintainer, current_user: user)
+ context 'when member is an invite by email' do
+ let_it_be(:email) { 'user@email.com' }
+ let_it_be(:existing_member) { source.add_developer(email) }
- expect(source.members.find_by(user_id: user).access_level).to eq(Gitlab::Access::DEVELOPER)
+ it 'updates the member for that email' do
+ expect do
+ described_class.add_member(source, email, :maintainer)
+ end.to change { existing_member.reset.access_level }.from(Member::DEVELOPER).to(Member::MAINTAINER)
+ .and not_change { source.members.invite.count }
end
end
end
@@ -345,12 +359,12 @@ RSpec.shared_examples_for "bulk member creation" do
# maintainers cannot add owners
source.add_maintainer(user)
- expect(described_class.add_users(source, [user1, user2], :owner, current_user: user)).to be_empty
+ expect(described_class.add_members(source, [user1, user2], :owner, current_user: user)).to be_empty
end
end
it 'returns Member objects' do
- members = described_class.add_users(source, [user1, user2], :maintainer)
+ members = described_class.add_members(source, [user1, user2], :maintainer)
expect(members.map(&:user)).to contain_exactly(user1, user2)
expect(members).to all(be_a(member_type))
@@ -358,7 +372,7 @@ RSpec.shared_examples_for "bulk member creation" do
end
it 'returns an empty array' do
- members = described_class.add_users(source, [], :maintainer)
+ members = described_class.add_members(source, [], :maintainer)
expect(members).to be_a Array
expect(members).to be_empty
@@ -367,7 +381,7 @@ RSpec.shared_examples_for "bulk member creation" do
it 'supports different formats' do
list = ['joe@local.test', admin, user1.id, user2.id.to_s]
- members = described_class.add_users(source, list, :maintainer)
+ members = described_class.add_members(source, list, :maintainer)
expect(members.size).to eq(4)
expect(members.first).to be_invite
@@ -375,7 +389,7 @@ RSpec.shared_examples_for "bulk member creation" do
context 'with de-duplication' do
it 'has the same user by id and user' do
- members = described_class.add_users(source, [user1.id, user1, user1.id, user2, user2.id, user2], :maintainer)
+ members = described_class.add_members(source, [user1.id, user1, user1.id, user2, user2.id, user2], :maintainer)
expect(members.map(&:user)).to contain_exactly(user1, user2)
expect(members).to all(be_a(member_type))
@@ -383,7 +397,7 @@ RSpec.shared_examples_for "bulk member creation" do
end
it 'has the same user sent more than once' do
- members = described_class.add_users(source, [user1, user1], :maintainer)
+ members = described_class.add_members(source, [user1, user1], :maintainer)
expect(members.map(&:user)).to contain_exactly(user1)
expect(members).to all(be_a(member_type))
@@ -392,7 +406,7 @@ RSpec.shared_examples_for "bulk member creation" do
end
it 'with the same user sent more than once by user and by email' do
- members = described_class.add_users(source, [user1, user1.email], :maintainer)
+ members = described_class.add_members(source, [user1, user1.email], :maintainer)
expect(members.map(&:user)).to contain_exactly(user1)
expect(members).to all(be_a(member_type))
@@ -400,7 +414,7 @@ RSpec.shared_examples_for "bulk member creation" do
end
it 'with the same user sent more than once by user id and by email' do
- members = described_class.add_users(source, [user1.id, user1.email], :maintainer)
+ members = described_class.add_members(source, [user1.id, user1.email], :maintainer)
expect(members.map(&:user)).to contain_exactly(user1)
expect(members).to all(be_a(member_type))
@@ -409,12 +423,12 @@ RSpec.shared_examples_for "bulk member creation" do
context 'when a member already exists' do
before do
- source.add_user(user1, :developer)
+ source.add_member(user1, :developer)
end
it 'has the same user sent more than once with the member already existing' do
expect do
- members = described_class.add_users(source, [user1, user1, user2], :maintainer)
+ members = described_class.add_members(source, [user1, user1, user2], :maintainer)
expect(members.map(&:user)).to contain_exactly(user1, user2)
expect(members).to all(be_a(member_type))
expect(members).to all(be_persisted)
@@ -425,7 +439,7 @@ RSpec.shared_examples_for "bulk member creation" do
user3 = create(:user)
expect do
- members = described_class.add_users(source, [user1.id, user2, user3.id], :maintainer)
+ members = described_class.add_members(source, [user1.id, user2, user3.id], :maintainer)
expect(members.map(&:user)).to contain_exactly(user1, user2, user3)
expect(members).to all(be_a(member_type))
expect(members).to all(be_persisted)
@@ -436,7 +450,7 @@ RSpec.shared_examples_for "bulk member creation" do
user3 = create(:user)
expect do
- members = described_class.add_users(source, [user1, user2, user3], :maintainer)
+ members = described_class.add_members(source, [user1, user2, user3], :maintainer)
expect(members.map(&:user)).to contain_exactly(user1, user2, user3)
expect(members).to all(be_a(member_type))
expect(members).to all(be_persisted)
@@ -448,7 +462,7 @@ RSpec.shared_examples_for "bulk member creation" do
let(:task_project) { source.is_a?(Group) ? create(:project, group: source) : source }
it 'creates a member_task with the correct attributes', :aggregate_failures do
- members = described_class.add_users(source, [user1], :developer, tasks_to_be_done: %w(ci code), tasks_project_id: task_project.id)
+ members = described_class.add_members(source, [user1], :developer, tasks_to_be_done: %w(ci code), tasks_project_id: task_project.id)
member = members.last
expect(member.tasks_to_be_done).to match_array([:ci, :code])
@@ -457,7 +471,7 @@ RSpec.shared_examples_for "bulk member creation" do
context 'with an already existing member' do
before do
- source.add_user(user1, :developer)
+ source.add_member(user1, :developer)
end
it 'does not update tasks to be done if tasks already exist', :aggregate_failures do
@@ -465,7 +479,7 @@ RSpec.shared_examples_for "bulk member creation" do
create(:member_task, member: member, project: task_project, tasks_to_be_done: %w(code ci))
expect do
- described_class.add_users(source,
+ described_class.add_members(source,
[user1.id],
:developer,
tasks_to_be_done: %w(issues),
@@ -479,7 +493,7 @@ RSpec.shared_examples_for "bulk member creation" do
it 'adds tasks to be done if they do not exist', :aggregate_failures do
expect do
- described_class.add_users(source,
+ described_class.add_members(source,
[user1.id],
:developer,
tasks_to_be_done: %w(issues),
diff --git a/spec/support/shared_examples/models/mentionable_shared_examples.rb b/spec/support/shared_examples/models/mentionable_shared_examples.rb
index e23658d1774..f9612dd61be 100644
--- a/spec/support/shared_examples/models/mentionable_shared_examples.rb
+++ b/spec/support/shared_examples/models/mentionable_shared_examples.rb
@@ -260,6 +260,25 @@ RSpec.shared_examples 'mentions in notes' do |mentionable_type|
expect(mentionable.referenced_projects(user)).to eq [mentionable.project].compact # epic.project is nil, and we want empty []
expect(mentionable.referenced_groups(user)).to eq [group]
end
+
+ if [:epic, :issue].include?(mentionable_type)
+ context 'and note is confidential' do
+ let_it_be(:guest) { create(:user) }
+
+ let(:note_desc) { "#{guest.to_reference} and #{user2.to_reference} and #{user.to_reference}" }
+
+ before do
+ note.resource_parent.add_reporter(user2)
+ note.resource_parent.add_guest(guest)
+ # Bypass :confidential update model validation for testing purposes
+ note.update_attribute(:confidential, true)
+ end
+
+ it 'returns only mentioned users that have permissions' do
+ expect(note.mentioned_users).to contain_exactly(user, user2)
+ end
+ end
+ end
end
end
@@ -294,6 +313,26 @@ RSpec.shared_examples 'load mentions from DB' do |mentionable_type|
end
end
+ if [:epic, :issue].include?(mentionable_type)
+ context 'and note is confidential' do
+ let_it_be(:guest) { create(:user) }
+
+ let(:note_desc) { "#{guest.to_reference} and #{mentioned_user.to_reference}" }
+
+ before do
+ note.resource_parent.add_reporter(mentioned_user)
+ note.resource_parent.add_guest(guest)
+ # Bypass :confidential update model validation for testing purposes
+ note.update_attribute(:confidential, true)
+ note.store_mentions!
+ end
+
+ it 'stores only mentioned users that have permissions' do
+ expect(mentionable.referenced_users).to contain_exactly(mentioned_user)
+ end
+ end
+ end
+
context 'when private projects and groups are mentioned' do
let(:mega_user) { create(:user) }
let(:private_project) { create(:project, :private) }
diff --git a/spec/support/shared_examples/quick_actions/issue/clone_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issue/clone_quick_action_shared_examples.rb
index ab04692616a..d42e925ed22 100644
--- a/spec/support/shared_examples/quick_actions/issue/clone_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issue/clone_quick_action_shared_examples.rb
@@ -89,10 +89,13 @@ RSpec.shared_examples 'clone quick action' do
let(:bug) { create(:label, project: project, title: 'bug') }
let(:wontfix) { create(:label, project: project, title: 'wontfix') }
- let!(:target_milestone) { create(:milestone, title: '1.0', project: target_project) }
-
before do
target_project.add_maintainer(user)
+
+ # create equivalent labels and milestones in the target project
+ create(:label, project: target_project, title: 'bug')
+ create(:label, project: target_project, title: 'wontfix')
+ create(:milestone, title: '1.0', project: target_project)
end
shared_examples 'applies the commands to issues in both projects, target and source' do
diff --git a/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb
index e6b0772aec1..bb2f8965294 100644
--- a/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb
@@ -1,6 +1,10 @@
# frozen_string_literal: true
RSpec.shared_examples 'conan ping endpoint' do
+ it_behaves_like 'conan FIPS mode' do
+ subject { get api(url) }
+ end
+
it 'responds with 200 OK when no token provided' do
get api(url)
@@ -68,7 +72,7 @@ RSpec.shared_examples 'conan search endpoint' do
project.update!(visibility: 'private')
project.team.truncate
user.project_authorizations.delete_all
- project.add_user(user, role) unless role == :anonymous
+ project.add_member(user, role) unless role == :anonymous
get api(url), params: params, headers: headers
end
@@ -85,6 +89,8 @@ end
RSpec.shared_examples 'conan authenticate endpoint' do
subject { get api(url), headers: headers }
+ it_behaves_like 'conan FIPS mode'
+
context 'when using invalid token' do
let(:auth_token) { 'invalid_token' }
@@ -159,6 +165,10 @@ RSpec.shared_examples 'conan authenticate endpoint' do
end
RSpec.shared_examples 'conan check_credentials endpoint' do
+ it_behaves_like 'conan FIPS mode' do
+ subject { get api(url), headers: headers }
+ end
+
it 'responds with a 200 OK with PAT' do
get api(url), headers: headers
@@ -390,6 +400,7 @@ end
RSpec.shared_examples 'recipe snapshot endpoint' do
subject { get api(url), headers: headers }
+ it_behaves_like 'conan FIPS mode'
it_behaves_like 'rejects invalid recipe'
it_behaves_like 'rejects recipe for invalid project'
it_behaves_like 'empty recipe for not found package'
@@ -415,6 +426,7 @@ end
RSpec.shared_examples 'package snapshot endpoint' do
subject { get api(url), headers: headers }
+ it_behaves_like 'conan FIPS mode'
it_behaves_like 'rejects invalid recipe'
it_behaves_like 'rejects recipe for invalid project'
it_behaves_like 'empty recipe for not found package'
@@ -436,6 +448,10 @@ RSpec.shared_examples 'package snapshot endpoint' do
end
RSpec.shared_examples 'recipe download_urls endpoint' do
+ it_behaves_like 'conan FIPS mode' do
+ let(:recipe_path) { package.conan_recipe_path }
+ end
+
it_behaves_like 'rejects invalid recipe'
it_behaves_like 'rejects recipe for invalid project'
it_behaves_like 'recipe download_urls'
@@ -443,6 +459,10 @@ RSpec.shared_examples 'recipe download_urls endpoint' do
end
RSpec.shared_examples 'package download_urls endpoint' do
+ it_behaves_like 'conan FIPS mode' do
+ let(:recipe_path) { package.conan_recipe_path }
+ end
+
it_behaves_like 'rejects invalid recipe'
it_behaves_like 'rejects recipe for invalid project'
it_behaves_like 'package download_urls'
@@ -457,6 +477,7 @@ RSpec.shared_examples 'recipe upload_urls endpoint' do
'conanmanifest.txt': 123 }
end
+ it_behaves_like 'conan FIPS mode'
it_behaves_like 'rejects invalid recipe'
it_behaves_like 'rejects invalid upload_url params'
it_behaves_like 'handling empty values for username and channel'
@@ -519,6 +540,7 @@ RSpec.shared_examples 'package upload_urls endpoint' do
'conan_package.tgz': 523 }
end
+ it_behaves_like 'conan FIPS mode'
it_behaves_like 'rejects invalid recipe'
it_behaves_like 'rejects invalid upload_url params'
it_behaves_like 'handling empty values for username and channel'
@@ -556,6 +578,7 @@ end
RSpec.shared_examples 'delete package endpoint' do
let(:recipe_path) { package.conan_recipe_path }
+ it_behaves_like 'conan FIPS mode'
it_behaves_like 'rejects invalid recipe'
it_behaves_like 'handling empty values for username and channel'
@@ -665,6 +688,7 @@ RSpec.shared_examples 'not found request' do
end
RSpec.shared_examples 'recipe file download endpoint' do
+ it_behaves_like 'conan FIPS mode'
it_behaves_like 'a public project with packages'
it_behaves_like 'an internal project with packages'
it_behaves_like 'a private project with packages'
@@ -672,6 +696,7 @@ RSpec.shared_examples 'recipe file download endpoint' do
end
RSpec.shared_examples 'package file download endpoint' do
+ it_behaves_like 'conan FIPS mode'
it_behaves_like 'a public project with packages'
it_behaves_like 'an internal project with packages'
it_behaves_like 'a private project with packages'
@@ -697,6 +722,7 @@ RSpec.shared_examples 'project not found by project id' do
end
RSpec.shared_examples 'workhorse authorize endpoint' do
+ it_behaves_like 'conan FIPS mode'
it_behaves_like 'rejects invalid recipe'
it_behaves_like 'rejects invalid file_name', 'conanfile.py.git%2fgit-upload-pack'
it_behaves_like 'workhorse authorization'
@@ -718,6 +744,7 @@ RSpec.shared_examples 'workhorse recipe file upload endpoint' do
)
end
+ it_behaves_like 'conan FIPS mode'
it_behaves_like 'rejects invalid recipe'
it_behaves_like 'rejects invalid file_name', 'conanfile.py.git%2fgit-upload-pack'
it_behaves_like 'uploads a package file'
@@ -979,3 +1006,9 @@ RSpec.shared_examples 'workhorse authorization' do
end
end
end
+
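+# Included by the conan endpoint examples above; the including context supplies the request `subject` (and `recipe_path` where needed).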
+RSpec.shared_examples 'conan FIPS mode' do
+ context 'when FIPS mode is enabled', :fips_mode do
+ it_behaves_like 'returning response status', :not_found
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/debian_common_shared_examples.rb b/spec/support/shared_examples/requests/api/debian_common_shared_examples.rb
index e0225070986..2ba42b8e8fa 100644
--- a/spec/support/shared_examples/requests/api/debian_common_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/debian_common_shared_examples.rb
@@ -15,3 +15,9 @@ RSpec.shared_examples 'rejects Debian access with unknown container id' do |anon
end
end
end
+
+RSpec.shared_examples 'Debian API FIPS mode' do
+ context 'when FIPS mode is enabled', :fips_mode do
+ it_behaves_like 'returning response status', :not_found
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/debian_distributions_shared_examples.rb b/spec/support/shared_examples/requests/api/debian_distributions_shared_examples.rb
index 5cd63c33936..f13ac05591c 100644
--- a/spec/support/shared_examples/requests/api/debian_distributions_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/debian_distributions_shared_examples.rb
@@ -3,6 +3,8 @@
RSpec.shared_examples 'Debian distributions GET request' do |status, body = nil|
and_body = body.nil? ? '' : ' and expected body'
+ it_behaves_like 'Debian API FIPS mode'
+
it "returns #{status}#{and_body}" do
subject
@@ -17,6 +19,8 @@ end
RSpec.shared_examples 'Debian distributions PUT request' do |status, body|
and_body = body.nil? ? '' : ' and expected body'
+ it_behaves_like 'Debian API FIPS mode'
+
if status == :success
it 'updates distribution', :aggregate_failures do
expect(::Packages::Debian::UpdateDistributionService).to receive(:new).with(distribution, api_params.except(:codename)).and_call_original
@@ -49,6 +53,8 @@ end
RSpec.shared_examples 'Debian distributions DELETE request' do |status, body|
and_body = body.nil? ? '' : ' and expected body'
+ it_behaves_like 'Debian API FIPS mode'
+
if status == :success
it 'updates distribution', :aggregate_failures do
expect { subject }
diff --git a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
index 9f96cb2a164..de7032450a5 100644
--- a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
@@ -3,6 +3,8 @@
RSpec.shared_examples 'Debian packages GET request' do |status, body = nil|
and_body = body.nil? ? '' : ' and expected body'
+ it_behaves_like 'Debian API FIPS mode'
+
it "returns #{status}#{and_body}" do
subject
@@ -17,6 +19,8 @@ end
RSpec.shared_examples 'Debian packages upload request' do |status, body = nil|
and_body = body.nil? ? '' : ' and expected body'
+ it_behaves_like 'Debian API FIPS mode'
+
if status == :created
it 'creates package files', :aggregate_failures do
expect(::Packages::Debian::FindOrCreateIncomingService).to receive(:new).with(container, user).and_call_original
diff --git a/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
index e534a02e562..8ab820e9d43 100644
--- a/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
@@ -64,7 +64,8 @@ RSpec.shared_examples 'group and project boards query' do
context 'when ascending' do
it_behaves_like 'sorted paginated query' do
- let(:sort_param) { }
+ include_context 'no sort argument'
+
let(:first_param) { 2 }
def pagination_results_data(nodes)
diff --git a/spec/support/shared_examples/requests/api/graphql/mutations/snippets_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/mutations/snippets_shared_examples.rb
index a42a1fda62e..b459e479c91 100644
--- a/spec/support/shared_examples/requests/api/graphql/mutations/snippets_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/graphql/mutations/snippets_shared_examples.rb
@@ -22,7 +22,7 @@ RSpec.shared_examples 'snippet edit usage data counters' do
context 'when user is not sessionless', :clean_gitlab_redis_sessions do
before do
- stub_session('warden.user.user.key' => [[current_user.id], current_user.encrypted_password[0, 29]])
+ stub_session('warden.user.user.key' => [[current_user.id], current_user.authenticatable_salt])
end
it 'tracks usage data actions', :clean_gitlab_redis_sessions do
diff --git a/spec/support/shared_examples/requests/api/hooks_shared_examples.rb b/spec/support/shared_examples/requests/api/hooks_shared_examples.rb
new file mode 100644
index 00000000000..013945bd578
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/hooks_shared_examples.rb
@@ -0,0 +1,415 @@
+# frozen_string_literal: true
+
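+# These shared examples expect the including spec to provide `hook`, `scope`, `user`,
+# `unauthorized_user`, `event_names`, the `hook_uri`/`collection_uri` helpers, and the
+# `match_hook_schema`/`match_collection_schema` matchers.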
+RSpec.shared_examples 'web-hook API endpoints test hook' do |prefix|
+ describe "POST #{prefix}/:hook_id" do
+ it 'tests the hook' do
+ expect(WebHookService)
+ .to receive(:new).with(hook, anything, String, force: false)
+ .and_return(instance_double(WebHookService, execute: nil))
+
+ post api(hook_uri, user)
+
+ expect(response).to have_gitlab_http_status(:created)
+ end
+ end
+end
+
+RSpec.shared_examples 'web-hook API endpoints with branch-filter' do |prefix|
+ describe "POST #{prefix}/hooks" do
+ it "returns a 422 error if branch filter is not valid" do
+ post api(collection_uri, user),
+ params: { url: "http://example.com", push_events_branch_filter: '~badbranchname/' }
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+end
+
+RSpec.shared_examples 'web-hook API endpoints' do |prefix|
+ def hooks_count
+ scope.count
+ end
+
+ def hook_param_overrides
+ if defined?(super)
+ super
+ else
+ { push_events_branch_filter: 'some-feature-branch' }
+ end
+ end
+
+ let(:hook_params) do
+ event_names.to_h { [_1, true] }.merge(hook_param_overrides).merge(
+ url: "http://example.com",
+ url_variables: [
+ { key: 'token', value: 'very-secret' },
+ { key: 'abc', value: 'other value' }
+ ]
+ )
+ end
+
+ let(:update_params) do
+ {
+ push_events: false,
+ job_events: true,
+ push_events_branch_filter: 'updated-branch-filter'
+ }
+ end
+
+ let(:default_values) { {} }
+
+ describe "GET #{prefix}/hooks" do
+ context "authorized user" do
+ it "returns all hooks" do
+ get api(collection_uri, user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_collection_schema
+ end
+ end
+
+ context "when user is forbidden" do
+ it "prevents access to hooks" do
+ get api(collection_uri, unauthorized_user)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context "when user is unauthorized" do
+ it "prevents access to hooks" do
+ get api(collection_uri, nil)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'the hook has URL variables' do
+ before do
+ hook.update!(url_variables: { 'token' => 'supers3cret' })
+ end
+
+ it 'returns the names of the url variables' do
+ get api(collection_uri, user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to contain_exactly(
+ a_hash_including(
+ 'url_variables' => [{ 'key' => 'token' }]
+ )
+ )
+ end
+ end
+ end
+
+ describe "GET #{prefix}/hooks/:hook_id" do
+ context "authorized user" do
+ it "returns a project hook" do
+ get api(hook_uri, user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_hook_schema
+
+ expect(json_response['url']).to eq(hook.url)
+ end
+
+ it "returns a 404 error if hook id is not available" do
+ get api(hook_uri(non_existing_record_id), user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ context 'the hook is disabled' do
+ before do
+ hook.disable!
+ end
+
+ it "has the correct alert status", :aggregate_failures do
+ get api(hook_uri, user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to include('alert_status' => 'disabled')
+ end
+ end
+
+ context 'the hook is backed-off' do
+ before do
+ hook.backoff!
+ end
+
+ it "has the correct alert status", :aggregate_failures do
+ get api(hook_uri, user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to include(
+ 'alert_status' => 'temporarily_disabled',
+ 'disabled_until' => hook.disabled_until.iso8601(3)
+ )
+ end
+ end
+ end
+
+ context "when user is forbidden" do
+ it "does not access an existing hook" do
+ get api(hook_uri, unauthorized_user)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context "when user is unauthorized" do
+ it "does not access an existing hook" do
+ get api(hook_uri, nil)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+
+ describe "POST #{prefix}/hooks" do
+ let(:hook_creation_params) { hook_params }
+
+ it "adds hook", :aggregate_failures do
+ expect do
+ post api(collection_uri, user),
+ params: hook_creation_params
+ end.to change { hooks_count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_hook_schema
+
+ expect(json_response['url']).to eq(hook_creation_params[:url])
+ hook_param_overrides.each do |k, v|
+ expect(json_response[k.to_s]).to eq(v)
+ end
+ event_names.each do |name|
+ expect(json_response[name.to_s]).to eq(true), name
+ end
+ expect(json_response['url_variables']).to match_array [
+ { 'key' => 'token' },
+ { 'key' => 'abc' }
+ ]
+ expect(json_response).not_to include('token')
+ end
+
+ it "adds the token without including it in the response" do
+ token = "secret token"
+
+ expect do
+ post api(collection_uri, user),
+ params: { url: "http://example.com", token: token }
+ end.to change { hooks_count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response["url"]).to eq("http://example.com")
+ expect(json_response).not_to include("token")
+
+ hook = scope.find(json_response["id"])
+
+ expect(hook.url).to eq("http://example.com")
+ expect(hook.token).to eq(token)
+ end
+
+ it "returns a 400 error if url not given" do
+ post api(collection_uri, user), params: { event_names.first => true }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it "returns a 400 error if no parameters are provided" do
+ post api(collection_uri, user)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'sets default values for events', :aggregate_failures do
+ post api(collection_uri, user), params: { url: 'http://mep.mep' }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_hook_schema
+ expect(json_response['enable_ssl_verification']).to be true
+ event_names.each do |name|
+ expect(json_response[name.to_s]).to eq(default_values.fetch(name, false)), name
+ end
+ end
+
+ it "returns a 422 error if token not valid" do
+ post api(collection_uri, user),
+ params: { url: "http://example.com", token: "foo\nbar" }
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+
+ it "returns a 422 error if url not valid" do
+ post api(collection_uri, user), params: { url: "ftp://example.com" }
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+
+ describe "PUT #{prefix}/hooks/:hook_id" do
+ it "updates an existing hook" do
+ put api(hook_uri, user), params: update_params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_hook_schema
+
+ update_params.each do |k, v|
+ expect(json_response[k.to_s]).to eq(v)
+ end
+ end
+
+ it 'updates the URL variables' do
+ hook.update!(url_variables: { 'abc' => 'some value' })
+
+ put api(hook_uri, user),
+ params: { url_variables: [{ key: 'def', value: 'other value' }] }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['url_variables']).to match_array [
+ { 'key' => 'abc' },
+ { 'key' => 'def' }
+ ]
+ end
+
+ it "adds the token without including it in the response" do
+ token = "secret token"
+
+ put api(hook_uri, user), params: { url: "http://example.org", token: token }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response["url"]).to eq("http://example.org")
+ expect(json_response).not_to include("token")
+
+ expect(hook.reload.url).to eq("http://example.org")
+ expect(hook.reload.token).to eq(token)
+ end
+
+ it "returns 404 error if hook id not found" do
+ put api(hook_uri(non_existing_record_id), user), params: { url: 'http://example.org' }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it "returns 400 error if no parameters are provided" do
+ put api(hook_uri, user)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it "returns a 422 error if url is not valid" do
+ put api(hook_uri, user), params: { url: 'ftp://example.com' }
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+
+ it "returns a 422 error if token is not valid" do
+ put api(hook_uri, user), params: { token: %w[foo bar].join("\n") }
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+
+ describe "DELETE /projects/:id/hooks/:hook_id" do
+ it "deletes hook from project" do
+ expect do
+ delete api(hook_uri, user)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end.to change { hooks_count }.by(-1)
+ end
+
+ it "returns a 404 error when deleting non existent hook" do
+ delete api(hook_uri(non_existing_record_id), user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it "returns a 404 error if hook id not given" do
+ delete api(collection_uri, user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it "returns forbidden if a user attempts to delete hooks they do not own" do
+ delete api(hook_uri, unauthorized_user)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(WebHook.exists?(hook.id)).to be_truthy
+ end
+
+ it_behaves_like '412 response' do
+ let(:request) { api(hook_uri, user) }
+ end
+ end
+
+ describe "PUT #{prefix}/hooks/:hook_id/url_variables/:key", :aggregate_failures do
+ it 'sets the variable' do
+ expect do
+ put api("#{hook_uri}/url_variables/abc", user),
+ params: { value: 'some secret value' }
+ end.to change { hook.reload.url_variables }.to(eq('abc' => 'some secret value'))
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+
+ it 'overwrites existing values' do
+ hook.update!(url_variables: { 'abc' => 'xyz', 'def' => 'other value' })
+
+ put api("#{hook_uri}/url_variables/abc", user),
+ params: { value: 'some secret value' }
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(hook.reload.url_variables).to eq('abc' => 'some secret value', 'def' => 'other value')
+ end
+
+ it "returns a 404 error when editing non existent hook" do
+ put api("#{hook_uri(non_existing_record_id)}/url_variables/abc", user),
+ params: { value: 'xyz' }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it "returns a 422 error when the key is illegal" do
+ put api("#{hook_uri}/url_variables/abc%20def", user),
+ params: { value: 'xyz' }
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+
+ it "returns a 422 error when the value is illegal" do
+ put api("#{hook_uri}/url_variables/abc", user),
+ params: { value: '' }
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+
+ describe "DELETE #{prefix}/hooks/:hook_id/url_variables/:key", :aggregate_failures do
+ before do
+ hook.update!(url_variables: { 'abc' => 'prior value', 'def' => 'other value' })
+ end
+
+ it 'unsets the variable' do
+ expect do
+ delete api("#{hook_uri}/url_variables/abc", user)
+ end.to change { hook.reload.url_variables }.to(eq({ 'def' => 'other value' }))
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+
+ it 'returns 404 for keys that do not exist' do
+ hook.update!(url_variables: { 'def' => 'other value' })
+
+ delete api("#{hook_uri}/url_variables/abc", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it "returns a 404 error when deleting a variable from a non existent hook" do
+ delete api(hook_uri(non_existing_record_id) + "/url_variables/abc", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/notes_shared_examples.rb b/spec/support/shared_examples/requests/api/notes_shared_examples.rb
index e7e30665b08..a59235486ec 100644
--- a/spec/support/shared_examples/requests/api/notes_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/notes_shared_examples.rb
@@ -275,7 +275,9 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
context 'when request exceeds the rate limit', :freeze_time, :clean_gitlab_redis_rate_limiting do
before do
stub_application_setting(notes_create_limit: 1)
- allow(::Gitlab::ApplicationRateLimiter).to receive(:increment).and_return(2)
+ allow_next_instance_of(Gitlab::ApplicationRateLimiter::BaseStrategy) do |strategy|
+ allow(strategy).to receive(:increment).and_return(2)
+ end
end
it 'prevents user from creating more notes' do
diff --git a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
index 795545e4ad1..1a248bb04e7 100644
--- a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples 'PyPI package creation' do |user_type, status, add_member = true|
+RSpec.shared_examples 'PyPI package creation' do |user_type, status, add_member = true, md5_digest = true|
RSpec.shared_examples 'creating pypi package files' do
it 'creates package files' do
expect { subject }
@@ -14,6 +14,17 @@ RSpec.shared_examples 'PyPI package creation' do |user_type, status, add_member
expect(package.name).to eq params[:name]
expect(package.version).to eq params[:version]
expect(package.pypi_metadatum.required_python).to eq params[:requires_python]
+
+ if md5_digest
+ expect(package.package_files.first.file_md5).not_to be_nil
+ else
+ expect(package.package_files.first.file_md5).to be_nil
+ end
+ end
+
+ context 'with FIPS mode', :fips_mode do
+ it_behaves_like 'returning response status', :unprocessable_entity if md5_digest
+ it_behaves_like 'returning response status', status unless md5_digest
end
end
diff --git a/spec/support/shared_examples/requests/api/terraform/modules/v1/packages_shared_examples.rb b/spec/support/shared_examples/requests/api/terraform/modules/v1/packages_shared_examples.rb
index 70cc9b1e6b5..544a0ed8fdd 100644
--- a/spec/support/shared_examples/requests/api/terraform/modules/v1/packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/terraform/modules/v1/packages_shared_examples.rb
@@ -52,6 +52,24 @@ RSpec.shared_examples 'an unimplemented route' do
it_behaves_like 'when package feature is disabled'
end
+RSpec.shared_examples 'redirects to version download' do |user_type, status, add_member = true|
+ context "for user type #{user_type}" do
+ before do
+ group.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ end
+
+ it_behaves_like 'returning response status', status
+
+ it 'returns a valid response' do
+ subject
+
+ expect(request.url).to include 'module-1/system/download'
+ expect(response.headers).to include 'Location'
+ expect(response.headers['Location']).to include 'module-1/system/1.0.1/download'
+ end
+ end
+end
+
RSpec.shared_examples 'grants terraform module download' do |user_type, status, add_member = true|
context "for user type #{user_type}" do
before do
@@ -84,6 +102,22 @@ RSpec.shared_examples 'returns terraform module packages' do |user_type, status,
end
end
+RSpec.shared_examples 'returns terraform module version' do |user_type, status, add_member = true|
+ context "for user type #{user_type}" do
+ before do
+ group.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ end
+
+ it_behaves_like 'returning response status', status
+
+ it 'returns a valid response' do
+ subject
+
+ expect(json_response).to match_schema('public_api/v4/packages/terraform/modules/v1/single_version')
+ end
+ end
+end
+
RSpec.shared_examples 'returns no terraform module packages' do |user_type, status, add_member = true|
context "for user type #{user_type}" do
before do
diff --git a/spec/support/shared_examples/services/alert_management/alert_processing/alert_recovery_shared_examples.rb b/spec/support/shared_examples/services/alert_management/alert_processing/alert_recovery_shared_examples.rb
index 86e7da5bcbe..f8e096297d3 100644
--- a/spec/support/shared_examples/services/alert_management/alert_processing/alert_recovery_shared_examples.rb
+++ b/spec/support/shared_examples/services/alert_management/alert_processing/alert_recovery_shared_examples.rb
@@ -56,7 +56,7 @@ RSpec.shared_examples 'processes recovery alert' do
context 'seen for the first time' do
let(:alert) { AlertManagement::Alert.last }
- include_examples 'processes never-before-seen recovery alert'
+ it_behaves_like 'alerts service responds with an error and takes no actions', :bad_request
end
context 'for an existing alert with the same fingerprint' do
@@ -107,7 +107,7 @@ RSpec.shared_examples 'processes recovery alert' do
context 'which is resolved' do
let_it_be(:alert) { create(:alert_management_alert, :resolved, project: project, fingerprint: gitlab_fingerprint, monitoring_tool: source) }
- include_examples 'processes never-before-seen recovery alert'
+ it_behaves_like 'alerts service responds with an error and takes no actions', :bad_request
end
end
end
diff --git a/spec/support/shared_examples/services/alert_management/alert_processing/incident_resolution_shared_examples.rb b/spec/support/shared_examples/services/alert_management/alert_processing/incident_resolution_shared_examples.rb
index 132f1e0422e..3add5485fca 100644
--- a/spec/support/shared_examples/services/alert_management/alert_processing/incident_resolution_shared_examples.rb
+++ b/spec/support/shared_examples/services/alert_management/alert_processing/incident_resolution_shared_examples.rb
@@ -6,18 +6,24 @@
# - `alert`, alert for which related incidents should be closed
# - `project`, project of the alert
RSpec.shared_examples 'closes related incident if enabled' do
- context 'with issue' do
+ context 'with incident' do
before do
- alert.update!(issue: create(:issue, project: project))
+ alert.update!(issue: create(:incident, project: project))
end
- it { expect { subject }.to change { alert.issue.reload.closed? }.from(false).to(true) }
- it { expect { subject }.to change(ResourceStateEvent, :count).by(1) }
+ specify do
+ expect { Sidekiq::Testing.inline! { subject } }
+ .to change { alert.issue.reload.closed? }.from(false).to(true)
+ .and change(ResourceStateEvent, :count).by(1)
+ end
end
- context 'without issue' do
- it { expect { subject }.not_to change { alert.reload.issue } }
- it { expect { subject }.not_to change(ResourceStateEvent, :count) }
+ context 'without incident' do
+ specify do
+ expect(::IncidentManagement::CloseIncidentWorker).not_to receive(:perform_async)
+
+ subject
+ end
end
context 'with incident setting disabled' do
@@ -28,17 +34,23 @@ RSpec.shared_examples 'closes related incident if enabled' do
end
RSpec.shared_examples 'does not close related incident' do
- context 'with issue' do
+ context 'with incident' do
before do
- alert.update!(issue: create(:issue, project: project))
+ alert.update!(issue: create(:incident, project: project))
end
- it { expect { subject }.not_to change { alert.issue.reload.state } }
- it { expect { subject }.not_to change(ResourceStateEvent, :count) }
+ specify do
+ expect { Sidekiq::Testing.inline! { subject } }
+ .to not_change { alert.issue.reload.state }
+ .and not_change(ResourceStateEvent, :count)
+ end
end
- context 'without issue' do
- it { expect { subject }.not_to change { alert.reload.issue } }
- it { expect { subject }.not_to change(ResourceStateEvent, :count) }
+ context 'without incident' do
+ specify do
+ expect(::IncidentManagement::CloseIncidentWorker).not_to receive(:perform_async)
+
+ subject
+ end
end
end
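For context, a minimal sketch of how an including spec can satisfy the `alert` and `project` inputs documented at the top of this file; the described service and subject below are illustrative placeholders, not part of this patch.

RSpec.describe AlertManagement::SomeResolutionService do # hypothetical class
  let_it_be(:project) { create(:project) }
  let_it_be(:alert) { create(:alert_management_alert, project: project) }

  # `subject` is whatever action is expected to close the alert's incident.
  subject { described_class.new(project, alert).execute }

  it_behaves_like 'closes related incident if enabled'
end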
diff --git a/spec/support/shared_examples/services/alert_management_shared_examples.rb b/spec/support/shared_examples/services/alert_management_shared_examples.rb
index f644f1a1687..571cb7dc03d 100644
--- a/spec/support/shared_examples/services/alert_management_shared_examples.rb
+++ b/spec/support/shared_examples/services/alert_management_shared_examples.rb
@@ -68,14 +68,14 @@ RSpec.shared_examples 'processes one firing and one resolved prometheus alerts'
expect(Gitlab::AppLogger).not_to receive(:warn)
expect { subject }
- .to change(AlertManagement::Alert, :count).by(2)
- .and change(Note, :count).by(4)
+ .to change(AlertManagement::Alert, :count).by(1)
+ .and change(Note, :count).by(1)
expect(subject).to be_success
expect(subject.payload[:alerts]).to all(be_a_kind_of(AlertManagement::Alert))
- expect(subject.payload[:alerts].size).to eq(2)
+ expect(subject.payload[:alerts].size).to eq(1)
end
it_behaves_like 'processes incident issues'
- it_behaves_like 'sends alert notification emails', count: 2
+ it_behaves_like 'sends alert notification emails'
end
diff --git a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
index f18869fb380..3be59af6a37 100644
--- a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
@@ -1,10 +1,11 @@
# frozen_string_literal: true
RSpec.shared_context 'container registry auth service context' do
+ let_it_be(:rsa_key) { OpenSSL::PKey::RSA.generate(3072) }
+
let(:current_project) { nil }
let(:current_user) { nil }
let(:current_params) { {} }
- let(:rsa_key) { OpenSSL::PKey::RSA.generate(512) }
let(:payload) { JWT.decode(subject[:token], rsa_key, true, { algorithm: 'RS256' }).first }
let(:authentication_abilities) do
diff --git a/spec/support/shared_examples/services/feature_flags/client_shared_examples.rb b/spec/support/shared_examples/services/feature_flags/client_shared_examples.rb
new file mode 100644
index 00000000000..a62cffc0e1b
--- /dev/null
+++ b/spec/support/shared_examples/services/feature_flags/client_shared_examples.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+shared_examples_for 'update feature flag client' do
+ let!(:client) { create(:operations_feature_flags_client, project: project) }
+
+ it 'updates last feature flag updated at' do
+ freeze_time do
+ expect { subject }.to change { client.reload.last_feature_flag_updated_at }.from(nil).to(Time.current)
+ end
+ end
+end
+
+shared_examples_for 'does not update feature flag client' do
+ let!(:client) { create(:operations_feature_flags_client, project: project) }
+
+ it 'does not update last feature flag updated at' do
+ expect { subject }.not_to change { client.reload.last_feature_flag_updated_at }
+ end
+end
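A minimal usage sketch for these shared examples, assuming the including spec defines `project` and a `subject` that creates or updates a feature flag; the service name and params below are illustrative, not taken from this patch.

RSpec.describe FeatureFlags::UpdateService do # illustrative
  let_it_be(:project) { create(:project) }
  let_it_be(:user) { create(:user) }
  let(:feature_flag) { create(:operations_feature_flag, project: project) }

  subject { described_class.new(project, user, name: 'renamed_flag').execute(feature_flag) }

  it_behaves_like 'update feature flag client'
end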
diff --git a/spec/support/shared_examples/usage_data_counters/work_item_activity_unique_counter_shared_examples.rb b/spec/support/shared_examples/usage_data_counters/work_item_activity_unique_counter_shared_examples.rb
new file mode 100644
index 00000000000..4655585a092
--- /dev/null
+++ b/spec/support/shared_examples/usage_data_counters/work_item_activity_unique_counter_shared_examples.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'counter that does not track the event' do
+ it 'does not track the event' do
+ expect { 3.times { track_event } }.to not_change {
+ Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
+ event_names: event_name,
+ start_date: 2.weeks.ago,
+ end_date: 2.weeks.from_now
+ )
+ }
+ end
+end
+
+RSpec.shared_examples 'work item unique counter' do
+ context 'when track_work_items_activity FF is enabled' do
+ it 'tracks a unique event only once' do
+ expect { 3.times { track_event } }.to change {
+ Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
+ event_names: event_name,
+ start_date: 2.weeks.ago,
+ end_date: 2.weeks.from_now
+ )
+ }.by(1)
+ end
+
+ context 'when author is nil' do
+ let(:user) { nil }
+
+ it_behaves_like 'counter that does not track the event'
+ end
+ end
+
+ context 'when track_work_items_activity FF is disabled' do
+ before do
+ stub_feature_flags(track_work_items_activity: false)
+ end
+
+ it_behaves_like 'counter that does not track the event'
+ end
+end
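These counters expect the including spec to provide `track_event`, `event_name`, and `user`; a hedged sketch of that contract follows (the tracking method and constant names are assumptions, not defined in this patch).

RSpec.describe Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter do
  let(:user) { build(:user, id: 1) }

  describe '.track_work_item_created_action' do # assumed method name
    subject(:track_event) { described_class.track_work_item_created_action(author: user) }

    let(:event_name) { described_class::WORK_ITEM_CREATED } # assumed constant

    it_behaves_like 'work item unique counter'
  end
end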
diff --git a/spec/support/shared_examples/views/themed_layout_examples.rb b/spec/support/shared_examples/views/themed_layout_examples.rb
new file mode 100644
index 00000000000..b6c53dce4cb
--- /dev/null
+++ b/spec/support/shared_examples/views/themed_layout_examples.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples "a layout which reflects the application theme setting", :themed_layout do
+ context 'as a themed layout' do
+ let(:default_theme_class) { ::Gitlab::Themes.default.css_class }
+
+ context 'when no theme is explicitly selected' do
+ it 'renders with the default theme' do
+ render
+
+ expect(rendered).to have_selector("body.#{default_theme_class}")
+ end
+ end
+
+ context 'when user is authenticated & has selected a specific theme' do
+ before do
+ allow(view).to receive(:user_application_theme).and_return(chosen_theme.css_class)
+ end
+
+ where(chosen_theme: ::Gitlab::Themes.available_themes)
+
+ with_them do
+ it "renders with the #{params[:chosen_theme].name} theme" do
+ render
+
+ if chosen_theme.css_class != default_theme_class
+ expect(rendered).not_to have_selector("body.#{default_theme_class}")
+ end
+
+ expect(rendered).to have_selector("body.#{chosen_theme.css_class}")
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/work_items/widgetable_service_shared_examples.rb b/spec/support/shared_examples/work_items/widgetable_service_shared_examples.rb
new file mode 100644
index 00000000000..491662d17d3
--- /dev/null
+++ b/spec/support/shared_examples/work_items/widgetable_service_shared_examples.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples_for 'work item widgetable service' do
+ it 'executes callbacks for expected widgets' do
+ supported_widgets.each do |widget|
+ expect_next_instance_of(widget[:klass]) do |widget_instance|
+ expect(widget_instance).to receive(widget[:callback]).with(params: widget[:params])
+ end
+ end
+
+ service_execute
+ end
+end
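This shared example leans on two inputs from the including spec, `supported_widgets` and `service_execute`; here is a sketch under assumed names (the widget class, callback, and service are placeholders, not part of this patch).

RSpec.describe WorkItems::CreateService do # illustrative
  let_it_be(:project) { create(:project) }
  let_it_be(:user) { create(:user) }
  let(:params) { { title: 'New work item' } }
  let(:widget_params) { { description_widget: { description: 'changed' } } }

  # Each entry names the widget service class expected to be instantiated,
  # the callback it should receive, and the params passed to that callback.
  let(:supported_widgets) do
    [{ klass: WorkItems::Widgets::DescriptionService::CreateService, # assumed class
       callback: :before_create_callback,                            # assumed callback
       params: { description: 'changed' } }]
  end

  subject(:service_execute) do
    described_class.new(project: project, current_user: user, params: params, widget_params: widget_params).execute
  end

  it_behaves_like 'work item widgetable service'
end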
diff --git a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
index 54962eac100..1da21633504 100644
--- a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
+++ b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
@@ -229,6 +229,7 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
describe 'executing an entire migration', :freeze_time, if: Gitlab::Database.has_config?(tracking_database) do
include Gitlab::Database::DynamicModelHelpers
+ include Database::DatabaseHelpers
let(:migration_class) do
Class.new(Gitlab::BackgroundMigration::BatchedMigrationJob) do
@@ -347,5 +348,20 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
it 'does not update non-matching records in the range' do
expect { full_migration_run }.not_to change { example_data.where('status <> 1 AND some_column <> 0').count }
end
+
+ context 'health status' do
+ subject(:migration_run) { described_class.new.perform }
+
+ it 'puts migration on hold when there is autovacuum activity on related tables' do
+ swapout_view_for_table(:postgres_autovacuum_activity, connection: connection)
+ create(
+ :postgres_autovacuum_activity,
+ table: migration.table_name,
+ table_identifier: "public.#{migration.table_name}"
+ )
+
+ expect { migration_run }.to change { migration.reload.on_hold? }.from(false).to(true)
+ end
+ end
end
end
diff --git a/spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb b/spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb
index 77c4a3431e2..503e331ea2e 100644
--- a/spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb
+++ b/spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb
@@ -1,10 +1,6 @@
# frozen_string_literal: true
-require 'fileutils'
-
RSpec.shared_examples 'can collect git garbage' do |update_statistics: true|
- include GitHelpers
-
let!(:lease_uuid) { SecureRandom.uuid }
let!(:lease_key) { "resource_housekeeping:#{resource.id}" }
let(:params) { [resource.id, task, lease_key, lease_uuid] }
@@ -246,39 +242,6 @@ RSpec.shared_examples 'can collect git garbage' do |update_statistics: true|
subject.perform(resource.id, 'prune', lease_key, lease_uuid)
end
-
- # Create a new commit on a random new branch
- def create_objects(resource)
- rugged = rugged_repo(resource.repository)
- old_commit = rugged.branches.first.target
- new_commit_sha = Rugged::Commit.create(
- rugged,
- message: "hello world #{SecureRandom.hex(6)}",
- author: { email: 'foo@bar', name: 'baz' },
- committer: { email: 'foo@bar', name: 'baz' },
- tree: old_commit.tree,
- parents: [old_commit]
- )
- rugged.references.create("refs/heads/#{SecureRandom.hex(6)}", new_commit_sha)
- end
-
- def packs(resource)
- Dir["#{path_to_repo}/objects/pack/*.pack"]
- end
-
- def packed_refs(resource)
- path = File.join(path_to_repo, 'packed-refs')
- FileUtils.touch(path)
- File.read(path)
- end
-
- def path_to_repo
- @path_to_repo ||= File.join(TestEnv.repos_path, resource.repository.relative_path)
- end
-
- def bitmap_path(pack)
- pack.sub(/\.pack\z/, '.bitmap')
- end
end
context 'with bitmaps enabled' do
diff --git a/spec/support/snowplow.rb b/spec/support/snowplow.rb
deleted file mode 100644
index e58be667b37..00000000000
--- a/spec/support/snowplow.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-
-require_relative 'stub_snowplow'
-
-RSpec.configure do |config|
- config.include SnowplowHelpers, :snowplow
- config.include StubSnowplow, :snowplow
-
- config.before(:each, :snowplow) do
- stub_snowplow
- end
-
- config.after(:each, :snowplow) do
- Gitlab::Tracking.send(:snowplow).send(:tracker).flush
- end
-end
diff --git a/spec/support_specs/graphql/arguments_spec.rb b/spec/support_specs/graphql/arguments_spec.rb
index ffb58503a0e..925af1ab79c 100644
--- a/spec/support_specs/graphql/arguments_spec.rb
+++ b/spec/support_specs/graphql/arguments_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe Graphql::Arguments do
float: 2.7,
string: %q[he said "no"],
enum: :OFF,
- null: nil, # we expect this to be omitted - absence is the same as explicit nullness
+ null: nil,
bool_true: true,
bool_false: false,
var: ::Graphql::Var.new('x', 'Int')
@@ -64,6 +64,7 @@ RSpec.describe Graphql::Arguments do
'int: 42, float: 2.7',
%q(string: "he said \\"no\\""),
'enum: OFF',
+ 'null: null',
'boolTrue: true, boolFalse: false',
'var: $x'
].join(', '))
diff --git a/spec/support_specs/helpers/graphql_helpers_spec.rb b/spec/support_specs/helpers/graphql_helpers_spec.rb
index f567097af6f..c02e4adf983 100644
--- a/spec/support_specs/helpers/graphql_helpers_spec.rb
+++ b/spec/support_specs/helpers/graphql_helpers_spec.rb
@@ -305,6 +305,7 @@ RSpec.describe GraphqlHelpers do
aFloat: 0.1,
aString: "wibble",
anEnum: LOW,
+ null: null,
aBool: false,
aVar: #{x.to_graphql_value}
EXP
diff --git a/spec/tasks/dev_rake_spec.rb b/spec/tasks/dev_rake_spec.rb
index fa093db414f..14a5ccfa323 100644
--- a/spec/tasks/dev_rake_spec.rb
+++ b/spec/tasks/dev_rake_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe 'dev rake tasks' do
subject(:setup_task) { run_rake_task('dev:setup') }
- let(:connections) { Gitlab::Database.database_base_models.values.map(&:connection) }
+ let(:connections) { Gitlab::Database.database_base_models_with_gitlab_shared.values.map(&:connection) }
it 'sets up the development environment', :aggregate_failures do
expect(Rake::Task['gitlab:setup']).to receive(:invoke)
@@ -50,8 +50,12 @@ RSpec.describe 'dev rake tasks' do
end
describe 'terminate_all_connections' do
+ before do
+ allow(ActiveRecord::Base).to receive(:clear_all_connections!)
+ end
+
let(:connections) do
- Gitlab::Database.database_base_models.values.filter_map do |model|
+ Gitlab::Database.database_base_models_with_gitlab_shared.values.filter_map do |model|
model.connection if Gitlab::Database.db_config_share_with(model.connection_db_config).nil?
end
end
@@ -75,6 +79,8 @@ RSpec.describe 'dev rake tasks' do
it 'terminates all connections' do
expect_connections_to_be_terminated
+ expect(ActiveRecord::Base).to receive(:clear_all_connections!)
+
terminate_task
end
@@ -82,6 +88,7 @@ RSpec.describe 'dev rake tasks' do
it 'does not terminate connections' do
expect(Rails.env).to receive(:production?).and_return(true)
expect_connections_not_to_be_terminated
+ expect(ActiveRecord::Base).not_to receive(:clear_all_connections!)
terminate_task
end
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index 4a3b81a072f..9e914f8202e 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -348,14 +348,16 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
project_a = create(:project, :repository)
project_snippet_a = create(:project_snippet, :repository, project: project_a, author: project_a.first_owner)
project_b = create(:project, :repository, repository_storage: second_storage_name)
- project_snippet_b = create(:project_snippet, :repository, project: project_b, author: project_b.first_owner)
- project_snippet_b.snippet_repository.update!(shard: project_b.project_repository.shard)
+ project_snippet_b = create(
+ :project_snippet,
+ :repository,
+ project: project_b,
+ author: project_b.first_owner,
+ repository_storage: second_storage_name
+ )
create(:wiki_page, container: project_a)
create(:design, :with_file, issue: create(:issue, project: project_a))
- move_repository_to_secondary(project_b)
- move_repository_to_secondary(project_snippet_b)
-
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
tar_contents, exit_status = Gitlab::Popen.popen(
@@ -400,14 +402,16 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
project_a = create(:project, :repository)
project_snippet_a = create(:project_snippet, :repository, project: project_a, author: project_a.first_owner)
project_b = create(:project, :repository, repository_storage: second_storage_name)
- project_snippet_b = create(:project_snippet, :repository, project: project_b, author: project_b.first_owner)
- project_snippet_b.snippet_repository.update!(shard: project_b.project_repository.shard)
+ project_snippet_b = create(
+ :project_snippet,
+ :repository,
+ project: project_b,
+ author: project_b.first_owner,
+ repository_storage: second_storage_name
+ )
create(:wiki_page, container: project_a)
create(:design, :with_file, issue: create(:issue, project: project_a))
- move_repository_to_secondary(project_b)
- move_repository_to_secondary(project_snippet_b)
-
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
tar_contents, exit_status = Gitlab::Popen.popen(
@@ -435,21 +439,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
end
end
-
- def move_repository_to_secondary(record)
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- default_shard_legacy_path = Gitlab.config.repositories.storages.default.legacy_disk_path
- secondary_legacy_path = Gitlab.config.repositories.storages[second_storage_name].legacy_disk_path
- dst_dir = File.join(secondary_legacy_path, File.dirname(record.disk_path))
-
- FileUtils.mkdir_p(dst_dir) unless Dir.exist?(dst_dir)
-
- FileUtils.mv(
- File.join(default_shard_legacy_path, record.disk_path + '.git'),
- File.join(secondary_legacy_path, record.disk_path + '.git')
- )
- end
- end
end
context 'concurrency settings' do
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index d8199c09ca1..74bec406947 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
before do
skip_unless_ci_uses_database_tasks
- allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models)
+ allow(Gitlab::Database).to receive(:database_base_models_with_gitlab_shared).and_return(base_models)
end
it 'marks the migration complete on each database' do
@@ -90,7 +90,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
let(:base_models) { { 'main' => main_model } }
before do
- allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models)
+ allow(Gitlab::Database).to receive(:database_base_models_with_gitlab_shared).and_return(base_models)
end
it 'prints a warning message' do
@@ -110,7 +110,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
let(:base_models) { { 'main' => main_model } }
before do
- allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models)
+ allow(Gitlab::Database).to receive(:database_base_models_with_gitlab_shared).and_return(base_models)
end
it 'prints an error and exits' do
@@ -136,6 +136,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
context 'when geo is not configured' do
before do
allow(ActiveRecord::Base).to receive_message_chain('configurations.configs_for').and_return([main_config])
+ allow(Gitlab::Database).to receive(:has_config?).with(:geo).and_return(false)
end
context 'when the schema is already loaded' do
@@ -260,7 +261,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
before do
skip_unless_ci_uses_database_tasks
- allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models)
+ allow(Gitlab::Database).to receive(:database_base_models_with_gitlab_shared).and_return(base_models)
end
context 'when geo is not configured' do
@@ -444,7 +445,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
before do
skip_unless_ci_uses_database_tasks
- allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models)
+ allow(Gitlab::Database).to receive(:database_base_models_with_gitlab_shared).and_return(base_models)
allow(main_model.connection).to receive(:table_exists?).with('schema_migrations').and_return(true)
allow(ci_model.connection).to receive(:table_exists?).with('schema_migrations').and_return(true)
@@ -574,7 +575,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
before do
skip_if_multiple_databases_not_setup
- allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models)
+ allow(Gitlab::Database).to receive(:database_base_models_with_gitlab_shared).and_return(base_models)
end
it 'delegates to Gitlab::Database::Reindexing without a specific database' do
diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb
index f48ca5b8f8c..2f52c0fd36c 100644
--- a/spec/tooling/danger/project_helper_spec.rb
+++ b/spec/tooling/danger/project_helper_spec.rb
@@ -229,7 +229,6 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'app/serializers/jira_connect/app_data_serializer.rb' | [:integrations_be, :backend]
'lib/api/github/entities.rb' | [:integrations_be, :backend]
'lib/api/v3/github.rb' | [:integrations_be, :backend]
- 'app/models/clusters/integrations/elastic_stack.rb' | [:backend]
'app/controllers/clusters/integrations_controller.rb' | [:backend]
'app/services/clusters/integrations/prometheus_health_check_service.rb' | [:backend]
'app/graphql/types/alert_management/integration_type.rb' | [:backend]
@@ -271,6 +270,8 @@ RSpec.describe Tooling::Danger::ProjectHelper do
[:integrations_be, :backend] | '+ Integrations::Foo' | ['app/foo/bar.rb']
[:integrations_be, :backend] | '+ project.execute_hooks(foo, :bar)' | ['ee/lib/ee/foo.rb']
[:integrations_be, :backend] | '+ project.execute_integrations(foo, :bar)' | ['app/foo.rb']
+ [:frontend, :product_intelligence] | '+ api.trackRedisCounterEvent("foo")' | ['app/assets/javascripts/telemetry.js', 'ee/app/assets/javascripts/mr_widget.vue']
+ [:frontend, :product_intelligence] | '+ api.trackRedisHllUserEvent("bar")' | ['app/assets/javascripts/telemetry.js', 'ee/app/assets/javascripts/mr_widget.vue']
end
with_them do
diff --git a/spec/views/admin/application_settings/general.html.haml_spec.rb b/spec/views/admin/application_settings/general.html.haml_spec.rb
index 3614090d3cb..c7d156cde39 100644
--- a/spec/views/admin/application_settings/general.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/general.html.haml_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'admin/application_settings/general.html.haml' do
- let(:app_settings) { build(:application_setting) }
+ let(:app_settings) { Gitlab::CurrentSettings.current_application_settings }
let(:user) { create(:admin) }
before do
@@ -97,4 +97,33 @@ RSpec.describe 'admin/application_settings/general.html.haml' do
expect(rendered).to match ' data-minimum-password-length='
end
end
+
+ describe 'error tracking integration' do
+ context 'with error tracking feature flag enabled' do
+ before do
+ stub_feature_flags(gitlab_error_tracking: true)
+
+ render
+ end
+
+ it 'expects error tracking settings to be available' do
+ expect(rendered).to have_field('application_setting_error_tracking_api_url')
+ end
+
+ it 'expects display token and reset token to be available' do
+ expect(rendered).to have_content(app_settings.error_tracking_access_token)
+ expect(rendered).to have_button('Reset error tracking access token')
+ end
+ end
+
+ context 'with error tracking feature flag disabled' do
+ it 'expects error tracking settings to not be available' do
+ stub_feature_flags(gitlab_error_tracking: false)
+
+ render
+
+ expect(rendered).not_to have_field('application_setting_error_tracking_api_url')
+ end
+ end
+ end
end
diff --git a/spec/views/dashboard/projects/_blank_state_welcome.html.haml_spec.rb b/spec/views/dashboard/projects/_blank_state_welcome.html.haml_spec.rb
new file mode 100644
index 00000000000..edec46ad0a3
--- /dev/null
+++ b/spec/views/dashboard/projects/_blank_state_welcome.html.haml_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'dashboard/projects/_blank_state_welcome.html.haml' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ allow(view).to receive(:current_user).and_return(user)
+ end
+
+ it 'has a doc_url' do
+ render
+
+ expect(rendered).to have_link(href: Gitlab::Saas.doc_url)
+ end
+end
diff --git a/spec/views/devise/sessions/new.html.haml_spec.rb b/spec/views/devise/sessions/new.html.haml_spec.rb
index e8232a2c067..b3cd1493149 100644
--- a/spec/views/devise/sessions/new.html.haml_spec.rb
+++ b/spec/views/devise/sessions/new.html.haml_spec.rb
@@ -63,6 +63,36 @@ RSpec.describe 'devise/sessions/new' do
end
end
+ describe 'Google Tag Manager' do
+ let!(:gtm_id) { 'GTM-WWKMTWS' }
+
+ subject { rendered }
+
+ before do
+ stub_devise
+ disable_captcha
+ stub_config(extra: { google_tag_manager_id: gtm_id, google_tag_manager_nonce_id: gtm_id })
+ end
+
+ describe 'when Google Tag Manager is enabled' do
+ before do
+ enable_gtm
+ render
+ end
+
+ it { is_expected.to match /www.googletagmanager.com/ }
+ end
+
+ describe 'when Google Tag Manager is disabled' do
+ before do
+ disable_gtm
+ render
+ end
+
+ it { is_expected.not_to match /www.googletagmanager.com/ }
+ end
+ end
+
def disable_other_signin_methods
allow(view).to receive(:password_authentication_enabled_for_web?).and_return(false)
allow(view).to receive(:omniauth_enabled?).and_return(false)
@@ -94,4 +124,12 @@ RSpec.describe 'devise/sessions/new' do
allow(view).to receive(:captcha_enabled?).and_return(false)
allow(view).to receive(:captcha_on_login_required?).and_return(false)
end
+
+ def disable_gtm
+ allow(view).to receive(:google_tag_manager_enabled?).and_return(false)
+ end
+
+ def enable_gtm
+ allow(view).to receive(:google_tag_manager_enabled?).and_return(true)
+ end
end
diff --git a/spec/views/errors/omniauth_error.html.haml_spec.rb b/spec/views/errors/omniauth_error.html.haml_spec.rb
new file mode 100644
index 00000000000..e99cb536bd8
--- /dev/null
+++ b/spec/views/errors/omniauth_error.html.haml_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'errors/omniauth_error' do
+ let(:provider) { FFaker::Product.brand }
+ let(:error) { FFaker::Lorem.sentence }
+
+ before do
+ assign(:provider, provider)
+ assign(:error, error)
+ end
+
+ it 'renders template' do
+ render
+
+ expect(rendered).to have_content(provider)
+ expect(rendered).to have_content(_('Sign-in failed because %{error}.') % { error: error })
+ expect(rendered).to have_link('Sign in')
+ expect(rendered).to have_content(_('If none of the options work, try contacting a GitLab administrator.'))
+ end
+end
diff --git a/spec/views/groups/edit.html.haml_spec.rb b/spec/views/groups/edit.html.haml_spec.rb
index eaa909a5da0..ddcfea0ab10 100644
--- a/spec/views/groups/edit.html.haml_spec.rb
+++ b/spec/views/groups/edit.html.haml_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'groups/edit.html.haml' do
render
- expect(rendered).to have_content("Prevent sharing a project within #{test_group.name} with other groups")
+ expect(rendered).to have_content("Projects in #{test_group.name} cannot be shared with other groups")
expect(rendered).to have_content('help text here')
expect(rendered).to have_field('group_share_with_group_lock', **checkbox_options)
end
diff --git a/spec/views/groups/group_members/index.html.haml_spec.rb b/spec/views/groups/group_members/index.html.haml_spec.rb
index 40d4c9d33c9..2d7d50555d6 100644
--- a/spec/views/groups/group_members/index.html.haml_spec.rb
+++ b/spec/views/groups/group_members/index.html.haml_spec.rb
@@ -37,4 +37,16 @@ RSpec.describe 'groups/group_members/index', :aggregate_failures do
expect(rendered).not_to have_content('You can invite a new member')
end
end
+
+ context 'when @banned is nil' do
+ before do
+ assign(:banned, nil)
+ end
+
+ it 'calls group_members_app_data with { banned: [] }' do
+ expect(view).to receive(:group_members_app_data).with(group, a_hash_including(banned: []))
+
+ render
+ end
+ end
end
diff --git a/spec/views/layouts/_flash.html.haml_spec.rb b/spec/views/layouts/_flash.html.haml_spec.rb
index 82c06feb4fb..a4bed09368f 100644
--- a/spec/views/layouts/_flash.html.haml_spec.rb
+++ b/spec/views/layouts/_flash.html.haml_spec.rb
@@ -9,7 +9,11 @@ RSpec.describe 'layouts/_flash' do
end
describe 'closable flash messages' do
- %w(alert notice success).each do |flash_type|
+ where(:flash_type) do
+ %w[alert notice success]
+ end
+
+ with_them do
let(:flash) { { flash_type => 'This is a closable flash message' } }
it 'shows a close button' do
@@ -19,10 +23,14 @@ RSpec.describe 'layouts/_flash' do
end
describe 'non closable flash messages' do
- %w(error message toast warning).each do |flash_type|
+ where(:flash_type) do
+ %w[error message toast warning]
+ end
+
+ with_them do
let(:flash) { { flash_type => 'This is a non closable flash message' } }
- it 'shows a close button' do
+ it 'does not show a close button' do
expect(rendered).not_to include('js-close-icon')
end
end
diff --git a/spec/views/layouts/application.html.haml_spec.rb b/spec/views/layouts/application.html.haml_spec.rb
index 0f359219718..30c27078ad8 100644
--- a/spec/views/layouts/application.html.haml_spec.rb
+++ b/spec/views/layouts/application.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'layouts/application' do
+RSpec.describe 'layouts/application', :themed_layout do
let(:user) { create(:user) }
before do
diff --git a/spec/views/layouts/devise.html.haml_spec.rb b/spec/views/layouts/devise.html.haml_spec.rb
new file mode 100644
index 00000000000..e69cf93cfb4
--- /dev/null
+++ b/spec/views/layouts/devise.html.haml_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'layouts/devise' do
+ it_behaves_like 'a layout which reflects the application theme setting'
+end
diff --git a/spec/views/layouts/devise_empty.html.haml_spec.rb b/spec/views/layouts/devise_empty.html.haml_spec.rb
new file mode 100644
index 00000000000..06d742e74dd
--- /dev/null
+++ b/spec/views/layouts/devise_empty.html.haml_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'layouts/devise_empty' do
+ it_behaves_like 'a layout which reflects the application theme setting'
+end
diff --git a/spec/views/layouts/fullscreen.html.haml_spec.rb b/spec/views/layouts/fullscreen.html.haml_spec.rb
new file mode 100644
index 00000000000..0ae2c76ebcb
--- /dev/null
+++ b/spec/views/layouts/fullscreen.html.haml_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'layouts/fullscreen' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ allow(view).to receive(:current_user_mode).and_return(Gitlab::Auth::CurrentUserMode.new(user))
+ end
+
+ it_behaves_like 'a layout which reflects the application theme setting'
+end
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index 3943355bffd..9ae3f814679 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -419,50 +419,6 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
- describe 'Logs' do
- it 'has a link to the pod logs page' do
- render
-
- expect(rendered).to have_link('Logs', href: project_logs_path(project))
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the pod logs page' do
- render
-
- expect(rendered).not_to have_link('Logs')
- end
- end
- end
-
- describe 'Tracing' do
- it 'has a link to the tracing page' do
- render
-
- expect(rendered).to have_link('Tracing', href: project_tracing_path(project))
- end
-
- context 'without project.tracing_external_url' do
- it 'has a link to the tracing page' do
- render
-
- expect(rendered).to have_link('Tracing', href: project_tracing_path(project))
- end
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the tracing page' do
- render
-
- expect(rendered).not_to have_text 'Tracing'
- end
- end
- end
-
describe 'Error Tracking' do
it 'has a link to the error tracking page' do
render
@@ -576,7 +532,7 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
describe 'Google Cloud' do
it 'has a link to the google cloud page' do
render
- expect(rendered).to have_link('Google Cloud', href: project_google_cloud_index_path(project))
+ expect(rendered).to have_link('Google Cloud', href: project_google_cloud_configuration_path(project))
end
describe 'when the user does not have access' do
@@ -953,8 +909,11 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
describe 'Packages & Registries' do
+ let(:packages_enabled) { false }
+
before do
stub_container_registry_config(enabled: registry_enabled)
+ stub_config(packages: { enabled: packages_enabled })
end
context 'when registry is enabled' do
@@ -976,6 +935,17 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
expect(rendered).not_to have_link('Packages & Registries', href: project_settings_packages_and_registries_path(project))
end
end
+
+ context 'when packages config is enabled' do
+ let(:registry_enabled) { false }
+ let(:packages_enabled) { true }
+
+ it 'has a link to the Packages & Registries settings' do
+ render
+
+ expect(rendered).to have_link('Packages & Registries', href: project_settings_packages_and_registries_path(project))
+ end
+ end
end
describe 'Usage Quotas' do
diff --git a/spec/views/layouts/signup_onboarding.html.haml_spec.rb b/spec/views/layouts/signup_onboarding.html.haml_spec.rb
new file mode 100644
index 00000000000..8748c673616
--- /dev/null
+++ b/spec/views/layouts/signup_onboarding.html.haml_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'layouts/signup_onboarding' do
+ it_behaves_like 'a layout which reflects the application theme setting'
+end
diff --git a/spec/views/layouts/simple_registration.html.haml_spec.rb b/spec/views/layouts/simple_registration.html.haml_spec.rb
new file mode 100644
index 00000000000..98553a12ad8
--- /dev/null
+++ b/spec/views/layouts/simple_registration.html.haml_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'layouts/simple_registration' do
+ it_behaves_like 'a layout which reflects the application theme setting'
+end
diff --git a/spec/views/layouts/terms.html.haml_spec.rb b/spec/views/layouts/terms.html.haml_spec.rb
new file mode 100644
index 00000000000..520882449c5
--- /dev/null
+++ b/spec/views/layouts/terms.html.haml_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'layouts/terms' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ allow(view).to receive(:current_user_mode).and_return(Gitlab::Auth::CurrentUserMode.new(user))
+ end
+
+ it_behaves_like 'a layout which reflects the application theme setting'
+end
diff --git a/spec/views/projects/commits/_commit.html.haml_spec.rb b/spec/views/projects/commits/_commit.html.haml_spec.rb
index da93871e0e4..2ca23d4cb2d 100644
--- a/spec/views/projects/commits/_commit.html.haml_spec.rb
+++ b/spec/views/projects/commits/_commit.html.haml_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'projects/commits/_commit.html.haml' do
- let(:template) { 'projects/commits/commit.html.haml' }
+ let(:template) { 'projects/commits/commit' }
let(:project) { create(:project, :repository) }
let(:commit) { project.repository.commit(ref) }
diff --git a/spec/views/projects/issues/_issue.html.haml_spec.rb b/spec/views/projects/issues/_issue.html.haml_spec.rb
new file mode 100644
index 00000000000..29bef557304
--- /dev/null
+++ b/spec/views/projects/issues/_issue.html.haml_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'projects/issues/_issue.html.haml' do
+ before do
+ assign(:project, issue.project)
+ assign(:issuable_meta_data, {
+ issue.id => Gitlab::IssuableMetadata::IssuableMeta.new(1, 1, 1, 1)
+ })
+
+ render partial: 'projects/issues/issue', locals: { issue: issue }
+ end
+
+ describe 'timestamp', :freeze_time do
+ context 'when issue is open' do
+ let(:issue) { create(:issue, updated_at: 1.day.ago) }
+
+ it 'shows last updated date' do
+ expect(rendered).to have_content("updated #{format_timestamp(1.day.ago)}")
+ end
+ end
+
+ context 'when issue is closed' do
+ let(:issue) { create(:issue, :closed, closed_at: 2.days.ago, updated_at: 1.day.ago) }
+
+ it 'shows closed date' do
+ expect(rendered).to have_content("closed #{format_timestamp(2.days.ago)}")
+ end
+ end
+
+ context 'when issue is closed but closed_at is empty' do
+ let(:issue) { create(:issue, :closed, closed_at: nil, updated_at: 1.day.ago) }
+
+ it 'shows last updated date' do
+ expect(rendered).to have_content("updated #{format_timestamp(1.day.ago)}")
+ end
+ end
+
+ def format_timestamp(time)
+ l(time, format: "%b %d, %Y")
+ end
+ end
+end
diff --git a/spec/views/projects/jobs/show.html.haml_spec.rb b/spec/views/projects/jobs/show.html.haml_spec.rb
index 8242d20a9e7..2ea3dc9f76a 100644
--- a/spec/views/projects/jobs/show.html.haml_spec.rb
+++ b/spec/views/projects/jobs/show.html.haml_spec.rb
@@ -27,7 +27,6 @@ RSpec.describe 'projects/jobs/show' do
it 'shows job vue app' do
expect(rendered).to have_css('#js-job-page')
- expect(rendered).not_to have_css('#js-bridge-page')
end
context 'when job is running' do
@@ -42,18 +41,4 @@ RSpec.describe 'projects/jobs/show' do
end
end
end
-
- context 'when showing a bridge job' do
- let(:bridge) { create(:ci_bridge, status: :pending) }
-
- before do
- assign(:build, bridge)
- render
- end
-
- it 'shows bridge vue app' do
- expect(rendered).to have_css('#js-bridge-page')
- expect(rendered).not_to have_css('#js-job-page')
- end
- end
end
diff --git a/spec/views/projects/project_members/index.html.haml_spec.rb b/spec/views/projects/project_members/index.html.haml_spec.rb
index 0446e1a7fc8..382d400b961 100644
--- a/spec/views/projects/project_members/index.html.haml_spec.rb
+++ b/spec/views/projects/project_members/index.html.haml_spec.rb
@@ -23,7 +23,8 @@ RSpec.describe 'projects/project_members/index', :aggregate_failures do
expect(rendered).to have_content('Project members')
expect(rendered).to have_content('You can invite a new member')
- expect(rendered).to have_selector('.js-import-a-project-modal')
+ expect(rendered).to have_selector('.js-import-project-members-trigger')
+ expect(rendered).to have_selector('.js-import-project-members-modal')
expect(rendered).to have_selector('.js-invite-group-trigger')
expect(rendered).to have_selector('.js-invite-members-trigger')
expect(rendered).not_to have_content('Members can be added by project')
@@ -51,7 +52,8 @@ RSpec.describe 'projects/project_members/index', :aggregate_failures do
expect(rendered).to have_content('Project members')
expect(rendered).not_to have_content('You can invite a new member')
- expect(rendered).not_to have_selector('.js-import-a-project-modal')
+ expect(rendered).not_to have_selector('.js-import-project-members-trigger')
+ expect(rendered).not_to have_selector('.js-import-project-members-modal')
expect(rendered).not_to have_selector('.js-invite-group-trigger')
expect(rendered).not_to have_selector('.js-invite-members-trigger')
expect(rendered).to have_content('Members can be added by project')
diff --git a/spec/views/projects/settings/operations/show.html.haml_spec.rb b/spec/views/projects/settings/operations/show.html.haml_spec.rb
index 8853b34074a..664c8b7432e 100644
--- a/spec/views/projects/settings/operations/show.html.haml_spec.rb
+++ b/spec/views/projects/settings/operations/show.html.haml_spec.rb
@@ -10,10 +10,6 @@ RSpec.describe 'projects/settings/operations/show' do
create(:project_error_tracking_setting, project: project)
end
- let_it_be_with_reload(:tracing_setting) do
- create(:project_tracing_setting, project: project)
- end
-
let_it_be(:prometheus_integration) { create(:prometheus_integration, project: project) }
before_all do
@@ -25,8 +21,6 @@ RSpec.describe 'projects/settings/operations/show' do
allow(view).to receive(:error_tracking_setting)
.and_return(error_tracking_setting)
- allow(view).to receive(:tracing_setting)
- .and_return(tracing_setting)
allow(view).to receive(:prometheus_integration)
.and_return(prometheus_integration)
allow(view).to receive(:current_user).and_return(user)
@@ -51,14 +45,4 @@ RSpec.describe 'projects/settings/operations/show' do
end
end
end
-
- describe 'Operations > Tracing' do
- context 'Settings page ' do
- it 'renders the Tracing Settings page' do
- render
-
- expect(rendered).to have_content _('Embed an image of your existing Jaeger server in GitLab.')
- end
- end
- end
end
diff --git a/spec/views/projects/tracing/show.html.haml_spec.rb b/spec/views/projects/tracing/show.html.haml_spec.rb
deleted file mode 100644
index 96dc6a18fc7..00000000000
--- a/spec/views/projects/tracing/show.html.haml_spec.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'projects/tracings/show' do
- let_it_be_with_reload(:project) { create(:project) }
- let_it_be(:error_tracking_setting) { create(:project_error_tracking_setting, project: project) }
-
- before do
- assign(:project, project)
- allow(view).to receive(:error_tracking_setting)
- .and_return(error_tracking_setting)
- end
-
- context 'with project.tracing_external_url' do
- let_it_be(:tracing_url) { 'https://tracing.url' }
- let_it_be(:tracing_setting) { create(:project_tracing_setting, project: project, external_url: tracing_url) }
-
- before do
- allow(view).to receive(:can?).and_return(true)
- allow(view).to receive(:tracing_setting).and_return(tracing_setting)
- end
-
- it 'renders iframe' do
- render
-
- expect(rendered).to match(/iframe/)
- end
-
- context 'with malicious external_url' do
- let(:malicious_tracing_url) { "https://replaceme.com/'><script>alert(document.cookie)</script>" }
- let(:cleaned_url) { "https://replaceme.com/'&gt;" }
-
- before do
- tracing_setting.update_column(:external_url, malicious_tracing_url)
- end
-
- it 'sanitizes external_url' do
- render
-
- expect(tracing_setting.external_url).to eq(malicious_tracing_url)
- expect(rendered).to have_xpath("//iframe[@src=\"#{cleaned_url}\"]")
- end
- end
- end
-
- context 'without project.tracing_external_url' do
- before do
- allow(view).to receive(:can?).and_return(true)
- end
-
- it 'renders empty state' do
- render
-
- expect(rendered).to have_link('Add Jaeger URL')
- expect(rendered).not_to match(/iframe/)
- end
- end
-end
diff --git a/spec/views/shared/deploy_tokens/_form.html.haml_spec.rb b/spec/views/shared/deploy_tokens/_form.html.haml_spec.rb
index 5ac42952f78..74ad0ccb77a 100644
--- a/spec/views/shared/deploy_tokens/_form.html.haml_spec.rb
+++ b/spec/views/shared/deploy_tokens/_form.html.haml_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'shared/deploy_tokens/_form.html.haml' do
RSpec.shared_examples "display deploy token settings" do |role, shows_package_registry_permissions|
before do
- subject.add_user(user, role)
+ subject.add_member(user, role)
allow(view).to receive(:current_user).and_return(user)
stub_config(packages: { enabled: packages_enabled })
end
diff --git a/spec/workers/build_finished_worker_spec.rb b/spec/workers/build_finished_worker_spec.rb
index b4b986662d2..2ff173c1558 100644
--- a/spec/workers/build_finished_worker_spec.rb
+++ b/spec/workers/build_finished_worker_spec.rb
@@ -12,7 +12,6 @@ RSpec.describe BuildFinishedWorker do
let_it_be(:build) { create(:ci_build, :success, pipeline: create(:ci_pipeline)) }
before do
- stub_feature_flags(ci_build_finished_worker_namespace_changed: build.project)
expect(Ci::Build).to receive(:find_by).with({ id: build.id }).and_return(build)
end
@@ -30,18 +29,6 @@ RSpec.describe BuildFinishedWorker do
subject
end
- context 'with ci_build_finished_worker_namespace_changed feature flag disabled' do
- before do
- stub_feature_flags(ci_build_finished_worker_namespace_changed: false)
- end
-
- it 'calls deprecated worker' do
- expect(ArchiveTraceWorker).to receive(:perform_in)
-
- subject
- end
- end
-
context 'when build is failed' do
before do
build.update!(status: :failed)
diff --git a/spec/workers/build_hooks_worker_spec.rb b/spec/workers/build_hooks_worker_spec.rb
index a69e188b441..426eb03638c 100644
--- a/spec/workers/build_hooks_worker_spec.rb
+++ b/spec/workers/build_hooks_worker_spec.rb
@@ -23,6 +23,25 @@ RSpec.describe BuildHooksWorker do
end
end
+ describe '.perform_async' do
+ it 'sends a message to the application logger, before performing', :sidekiq_inline do
+ build = create(:ci_build)
+
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ message: include('Enqueuing hooks for Build'),
+ class: described_class.name,
+ build_id: build.id,
+ pipeline_id: build.pipeline_id,
+ project_id: build.project_id,
+ build_status: build.status
+ )
+
+ expect_any_instance_of(Ci::Build).to receive(:execute_hooks)
+
+ described_class.perform_async(build)
+ end
+ end
+
it_behaves_like 'worker with data consistency',
described_class,
data_consistency: :delayed
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
index b5f20e9ff76..fe2039bd79e 100644
--- a/spec/workers/bulk_imports/pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -189,7 +189,7 @@ RSpec.describe BulkImports::PipelineWorker do
end
end
- context 'when network error is raised' do
+ context 'when retry pipeline error is raised' do
let(:pipeline_tracker) do
create(
:bulk_import_tracker,
@@ -200,7 +200,7 @@ RSpec.describe BulkImports::PipelineWorker do
end
let(:exception) do
- BulkImports::NetworkError.new(response: instance_double(HTTParty::Response, code: 429, headers: {}))
+ BulkImports::RetryPipelineError.new('Error!', 60)
end
before do
@@ -213,54 +213,36 @@ RSpec.describe BulkImports::PipelineWorker do
end
end
- context 'when error is retriable' do
- it 'reenqueues the worker' do
- expect_any_instance_of(BulkImports::Tracker) do |tracker|
- expect(tracker).to receive(:retry).and_call_original
- end
-
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- hash_including(
- 'pipeline_name' => 'FakePipeline',
- 'entity_id' => entity.id
- )
- )
- end
+ it 'reenqueues the worker' do
+ expect_any_instance_of(BulkImports::Tracker) do |tracker|
+ expect(tracker).to receive(:retry).and_call_original
+ end
- expect(described_class)
- .to receive(:perform_in)
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
.with(
- 60.seconds,
- pipeline_tracker.id,
- pipeline_tracker.stage,
- pipeline_tracker.entity.id
+ hash_including(
+ 'pipeline_name' => 'FakePipeline',
+ 'entity_id' => entity.id
+ )
)
-
- subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
-
- pipeline_tracker.reload
-
- expect(pipeline_tracker.enqueued?).to be_truthy
end
- context 'when error is not retriable' do
- let(:exception) do
- BulkImports::NetworkError.new(response: instance_double(HTTParty::Response, code: 503, headers: {}))
- end
-
- it 'marks tracker as failed and logs the error' do
- expect(described_class).not_to receive(:perform_in)
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(
+ 60.seconds,
+ pipeline_tracker.id,
+ pipeline_tracker.stage,
+ pipeline_tracker.entity.id
+ )
- subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
- pipeline_tracker.reload
+ pipeline_tracker.reload
- expect(pipeline_tracker.failed?).to eq(true)
- end
- end
+ expect(pipeline_tracker.enqueued?).to be_truthy
end
end
end
diff --git a/spec/workers/ci/archive_trace_worker_spec.rb b/spec/workers/ci/archive_trace_worker_spec.rb
index 52723ff5823..3ac769aab9e 100644
--- a/spec/workers/ci/archive_trace_worker_spec.rb
+++ b/spec/workers/ci/archive_trace_worker_spec.rb
@@ -27,23 +27,6 @@ RSpec.describe Ci::ArchiveTraceWorker do
subject
end
-
- context 'when sticky_ci_archive_trace_worker is disabled' do
- before do
- stub_feature_flags(sticky_ci_archive_trace_worker: false)
- end
-
- it 'does not preload associations' do
- allow_next_instance_of(Ci::ArchiveTraceService) do |instance|
- allow(instance).to receive(:execute) do |job|
- expect(job.association(:project)).not_to be_loaded
- expect(job.association(:pending_state)).not_to be_loaded
- end
- end
-
- subject
- end
- end
end
context 'when job is not found' do
diff --git a/spec/workers/ci/build_finished_worker_spec.rb b/spec/workers/ci/build_finished_worker_spec.rb
index e9e7a057f98..201182636e7 100644
--- a/spec/workers/ci/build_finished_worker_spec.rb
+++ b/spec/workers/ci/build_finished_worker_spec.rb
@@ -10,7 +10,6 @@ RSpec.describe Ci::BuildFinishedWorker do
let_it_be(:build) { create(:ci_build, :success, pipeline: create(:ci_pipeline)) }
before do
- stub_feature_flags(ci_build_finished_worker_namespace_changed: build.project)
expect(Ci::Build).to receive(:find_by).with({ id: build.id }).and_return(build)
end
@@ -28,18 +27,6 @@ RSpec.describe Ci::BuildFinishedWorker do
subject
end
- context 'with ci_build_finished_worker_namespace_changed feature flag disabled' do
- before do
- stub_feature_flags(ci_build_finished_worker_namespace_changed: false)
- end
-
- it 'calls deprecated worker' do
- expect(ArchiveTraceWorker).to receive(:perform_in)
-
- subject
- end
- end
-
context 'when build is failed' do
before do
build.update!(status: :failed)
diff --git a/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb
index 000eda055af..7b28384a5bf 100644
--- a/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb
+++ b/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb
@@ -2,28 +2,57 @@
require 'spec_helper'
-RSpec.describe ::Ci::PipelineArtifacts::CoverageReportWorker do
+RSpec.describe Ci::PipelineArtifacts::CoverageReportWorker do
describe '#perform' do
+ let(:pipeline_id) { pipeline.id }
+
subject { described_class.new.perform(pipeline_id) }
context 'when pipeline exists' do
- let(:pipeline) { create(:ci_pipeline) }
- let(:pipeline_id) { pipeline.id }
+ let(:pipeline) { create(:ci_pipeline, :success) }
- it 'calls pipeline report result service' do
- expect_next_instance_of(::Ci::PipelineArtifacts::CoverageReportService) do |create_artifact_service|
- expect(create_artifact_service).to receive(:execute)
+ it 'calls the pipeline coverage report service' do
+ expect_next_instance_of(::Ci::PipelineArtifacts::CoverageReportService, pipeline) do |service|
+ expect(service).to receive(:execute)
end
subject
end
end
+ context 'when the pipeline is part of a hierarchy' do
+ let_it_be(:root_ancestor_pipeline) { create(:ci_pipeline, :success) }
+ let_it_be(:pipeline) { create(:ci_pipeline, :success, child_of: root_ancestor_pipeline) }
+ let_it_be(:another_child_pipeline) { create(:ci_pipeline, :success, child_of: root_ancestor_pipeline) }
+
+ context 'when all pipelines are complete' do
+ it 'calls the pipeline coverage report service on the root ancestor pipeline' do
+ expect_next_instance_of(::Ci::PipelineArtifacts::CoverageReportService, root_ancestor_pipeline) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ subject
+ end
+ end
+
+ context 'when the pipeline hierarchy has an incomplete pipeline' do
+ before do
+ another_child_pipeline.update!(status: :running)
+ end
+
+ it 'does not call pipeline coverage report service' do
+ expect(Ci::PipelineArtifacts::CoverageReportService).not_to receive(:new)
+
+ subject
+ end
+ end
+ end
+
context 'when pipeline does not exist' do
let(:pipeline_id) { non_existing_record_id }
it 'does not call pipeline create artifact service' do
- expect(Ci::PipelineArtifacts::CoverageReportService).not_to receive(:execute)
+ expect(Ci::PipelineArtifacts::CoverageReportService).not_to receive(:new)
subject
end
diff --git a/spec/workers/clusters/applications/activate_integration_worker_spec.rb b/spec/workers/clusters/applications/activate_integration_worker_spec.rb
index ecb49be5a4b..5163e4681fa 100644
--- a/spec/workers/clusters/applications/activate_integration_worker_spec.rb
+++ b/spec/workers/clusters/applications/activate_integration_worker_spec.rb
@@ -40,15 +40,6 @@ RSpec.describe Clusters::Applications::ActivateIntegrationWorker, '#perform' do
expect { described_class.new.perform(cluster.id, integration_name) }
.to change { project.reload.prometheus_integration&.active }.from(nil).to(true)
end
-
- context 'when using the old worker class' do
- let(:described_class) { Clusters::Applications::ActivateServiceWorker }
-
- it 'ensures Prometheus integration is activated' do
- expect { described_class.new.perform(cluster.id, integration_name) }
- .to change { project.reload.prometheus_integration&.active }.from(nil).to(true)
- end
- end
end
end
end
diff --git a/spec/workers/clusters/applications/deactivate_integration_worker_spec.rb b/spec/workers/clusters/applications/deactivate_integration_worker_spec.rb
index 3f0188eee23..62792a3b7d9 100644
--- a/spec/workers/clusters/applications/deactivate_integration_worker_spec.rb
+++ b/spec/workers/clusters/applications/deactivate_integration_worker_spec.rb
@@ -46,15 +46,6 @@ RSpec.describe Clusters::Applications::DeactivateIntegrationWorker, '#perform' d
expect { described_class.new.perform(cluster.id, integration_name) }
.to change { prometheus_integration.reload.active }.from(true).to(false)
end
-
- context 'when using the old worker class' do
- let(:described_class) { Clusters::Applications::ActivateServiceWorker }
-
- it 'ensures Prometheus integration is deactivated' do
- expect { described_class.new.perform(cluster.id, integration_name) }
- .to change { prometheus_integration.reload.active }.from(true).to(false)
- end
- end
end
end
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index 3cd82b8bf4d..5a32c1b40bb 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -22,6 +22,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures do
end
let_it_be(:project) { create(:project, :import_started) }
+ let_it_be(:project2) { create(:project, :import_canceled) }
let(:importer_class) { double(:importer_class, name: 'klass_name') }
let(:importer_instance) { double(:importer_instance) }
@@ -110,6 +111,27 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures do
})
end
+ it 'logs info if the import state is canceled' do
+ expect(project2.import_state.status).to eq('canceled')
+
+ expect(importer_class).not_to receive(:new)
+
+ expect(importer_instance).not_to receive(:execute)
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ {
+ github_identifiers: nil,
+ message: 'project import canceled',
+ project_id: project2.id,
+ importer: 'klass_name'
+ }
+ )
+
+ worker.import(project2, client, { 'number' => 11, 'github_id' => 2 })
+ end
+
it 'logs error when the import fails' do
expect(importer_class)
.to receive(:new)
diff --git a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
index 1e088929f66..0ac1733781a 100644
--- a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::StageMethods do
let_it_be(:project) { create(:project, :import_started, import_url: 'https://t0ken@github.com/repo/repo.git') }
+ let_it_be(:project2) { create(:project, :import_canceled) }
let(:worker) do
Class.new do
@@ -22,6 +23,37 @@ RSpec.describe Gitlab::GithubImport::StageMethods do
worker.perform(-1)
end
+ it 'returns if the import state is canceled' do
+ allow(worker)
+ .to receive(:find_project)
+ .with(project2.id)
+ .and_return(project2)
+
+ expect(worker).not_to receive(:try_import)
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ {
+ message: 'starting stage',
+ project_id: project2.id,
+ import_stage: 'DummyStage'
+ }
+ )
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ {
+ message: 'project import canceled',
+ project_id: project2.id,
+ import_stage: 'DummyStage'
+ }
+ )
+
+ worker.perform(project2.id)
+ end
+
it 'imports the data when the project exists' do
allow(worker)
.to receive(:find_project)
diff --git a/spec/workers/concerns/waitable_worker_spec.rb b/spec/workers/concerns/waitable_worker_spec.rb
index 824ae8fcf83..f6d4cc4679d 100644
--- a/spec/workers/concerns/waitable_worker_spec.rb
+++ b/spec/workers/concerns/waitable_worker_spec.rb
@@ -29,40 +29,41 @@ RSpec.describe WaitableWorker do
subject(:job) { worker.new }
describe '.bulk_perform_and_wait' do
- it 'schedules the jobs and waits for them to complete' do
- worker.bulk_perform_and_wait([[1], [2]])
-
- expect(worker.counter).to eq(3)
+ context '1 job' do
+ it 'inlines the job' do
+ args_list = [[1]]
+ expect(worker).to receive(:bulk_perform_inline).with(args_list).and_call_original
+ expect(Gitlab::AppJsonLogger).to(
+ receive(:info).with(a_hash_including('message' => 'running inline',
+ 'class' => 'Gitlab::Foo::Bar::DummyWorker',
+ 'job_status' => 'running',
+ 'queue' => 'foo_bar_dummy'))
+ .once)
+
+ worker.bulk_perform_and_wait(args_list)
+
+ expect(worker.counter).to eq(1)
+ end
end
- it 'inlines workloads <= 3 jobs' do
- args_list = [[1], [2], [3]]
- expect(worker).to receive(:bulk_perform_inline).with(args_list).and_call_original
- expect(Gitlab::AppJsonLogger).to(
- receive(:info).with(a_hash_including('message' => 'running inline',
- 'class' => 'Gitlab::Foo::Bar::DummyWorker',
- 'job_status' => 'running',
- 'queue' => 'foo_bar_dummy'))
- .exactly(3).times)
-
- worker.bulk_perform_and_wait(args_list)
+ context 'between 2 and 3 jobs' do
+ it 'runs the jobs asynchronously' do
+ arguments = [[1], [2], [3]]
- expect(worker.counter).to eq(6)
- end
-
- it 'runs > 3 jobs using sidekiq and a waiter key' do
- expect(worker).to receive(:bulk_perform_async)
- .with([[1, anything], [2, anything], [3, anything], [4, anything]])
+ expect(worker).to receive(:bulk_perform_async).with(arguments)
- worker.bulk_perform_and_wait([[1], [2], [3], [4]])
+ worker.bulk_perform_and_wait(arguments)
+ end
end
- it 'runs > 10 * timeout jobs using sidekiq and no waiter key' do
- arguments = 1.upto(21).map { |i| [i] }
+ context '>= 4 jobs' do
+ it 'runs jobs using sidekiq' do
+ arguments = 1.upto(5).map { |i| [i] }
- expect(worker).to receive(:bulk_perform_async).with(arguments)
+ expect(worker).to receive(:bulk_perform_async).with(arguments)
- worker.bulk_perform_and_wait(arguments, timeout: 2)
+ worker.bulk_perform_and_wait(arguments)
+ end
end
end
diff --git a/spec/workers/container_registry/migration/enqueuer_worker_spec.rb b/spec/workers/container_registry/migration/enqueuer_worker_spec.rb
index ab3bd8f75d4..c2381c0ced7 100644
--- a/spec/workers/container_registry/migration/enqueuer_worker_spec.rb
+++ b/spec/workers/container_registry/migration/enqueuer_worker_spec.rb
@@ -158,7 +158,7 @@ RSpec.describe ContainerRegistry::Migration::EnqueuerWorker, :aggregate_failures
expect(worker).to receive(:handle_next_migration).exactly(3).times.and_call_original
- expect { subject }.to make_queries_matching(/LIMIT 2/)
+ expect { subject }.to make_queries_matching(/LIMIT 25/)
expect(container_repository.reload).to be_pre_importing
expect(container_repository2.reload).to be_pre_importing
diff --git a/spec/workers/disallow_two_factor_for_group_worker_spec.rb b/spec/workers/disallow_two_factor_for_group_worker_spec.rb
index a69dd893f81..f30b12dd7f4 100644
--- a/spec/workers/disallow_two_factor_for_group_worker_spec.rb
+++ b/spec/workers/disallow_two_factor_for_group_worker_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe DisallowTwoFactorForGroupWorker do
end
it "updates group members" do
- group.add_user(user, GroupMember::DEVELOPER)
+ group.add_member(user, GroupMember::DEVELOPER)
described_class.new.perform(group.id)
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index a9e886de52a..e8ec7c28537 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -181,9 +181,7 @@ RSpec.describe 'Every Sidekiq worker' do
'ClusterWaitForAppUpdateWorker' => 3,
'ClusterWaitForIngressIpAddressWorker' => 3,
'Clusters::Applications::ActivateIntegrationWorker' => 3,
- 'Clusters::Applications::ActivateServiceWorker' => 3,
'Clusters::Applications::DeactivateIntegrationWorker' => 3,
- 'Clusters::Applications::DeactivateServiceWorker' => 3,
'Clusters::Applications::UninstallWorker' => 3,
'Clusters::Applications::WaitForUninstallAppWorker' => 3,
'Clusters::Cleanup::AppWorker' => 3,
@@ -261,6 +259,7 @@ RSpec.describe 'Every Sidekiq worker' do
'Gitlab::GithubImport::AdvanceStageWorker' => 3,
'Gitlab::GithubImport::ImportDiffNoteWorker' => 5,
'Gitlab::GithubImport::ImportIssueWorker' => 5,
+ 'Gitlab::GithubImport::ImportIssueEventWorker' => 5,
'Gitlab::GithubImport::ImportLfsObjectWorker' => 5,
'Gitlab::GithubImport::ImportNoteWorker' => 5,
'Gitlab::GithubImport::ImportPullRequestMergedByWorker' => 5,
@@ -270,6 +269,7 @@ RSpec.describe 'Every Sidekiq worker' do
'Gitlab::GithubImport::Stage::FinishImportWorker' => 5,
'Gitlab::GithubImport::Stage::ImportBaseDataWorker' => 5,
'Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportIssueEventsWorker' => 5,
'Gitlab::GithubImport::Stage::ImportLfsObjectsWorker' => 5,
'Gitlab::GithubImport::Stage::ImportNotesWorker' => 5,
'Gitlab::GithubImport::Stage::ImportPullRequestsMergedByWorker' => 5,
@@ -310,7 +310,6 @@ RSpec.describe 'Every Sidekiq worker' do
'Integrations::ExecuteWorker' => 3,
'Integrations::IrkerWorker' => 3,
'InvalidGpgSignatureUpdateWorker' => 3,
- 'IrkerWorker' => 3,
'IssuableExportCsvWorker' => 3,
'Issues::PlacementWorker' => 3,
'Issues::RebalancingWorker' => 3,
@@ -358,6 +357,7 @@ RSpec.describe 'Every Sidekiq worker' do
'ObjectStorage::BackgroundMoveWorker' => 5,
'ObjectStorage::MigrateUploadsWorker' => 3,
'Packages::CleanupPackageFileWorker' => 0,
+ 'Packages::Cleanup::ExecutePolicyWorker' => 0,
'Packages::Composer::CacheUpdateWorker' => false,
'Packages::Go::SyncPackagesWorker' => 3,
'Packages::MarkPackageFilesForDestructionWorker' => 3,
@@ -366,7 +366,6 @@ RSpec.describe 'Every Sidekiq worker' do
'Packages::Rubygems::ExtractionWorker' => 3,
'PagesDomainSslRenewalWorker' => 3,
'PagesDomainVerificationWorker' => 3,
- 'PagesTransferWorker' => 3,
'PagesWorker' => 3,
'PersonalAccessTokens::Groups::PolicyWorker' => 3,
'PersonalAccessTokens::Instance::PolicyWorker' => 3,
@@ -381,7 +380,6 @@ RSpec.describe 'Every Sidekiq worker' do
'ProjectExportWorker' => false,
'ProjectImportScheduleWorker' => 1,
'ProjectScheduleBulkRepositoryShardMovesWorker' => 3,
- 'ProjectServiceWorker' => 3,
'ProjectTemplateExportWorker' => false,
'ProjectUpdateRepositoryStorageWorker' => 3,
'Projects::GitGarbageCollectWorker' => false,
diff --git a/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb b/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
index af15f465107..15bc55c1526 100644
--- a/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
@@ -7,7 +7,8 @@ RSpec.describe Gitlab::GithubImport::ImportDiffNoteWorker do
describe '#import' do
it 'imports a diff note' do
- project = double(:project, full_path: 'foo/bar', id: 1, import_state: nil)
+ import_state = create(:import_state, :started)
+ project = double(:project, full_path: 'foo/bar', id: 1, import_state: import_state)
client = double(:client)
importer = double(:importer)
hash = {
diff --git a/spec/workers/gitlab/github_import/import_issue_event_worker_spec.rb b/spec/workers/gitlab/github_import/import_issue_event_worker_spec.rb
new file mode 100644
index 00000000000..03a6503fb84
--- /dev/null
+++ b/spec/workers/gitlab/github_import/import_issue_event_worker_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::ImportIssueEventWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#import' do
+ let(:import_state) { create(:import_state, :started) }
+
+ let(:project) do
+ instance_double('Project', full_path: 'foo/bar', id: 1, import_state: import_state)
+ end
+
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:importer) { instance_double('Gitlab::GithubImport::Importer::IssueEventImporter') }
+
+ let(:event_hash) do
+ {
+ 'id' => 6501124486,
+ 'node_id' => 'CE_lADOHK9fA85If7x0zwAAAAGDf0mG',
+ 'url' => 'https://api.github.com/repos/elhowm/test-import/issues/events/6501124486',
+ 'actor' => { 'id' => 4, 'login' => 'alice' },
+ 'event' => 'closed',
+ 'commit_id' => nil,
+ 'commit_url' => nil,
+ 'created_at' => '2022-04-26 18:30:53 UTC',
+ 'performed_via_github_app' => nil
+ }
+ end
+
+ it 'imports an issue event' do
+ expect(Gitlab::GithubImport::Importer::IssueEventImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::IssueEvent),
+ project,
+ client
+ )
+ .and_return(importer)
+
+ expect(importer).to receive(:execute)
+
+ expect(Gitlab::GithubImport::ObjectCounter)
+ .to receive(:increment)
+ .and_call_original
+
+ worker.import(project, client, event_hash)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/import_issue_worker_spec.rb b/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
index 29f21c1d184..c2a7639fde4 100644
--- a/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
@@ -7,7 +7,8 @@ RSpec.describe Gitlab::GithubImport::ImportIssueWorker do
describe '#import' do
it 'imports an issue' do
- project = double(:project, full_path: 'foo/bar', id: 1, import_state: nil)
+ import_state = create(:import_state, :started)
+ project = double(:project, full_path: 'foo/bar', id: 1, import_state: import_state)
client = double(:client)
importer = double(:importer)
hash = {
diff --git a/spec/workers/gitlab/github_import/import_note_worker_spec.rb b/spec/workers/gitlab/github_import/import_note_worker_spec.rb
index f4598340938..16ca5658f77 100644
--- a/spec/workers/gitlab/github_import/import_note_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_note_worker_spec.rb
@@ -7,7 +7,8 @@ RSpec.describe Gitlab::GithubImport::ImportNoteWorker do
describe '#import' do
it 'imports a note' do
- project = double(:project, full_path: 'foo/bar', id: 1, import_state: nil)
+ import_state = create(:import_state, :started)
+ project = double(:project, full_path: 'foo/bar', id: 1, import_state: import_state)
client = double(:client)
importer = double(:importer)
hash = {
diff --git a/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb b/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb
index faed2f8f340..59f45b437c4 100644
--- a/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb
@@ -7,7 +7,8 @@ RSpec.describe Gitlab::GithubImport::ImportPullRequestWorker do
describe '#import' do
it 'imports a pull request' do
- project = double(:project, full_path: 'foo/bar', id: 1, import_state: nil)
+ import_state = create(:import_state, :started)
+ project = double(:project, full_path: 'foo/bar', id: 1, import_state: import_state)
client = double(:client)
importer = double(:importer)
hash = {
diff --git a/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
new file mode 100644
index 00000000000..b3c6a48767c
--- /dev/null
+++ b/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Stage::ImportIssueEventsWorker do
+ subject(:worker) { described_class.new }
+
+ let(:project) { create(:project) }
+ let!(:group) { create(:group, projects: [project]) }
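+ # Passing an array of actors to stub_feature_flags enables the flag only for those actors (here, the group).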
+ let(:feature_flag_state) { [group] }
+
+ describe '#import' do
+ let(:importer) { instance_double('Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter') }
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+
+ before do
+ stub_feature_flags(github_importer_issue_events_import: feature_flag_state)
+ end
+
+ it 'imports all the issue events' do
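+ # The JobWaiter key ('123') and remaining job count (2) are expected to be forwarded to AdvanceStageWorker below.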
+ waiter = Gitlab::JobWaiter.new(2, '123')
+
+ expect(Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter)
+ .to receive(:new)
+ .with(project, client)
+ .and_return(importer)
+
+ expect(importer).to receive(:execute).and_return(waiter)
+
+ expect(Gitlab::GithubImport::AdvanceStageWorker)
+ .to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :notes)
+
+ worker.import(client, project)
+ end
+
+ context 'when feature flag is disabled' do
+ let(:feature_flag_state) { false }
+
+ it 'skips issue events import and calls next stage' do
+ expect(Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter).not_to receive(:new)
+ expect(Gitlab::GithubImport::AdvanceStageWorker).to receive(:perform_async).with(project.id, {}, :notes)
+
+ worker.import(client, project)
+ end
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb
index c0dd4f488cc..a88256b3cae 100644
--- a/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker do
expect(Gitlab::GithubImport::AdvanceStageWorker)
.to receive(:perform_async)
- .with(project.id, { '123' => 2 }, :notes)
+ .with(project.id, { '123' => 2 }, :issue_events)
worker.import(client, project)
end
diff --git a/spec/workers/gitlab_service_ping_worker_spec.rb b/spec/workers/gitlab_service_ping_worker_spec.rb
index 057639dcf1d..c88708dc50a 100644
--- a/spec/workers/gitlab_service_ping_worker_spec.rb
+++ b/spec/workers/gitlab_service_ping_worker_spec.rb
@@ -21,29 +21,12 @@ RSpec.describe GitlabServicePingWorker, :clean_gitlab_redis_shared_state do
subject.perform
end
- context 'with prerecord_service_ping_data feature enabled' do
- it 'delegates to ServicePing::SubmitService' do
- stub_feature_flags(prerecord_service_ping_data: true)
-
- expect_next_instance_of(ServicePing::SubmitService, payload: payload) do |service|
- expect(service).to receive(:execute)
- end
-
- subject.perform
+ it 'delegates to ServicePing::SubmitService' do
+ expect_next_instance_of(ServicePing::SubmitService, payload: payload) do |service|
+ expect(service).to receive(:execute)
end
- end
- context 'with prerecord_service_ping_data feature disabled' do
- it 'does not prerecord ServicePing, and calls SubmitService', :aggregate_failures do
- stub_feature_flags(prerecord_service_ping_data: false)
-
- expect(ServicePing::BuildPayload).not_to receive(:new)
- expect(ServicePing::BuildPayload).not_to receive(:new)
- expect_next_instance_of(ServicePing::SubmitService, payload: nil) do |service|
- expect(service).to receive(:execute)
- end
- expect { subject.perform }.not_to change { RawUsageData.count }
- end
+ subject.perform
end
context 'payload computation' do
diff --git a/spec/workers/google_cloud/create_cloudsql_instance_worker_spec.rb b/spec/workers/google_cloud/create_cloudsql_instance_worker_spec.rb
new file mode 100644
index 00000000000..5d595a3679b
--- /dev/null
+++ b/spec/workers/google_cloud/create_cloudsql_instance_worker_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'google/apis/sqladmin_v1beta4'
+
+RSpec.describe GoogleCloud::CreateCloudsqlInstanceWorker do
+ let(:random_user) { create(:user) }
+ let(:project) { create(:project) }
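+ # The option values below are placeholder symbols; GoogleCloud::SetupCloudsqlInstanceService is stubbed in these examples.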
+ let(:worker_options) do
+ {
+ gcp_project_id: :gcp_project_id,
+ instance_name: :instance_name,
+ database_version: :database_version,
+ environment_name: :environment_name,
+ is_protected: true
+ }
+ end
+
+ context 'when triggered' do
+ subject do
+ user_id = project.creator.id
+ project_id = project.id
+ described_class.new.perform(user_id, project_id, worker_options)
+ end
+
+ it 'calls GoogleCloud::SetupCloudsqlInstanceService' do
+ allow_next_instance_of(GoogleCloud::SetupCloudsqlInstanceService) do |service|
+ expect(service).to receive(:execute).and_return({ status: :success })
+ end
+
+ subject
+ end
+
+ context 'when GoogleCloud::SetupCloudsqlInstanceService fails' do
+ subject do
+ user_id = random_user.id
+ project_id = project.id
+ described_class.new.perform(user_id, project_id, worker_options)
+ end
+
+ it 'raises an error' do
+ allow_next_instance_of(GoogleCloud::SetupCloudsqlInstanceService) do |service|
+ expect(service).to receive(:execute).and_return({ status: :error })
+ end
+
+ expect { subject }.to raise_error(Exception)
+ end
+ end
+ end
+end
diff --git a/spec/workers/incident_management/close_incident_worker_spec.rb b/spec/workers/incident_management/close_incident_worker_spec.rb
new file mode 100644
index 00000000000..b0d284ba5db
--- /dev/null
+++ b/spec/workers/incident_management/close_incident_worker_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::CloseIncidentWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ let_it_be(:user) { User.alert_bot }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue, reload: true) { create(:incident, project: project) }
+
+ let(:issue_id) { issue.id }
+
+ it 'calls the close issue service' do
+ expect_next_instance_of(Issues::CloseService, project: project, current_user: user) do |service|
+ expect(service).to receive(:execute).with(issue, system_note: false).and_call_original
+ end
+
+ expect { worker.perform(issue_id) }.to change(ResourceStateEvent, :count).by(1)
+ end
+
+ shared_examples 'does not call the close issue service' do
+ specify do
+ expect(Issues::CloseService).not_to receive(:new)
+
+ expect { worker.perform(issue_id) }.not_to change(ResourceStateEvent, :count)
+ end
+ end
+
+ context 'when the incident does not exist' do
+ let(:issue_id) { non_existing_record_id }
+
+ it_behaves_like 'does not call the close issue service'
+ end
+
+ context 'when issue type is not incident' do
+ before do
+ issue.update!(issue_type: :issue)
+ end
+
+ it_behaves_like 'does not call the close issue service'
+ end
+
+ context 'when incident is not open' do
+ before do
+ issue.close
+ end
+
+ it_behaves_like 'does not call the close issue service'
+ end
+
+ context 'when incident fails to close' do
+ before do
+ allow_next_instance_of(Issues::CloseService) do |service|
+ expect(service).to receive(:close_issue).and_return(issue)
+ end
+ end
+
+ specify do
+ expect { worker.perform(issue_id) }.not_to change(ResourceStateEvent, :count)
+ end
+ end
+ end
+end
diff --git a/spec/workers/integrations/execute_worker_spec.rb b/spec/workers/integrations/execute_worker_spec.rb
index 19600f35c8f..0e585e3006c 100644
--- a/spec/workers/integrations/execute_worker_spec.rb
+++ b/spec/workers/integrations/execute_worker_spec.rb
@@ -36,26 +36,4 @@ RSpec.describe Integrations::ExecuteWorker, '#perform' do
end.not_to raise_error
end
end
-
- context 'when using the old worker class' do
- let(:described_class) { ProjectServiceWorker }
-
- it 'uses the correct worker attributes', :aggregate_failures do
- expect(described_class.sidekiq_options).to include('retry' => 3, 'dead' => false)
- expect(described_class.get_data_consistency).to eq(:always)
- expect(described_class.get_feature_category).to eq(:integrations)
- expect(described_class.get_urgency).to eq(:low)
- expect(described_class.worker_has_external_dependencies?).to be(true)
- end
-
- it 'executes integration with given data' do
- data = { test: 'test' }
-
- expect_next_found_instance_of(integration.class) do |integration|
- expect(integration).to receive(:execute).with(data)
- end
-
- worker.perform(integration.id, data)
- end
- end
end
diff --git a/spec/workers/integrations/irker_worker_spec.rb b/spec/workers/integrations/irker_worker_spec.rb
index 27dc08212ea..3b7b9af72fd 100644
--- a/spec/workers/integrations/irker_worker_spec.rb
+++ b/spec/workers/integrations/irker_worker_spec.rb
@@ -101,12 +101,6 @@ RSpec.describe Integrations::IrkerWorker, '#perform' do
subject.perform(*arguments)
end
end
-
- context 'when using the old worker class' do
- let(:described_class) { ::IrkerWorker }
-
- it { expect(subject.perform(*arguments)).to be_truthy }
- end
end
def wrap_message(text)
diff --git a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
index 632e4fb3071..77190dc49d9 100644
--- a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
+++ b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
@@ -169,7 +169,7 @@ RSpec.describe LooseForeignKeys::CleanupWorker do
let(:expected_connection) { expected_connection_model.constantize.connection }
before do
- allow(Gitlab::Database).to receive(:database_base_models).and_return(database_base_models)
+ allow(Gitlab::Database).to receive(:database_base_models_with_gitlab_shared).and_return(database_base_models)
if database_base_models.has_key?(:ci)
Gitlab::Database::SharedModel.using_connection(database_base_models[:ci].connection) do
diff --git a/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb b/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb
index 32e7bdd563d..53116815ce7 100644
--- a/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb
+++ b/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Namespaces::OnboardingIssueCreatedWorker, '#perform' do
let_it_be(:issue) { create(:issue) }
- let(:namespace) { issue.namespace }
+ let(:namespace) { issue.project.namespace }
it_behaves_like 'records an onboarding progress action', :issue_created do
subject { described_class.new.perform(namespace.id) }
diff --git a/spec/workers/packages/cleanup/execute_policy_worker_spec.rb b/spec/workers/packages/cleanup/execute_policy_worker_spec.rb
new file mode 100644
index 00000000000..81fcec1a360
--- /dev/null
+++ b/spec/workers/packages/cleanup/execute_policy_worker_spec.rb
@@ -0,0 +1,160 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Cleanup::ExecutePolicyWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform_work' do
+ subject(:perform_work) { worker.perform_work }
+
+ shared_examples 'not executing any policy' do
+ it 'is a no-op' do
+ expect(::Packages::Cleanup::ExecutePolicyService).not_to receive(:new)
+
+ expect { perform_work }.not_to change { Packages::PackageFile.installable.count }
+ end
+ end
+
+ context 'with no policies' do
+ it_behaves_like 'not executing any policy'
+ end
+
+ context 'with no runnable policies' do
+ let_it_be(:policy) { create(:packages_cleanup_policy) }
+
+ it_behaves_like 'not executing any policy'
+ end
+
+ context 'with runnable policies linked to no packages' do
+ let_it_be(:policy) { create(:packages_cleanup_policy, :runnable) }
+
+ it_behaves_like 'not executing any policy'
+ end
+
+ context 'with runnable policies linked to packages' do
+ let_it_be(:policy) { create(:packages_cleanup_policy, :runnable, keep_n_duplicated_package_files: '1') }
+ let_it_be(:package) { create(:package, project: policy.project) }
+
+ let_it_be(:package_file1) { create(:package_file, file_name: 'test1', package: package) }
+ let_it_be(:package_file2) { create(:package_file, file_name: 'test1', package: package) }
+
+ include_examples 'an idempotent worker' do
+ it 'executes the policy' do
+ expect(::Packages::Cleanup::ExecutePolicyService)
+ .to receive(:new).with(policy).and_call_original
+ expect_log_extra_metadata(:project_id, policy.project_id)
+ expect_log_extra_metadata(:execution_timeout, false)
+ expect_log_extra_metadata(:marked_package_files_total_count, 1)
+ expect_log_extra_metadata(:unique_package_id_and_file_name_total_count, 1)
+
+ expect { perform_work }
+ .to change { package.package_files.installable.count }.by(-1)
+ .and change { policy.reload.next_run_at.future? }.from(false).to(true)
+ end
+
+ context 'with a timeout' do
+ let(:mark_service_response) do
+ ServiceResponse.error(
+ message: 'Timeout',
+ payload: { marked_package_files_count: 1 }
+ )
+ end
+
+ it 'executes the policy partially' do
+ expect_next_instance_of(::Packages::MarkPackageFilesForDestructionService) do |service|
+ expect(service).to receive(:execute).and_return(mark_service_response)
+ end
+
+ expect_log_extra_metadata(:project_id, policy.project_id)
+ expect_log_extra_metadata(:execution_timeout, true)
+ expect_log_extra_metadata(:marked_package_files_total_count, 1)
+ expect_log_extra_metadata(:unique_package_id_and_file_name_total_count, 1)
+
+ expect { perform_work }
+ .to change { policy.reload.next_run_at.future? }.from(false).to(true)
+ end
+ end
+ end
+
+ context 'with several eligible policies' do
+ let_it_be(:policy2) { create(:packages_cleanup_policy, :runnable) }
+ let_it_be(:package2) { create(:package, project: policy2.project) }
+
+ before do
+ policy2.update_column(:next_run_at, 100.years.ago)
+ end
+
+ it 'executes the most urgent policy' do
+ expect(::Packages::Cleanup::ExecutePolicyService)
+ .to receive(:new).with(policy2).and_call_original
+ expect_log_extra_metadata(:project_id, policy2.project_id)
+ expect_log_extra_metadata(:execution_timeout, false)
+ expect_log_extra_metadata(:marked_package_files_total_count, 0)
+ expect_log_extra_metadata(:unique_package_id_and_file_name_total_count, 0)
+
+ expect { perform_work }
+ .to change { policy2.reload.next_run_at.future? }.from(false).to(true)
+ .and not_change { policy.reload.next_run_at }
+ end
+ end
+ end
+
+ context 'with runnable policy linked to packages in a disabled state' do
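+ # A keep_n_duplicated_package_files value of 'all' keeps every file, so the policy is effectively disabled.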
+ let_it_be(:policy) { create(:packages_cleanup_policy, :runnable, keep_n_duplicated_package_files: 'all') }
+ let_it_be(:package) { create(:package, project: policy.project) }
+
+ it_behaves_like 'not executing any policy'
+ end
+
+ def expect_log_extra_metadata(key, value)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(key, value)
+ end
+ end
+
+ describe '#remaining_work_count' do
+ subject { worker.remaining_work_count }
+
+ context 'with no policies' do
+ it { is_expected.to eq(0) }
+ end
+
+ context 'with no runnable policies' do
+ let_it_be(:policy) { create(:packages_cleanup_policy) }
+
+ it { is_expected.to eq(0) }
+ end
+
+ context 'with runnable policies linked to no packages' do
+ let_it_be(:policy) { create(:packages_cleanup_policy, :runnable) }
+
+ it { is_expected.to eq(0) }
+ end
+
+ context 'with runnable policies linked to packages' do
+ let_it_be(:policy) { create(:packages_cleanup_policy, :runnable) }
+ let_it_be(:package) { create(:package, project: policy.project) }
+
+ it { is_expected.to eq(1) }
+ end
+
+ context 'with runnable policy linked to packages in a disabled state' do
+ let_it_be(:policy) { create(:packages_cleanup_policy, :runnable, keep_n_duplicated_package_files: 'all') }
+ let_it_be(:package) { create(:package, project: policy.project) }
+
+ it { is_expected.to eq(0) }
+ end
+ end
+
+ describe '#max_running_jobs' do
+ let(:capacity) { 50 }
+
+ subject { worker.max_running_jobs }
+
+ before do
+ stub_application_setting(package_registry_cleanup_policies_worker_capacity: capacity)
+ end
+
+ it { is_expected.to eq(capacity) }
+ end
+end
diff --git a/spec/workers/packages/cleanup_package_file_worker_spec.rb b/spec/workers/packages/cleanup_package_file_worker_spec.rb
index 380e8916d13..95cf65c18c5 100644
--- a/spec/workers/packages/cleanup_package_file_worker_spec.rb
+++ b/spec/workers/packages/cleanup_package_file_worker_spec.rb
@@ -52,6 +52,7 @@ RSpec.describe Packages::CleanupPackageFileWorker do
end
it 'handles the error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(instance_of(RuntimeError), class: described_class.name)
expect { subject }.to change { Packages::PackageFile.error.count }.from(0).to(1)
expect(package_file.reload).to be_error
end
@@ -71,7 +72,9 @@ RSpec.describe Packages::CleanupPackageFileWorker do
end
it 'handles the error' do
- expect { subject }.to change { Packages::PackageFile.count }.by(-1)
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(instance_of(RuntimeError), class: described_class.name)
+ expect { subject }.not_to change { Packages::PackageFile.count }
+ expect(package_file.reload).to be_error
end
end
end
diff --git a/spec/workers/packages/cleanup_package_registry_worker_spec.rb b/spec/workers/packages/cleanup_package_registry_worker_spec.rb
index e43864975f6..e12f2198f66 100644
--- a/spec/workers/packages/cleanup_package_registry_worker_spec.rb
+++ b/spec/workers/packages/cleanup_package_registry_worker_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe Packages::CleanupPackageRegistryWorker do
describe '#perform' do
let_it_be_with_reload(:package_files) { create_list(:package_file, 2, :pending_destruction) }
+ let_it_be(:policy) { create(:packages_cleanup_policy, :runnable) }
+ let_it_be(:package) { create(:package, project: policy.project) }
let(:worker) { described_class.new }
@@ -34,6 +36,28 @@ RSpec.describe Packages::CleanupPackageRegistryWorker do
end
end
+ context 'with runnable policies' do
+ it_behaves_like 'an idempotent worker'
+
+ it 'queues the cleanup job' do
+ expect(Packages::Cleanup::ExecutePolicyWorker).to receive(:perform_with_capacity)
+
+ perform
+ end
+ end
+
+ context 'with no runnable policies' do
+ before do
+ policy.update_column(:next_run_at, 5.minutes.from_now)
+ end
+
+ it 'does not queue the cleanup job' do
+ expect(Packages::Cleanup::ExecutePolicyWorker).not_to receive(:perform_with_capacity)
+
+ perform
+ end
+ end
+
describe 'counts logging' do
let_it_be(:processing_package_file) { create(:package_file, status: :processing) }
@@ -41,6 +65,7 @@ RSpec.describe Packages::CleanupPackageRegistryWorker do
expect(worker).to receive(:log_extra_metadata_on_done).with(:pending_destruction_package_files_count, 2)
expect(worker).to receive(:log_extra_metadata_on_done).with(:processing_package_files_count, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:error_package_files_count, 0)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:pending_cleanup_policies_count, 1)
perform
end
diff --git a/spec/workers/packages/debian/generate_distribution_worker_spec.rb b/spec/workers/packages/debian/generate_distribution_worker_spec.rb
index a4627ec5d36..a3e956f14c8 100644
--- a/spec/workers/packages/debian/generate_distribution_worker_spec.rb
+++ b/spec/workers/packages/debian/generate_distribution_worker_spec.rb
@@ -18,6 +18,12 @@ RSpec.describe Packages::Debian::GenerateDistributionWorker, type: :worker do
context "for #{container_type}" do
include_context 'with Debian distribution', container_type
+ context 'with FIPS mode enabled', :fips_mode do
+ it 'raises an error' do
+ expect { subject }.to raise_error(::Packages::FIPS::DisabledError)
+ end
+ end
+
context 'with mocked service' do
it 'calls GenerateDistributionService' do
expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
diff --git a/spec/workers/packages/debian/process_changes_worker_spec.rb b/spec/workers/packages/debian/process_changes_worker_spec.rb
index 4a8eb855398..93eba4bfa9a 100644
--- a/spec/workers/packages/debian/process_changes_worker_spec.rb
+++ b/spec/workers/packages/debian/process_changes_worker_spec.rb
@@ -16,6 +16,12 @@ RSpec.describe Packages::Debian::ProcessChangesWorker, type: :worker do
subject { worker.perform(package_file_id, user_id) }
+ context 'with FIPS mode enabled', :fips_mode do
+ it 'raises an error' do
+ expect { subject }.to raise_error(::Packages::FIPS::DisabledError)
+ end
+ end
+
context 'with mocked service' do
it 'calls ProcessChangesService' do
expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
diff --git a/spec/workers/pages/invalidate_domain_cache_worker_spec.rb b/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
new file mode 100644
index 00000000000..1c1586ef199
--- /dev/null
+++ b/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Pages::InvalidateDomainCacheWorker do
+ shared_examples 'clears caches with' do |event_class:, event_data:, caches:|
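+ # `caches` maps the expected Gitlab::Pages::CacheControl type (:namespace, :project) to the id it should be built with.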
+ let(:event) do
+ event_class.new(data: event_data)
+ end
+
+ subject { consume_event(subscriber: described_class, event: event) }
+
+ it_behaves_like 'subscribes to event'
+
+ it 'clears the cache with Gitlab::Pages::CacheControl' do
+ caches.each do |cache_type, cache_id|
+ expect_next_instance_of(Gitlab::Pages::CacheControl, type: cache_type, id: cache_id) do |cache_control|
+ expect(cache_control).to receive(:clear_cache)
+ end
+ end
+
+ subject
+ end
+ end
+
+ it_behaves_like 'clears caches with',
+ event_class: Pages::PageDeployedEvent,
+ event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
+ caches: { namespace: 3, project: 1 }
+
+ it_behaves_like 'clears caches with',
+ event_class: Pages::PageDeletedEvent,
+ event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
+ caches: { namespace: 3, project: 1 }
+
+ it_behaves_like 'clears caches with',
+ event_class: Projects::ProjectDeletedEvent,
+ event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
+ caches: { namespace: 3, project: 1 }
+
+ it_behaves_like 'clears caches with',
+ event_class: Projects::ProjectCreatedEvent,
+ event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
+ caches: { namespace: 3, project: 1 }
+end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index 3951c20c048..4ddb793516f 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -274,6 +274,32 @@ RSpec.describe PostReceive do
expect { perform }.to change { counter.read(:pushes) }.by(1)
end
+
+ it 'records a Snowplow event with the correct payload', :snowplow do
+ stub_feature_flags(route_hll_to_snowplow_phase2: true)
+
+ perform
+
+ expect_snowplow_event(
+ category: 'PostReceive',
+ action: 'source_code_pushes',
+ namespace: project.namespace,
+ user: project.first_owner,
+ project: project
+ )
+ end
+
+ context 'when FF is disabled' do
+ before do
+ stub_feature_flags(route_hll_to_snowplow_phase2: false)
+ end
+
+ it 'does not emit Snowplow events', :snowplow do
+ perform
+
+ expect_no_snowplow_event
+ end
+ end
end
end
diff --git a/spec/workers/projects/after_import_worker_spec.rb b/spec/workers/projects/after_import_worker_spec.rb
index 332b547bb66..a14b2443173 100644
--- a/spec/workers/projects/after_import_worker_spec.rb
+++ b/spec/workers/projects/after_import_worker_spec.rb
@@ -39,8 +39,7 @@ RSpec.describe Projects::AfterImportWorker do
end
it 'removes refs/pull/**/*' do
- expect(rugged.references.map(&:name))
- .not_to include(%r{\Arefs/pull/})
+ expect(repository.list_refs(['refs/pull/'])).to be_empty
end
end
@@ -53,8 +52,7 @@ RSpec.describe Projects::AfterImportWorker do
end
it "does not remove refs/#{name}/tmp" do
- expect(rugged.references.map(&:name))
- .to include("refs/#{name}/tmp")
+ expect(repository.list_refs(["refs/#{name}/tmp"]).length).to be(1)
end
end
end
@@ -100,8 +98,7 @@ RSpec.describe Projects::AfterImportWorker do
it 'removes refs/pull/**/*' do
subject
- expect(rugged.references.map(&:name))
- .not_to include(%r{\Arefs/pull/})
+ expect(repository.list_refs(['refs/pull/'])).to be_empty
end
it 'records the failures in the database', :aggregate_failures do
@@ -123,9 +120,5 @@ RSpec.describe Projects::AfterImportWorker do
expect(import_failure.correlation_id_value).not_to be_empty
end
end
-
- def rugged
- rugged_repo(repository)
- end
end
end
diff --git a/spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb b/spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb
index 4a6a525a5a7..c7e45e7e4d7 100644
--- a/spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb
+++ b/spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb
@@ -62,32 +62,11 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsWorker do
describe '#max_running_jobs' do
subject { worker.max_running_jobs }
- context 'when all projects_build_artifacts_size_refresh flags are enabled' do
- it { is_expected.to eq(described_class::MAX_RUNNING_HIGH) }
- end
-
- context 'when projects_build_artifacts_size_refresh_high flags is disabled' do
- before do
- stub_feature_flags(projects_build_artifacts_size_refresh_high: false)
- end
-
- it { is_expected.to eq(described_class::MAX_RUNNING_MEDIUM) }
- end
-
- context 'when projects_build_artifacts_size_refresh_high and projects_build_artifacts_size_refresh_medium flags are disabled' do
- before do
- stub_feature_flags(projects_build_artifacts_size_refresh_high: false)
- stub_feature_flags(projects_build_artifacts_size_refresh_medium: false)
- end
-
- it { is_expected.to eq(described_class::MAX_RUNNING_LOW) }
- end
+ it { is_expected.to eq(10) }
- context 'when all projects_build_artifacts_size_refresh flags are disabled' do
+ context 'when projects_build_artifacts_size_refresh flag is disabled' do
before do
- stub_feature_flags(projects_build_artifacts_size_refresh_low: false)
- stub_feature_flags(projects_build_artifacts_size_refresh_medium: false)
- stub_feature_flags(projects_build_artifacts_size_refresh_high: false)
+ stub_feature_flags(projects_build_artifacts_size_refresh: false)
end
it { is_expected.to eq(0) }
diff --git a/spec/workers/remove_expired_group_links_worker_spec.rb b/spec/workers/remove_expired_group_links_worker_spec.rb
index 151bbb75226..7bdf6fc0d59 100644
--- a/spec/workers/remove_expired_group_links_worker_spec.rb
+++ b/spec/workers/remove_expired_group_links_worker_spec.rb
@@ -51,16 +51,41 @@ RSpec.describe RemoveExpiredGroupLinksWorker do
subject.perform
end
- it 'removes project authorization', :sidekiq_inline do
- shared_group = group_group_link.shared_group
- shared_with_group = group_group_link.shared_with_group
- project = create(:project, group: shared_group)
+ context 'with skip_group_share_unlink_auth_refresh feature flag disabled' do
+ before do
+ stub_feature_flags(skip_group_share_unlink_auth_refresh: false)
+ end
+
+ it 'removes project authorization', :sidekiq_inline do
+ shared_group = group_group_link.shared_group
+ shared_with_group = group_group_link.shared_with_group
+ project = create(:project, group: shared_group)
+
+ user = create(:user)
+ shared_with_group.add_maintainer(user)
+
+ expect { subject.perform }.to(
+ change { user.can?(:read_project, project) }.from(true).to(false))
+ end
+ end
+
+ context 'with skip_group_share_unlink_auth_refresh feature flag enabled' do
+ before do
+ stub_feature_flags(skip_group_share_unlink_auth_refresh: true)
+ end
+
+ it 'does not remove project authorization', :sidekiq_inline do
+ shared_group = group_group_link.shared_group
+ shared_with_group = group_group_link.shared_with_group
+ project = create(:project, group: shared_group)
+
+ user = create(:user)
+ shared_with_group.add_maintainer(user)
- user = create(:user)
- shared_with_group.add_maintainer(user)
+ subject.perform
- expect { subject.perform }.to(
- change { user.can?(:read_project, project) }.from(true).to(false))
+ expect(user.can?(:read_project, project)).to be_truthy
+ end
end
end
diff --git a/spec/workers/remove_expired_members_worker_spec.rb b/spec/workers/remove_expired_members_worker_spec.rb
index 6d0d4aeef89..8d7d488094f 100644
--- a/spec/workers/remove_expired_members_worker_spec.rb
+++ b/spec/workers/remove_expired_members_worker_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe RemoveExpiredMembersWorker do
let_it_be(:expired_project_bot) { create(:user, :project_bot) }
before do
- project.add_user(expired_project_bot, :maintainer, expires_at: 1.day.from_now)
+ project.add_member(expired_project_bot, :maintainer, expires_at: 1.day.from_now)
travel_to(3.days.from_now)
end
@@ -67,7 +67,7 @@ RSpec.describe RemoveExpiredMembersWorker do
let_it_be(:other_project_bot) { create(:user, :project_bot) }
before do
- project.add_user(other_project_bot, :maintainer, expires_at: 10.days.from_now)
+ project.add_member(other_project_bot, :maintainer, expires_at: 10.days.from_now)
travel_to(3.days.from_now)
end
diff --git a/spec/workers/stage_update_worker_spec.rb b/spec/workers/stage_update_worker_spec.rb
index 75b324a9e0a..e50c7183153 100644
--- a/spec/workers/stage_update_worker_spec.rb
+++ b/spec/workers/stage_update_worker_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe StageUpdateWorker do
describe '#perform' do
context 'when stage exists' do
- let(:stage) { create(:ci_stage_entity) }
+ let(:stage) { create(:ci_stage) }
it 'updates stage status' do
expect_any_instance_of(Ci::Stage).to receive(:set_status).with('skipped')
diff --git a/spec/workers/web_hooks/destroy_worker_spec.rb b/spec/workers/web_hooks/destroy_worker_spec.rb
deleted file mode 100644
index 8e75610a031..00000000000
--- a/spec/workers/web_hooks/destroy_worker_spec.rb
+++ /dev/null
@@ -1,64 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe WebHooks::DestroyWorker do
- include AfterNextHelpers
-
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user) }
-
- before_all do
- project.add_maintainer(user)
- end
-
- subject { described_class.new }
-
- describe "#perform" do
- context 'with a Web hook' do
- let!(:hook) { create(:project_hook, project: project) }
- let!(:other_hook) { create(:project_hook, project: project) }
- let!(:log) { create(:web_hook_log, web_hook: hook) }
- let!(:other_log) { create(:web_hook_log, web_hook: other_hook) }
-
- it "deletes the Web hook and logs", :aggregate_failures do
- expect(WebHooks::LogDestroyWorker).to receive(:perform_async)
-
- expect { subject.perform(user.id, hook.id) }
- .to change { WebHook.count }.from(2).to(1)
-
- expect(WebHook.find(other_hook.id)).to be_present
- expect(WebHookLog.find(other_log.id)).to be_present
- end
-
- it "raises and tracks an error if destroy failed" do
- expect_next(::WebHooks::DestroyService)
- .to receive(:sync_destroy).with(anything)
- .and_return(ServiceResponse.error(message: "failed"))
-
- expect(Gitlab::ErrorTracking)
- .to receive(:track_and_raise_exception)
- .with(an_instance_of(described_class::DestroyError), { web_hook_id: hook.id })
- .and_call_original
-
- expect { subject.perform(user.id, hook.id) }.to raise_error(described_class::DestroyError)
- end
-
- context 'with unknown hook' do
- it 'does not raise an error' do
- expect { subject.perform(user.id, non_existing_record_id) }.not_to raise_error
-
- expect(WebHook.count).to eq(2)
- end
- end
-
- context 'with unknown user' do
- it 'does not raise an error' do
- expect { subject.perform(non_existing_record_id, hook.id) }.not_to raise_error
-
- expect(WebHook.count).to eq(2)
- end
- end
- end
- end
-end